Merge branch 'release-0.7.0'
diff --git a/.gitignore b/.gitignore
index 1a14e10..06f8098 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
 .project
 project/*
 !project/Build.scala
+!project/Common.scala
 !project/plugins.sbt
 target/
 build/
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..b9f4720
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,8 @@
+language: scala
+scala:
+  - "2.10.3"
+jdk:
+  - oraclejdk7
+  - openjdk7
+  - openjdk6
+services: mongodb
diff --git a/CREDITS b/CREDITS
index 7ee3f35..8e2c84e 100644
--- a/CREDITS
+++ b/CREDITS
@@ -13,6 +13,42 @@
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
 
+GraphChi
+https://github.com/GraphChi/graphchi-cpp
+----------------------------------------
+Copyright [2012] [Aapo Kyrola, Guy Blelloch, Carlos Guestrin / Carnegie Mellon University]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+GraphChi Collaborative Filtering Toolkit
+https://github.com/GraphChi/graphchi-cpp/tree/master/toolkits/collaborative_filtering
+----------------------------------------
+Copyright [2012] [Danny Bickson / Carnegie Mellon University]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
 Movie Recommendations and More via MapReduce and Scalding
 https://github.com/echen/scaldingale
 ----------------------------------------
diff --git a/bin/build.sh b/bin/build.sh
index 71fd37a..f02ca4d 100755
--- a/bin/build.sh
+++ b/bin/build.sh
@@ -35,38 +35,74 @@
 else
     echo "+ Assemble Process Hadoop Scalding"
     BASE_TARGETS="$BASE_TARGETS processHadoopScalding/assembly"
-
-    echo "+ Assemble Process Commons Evaluations Scala Parameter Generator"
-    BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaParamGen/assembly"
-
-    echo "+ Assemble Process Commons Evaluations Scala U2I Training-Test Splitter"
-    BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaU2ITrainingTestSplit/assembly"
-
-    echo "+ Assemble Process ItemRec Algorithms Scala Mahout"
-    BASE_TARGETS="$BASE_TARGETS processEnginesItemRecAlgoScalaMahout/assembly"
-
-    echo "+ Assemble Process ItemRec Evaluations Scala Top-k Items Collector"
-    BASE_TARGETS="$BASE_TARGETS processEnginesItemRecEvalScalaTopKItems/assembly"
-
-    echo "+ Assemble Process ItemSim Evaluations Scala Top-k Items Collector"
-    BASE_TARGETS="$BASE_TARGETS processEnginesItemSimEvalScalaTopKItems/assembly"
 fi
 
+# Build Non-distributed Random Algorithm
+echo "+ Pack Non-distributed Random Algorithm"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsAlgoScalaRandom/pack"
+
+# Build Generic Single Machine ItemRec Data Preparator
+echo "+ Pack Single Machine Generic ItemRec Data Preparator"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemRecAlgoScalaGeneric/pack"
+
+# Build Mahout ItemRec Job and Model Constructor
+echo "+ Pack Mahout ItemRec Job and Model Constructor"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemRecAlgoScalaMahout/pack"
+
+# Build GraphChi Model Constructor
+echo "+ Pack GraphChi ItemRec Model Constructor"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemRecAlgoScalaGraphChi/pack"
+
+# Build Generic Single Machine ItemSim Data Preparator
+echo "+ Pack Single Machine Generic ItemSim Data Preparator"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemSimAlgoScalaGeneric/pack"
+
+# Build Mahout ItemSim Job and Model Constructor
+echo "+ Pack Mahout ItemSim Job and Model Constructor"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemSimAlgoScalaMahout/pack"
+
+# Build GraphChi Model Constructor
+echo "+ Pack GraphChi ItemSim Model Constructor"
+BASE_TARGETS="$BASE_TARGETS processEnginesItemSimAlgoScalaGraphChi/pack"
+
+# Build Single Machine U2I Action Splitter
+echo "+ Pack Single Machine U2I Action Splitter"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaU2ISplit/pack"
+
+# Build Single Machine MAP@k
+echo "+ Pack Single Machine MAP@k"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaMetricsMAP/pack"
+
+# Build parameter generator
+echo "+ Pack Process Commons Evaluations Scala Parameter Generator"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaParamGen/pack"
+
+# Build Single Machine Top-K Collector
+echo "+ Pack Single Machine Top-K Collector"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaTopKItems/pack"
+
+echo "+ Pack Process Commons Evaluations Scala U2I Training-Test Splitter Wrapper"
+BASE_TARGETS="$BASE_TARGETS processEnginesCommonsEvalScalaU2ITrainingTestSplit/pack"
+
 # Build connection check tool
 echo "+ Pack Connection Check Tool"
-BASE_TARGETS="$BASE_TARGETS toolsConncheck/stage"
+BASE_TARGETS="$BASE_TARGETS toolsConncheck/pack"
 
 # Build settings initialization tool
 echo "+ Pack Settings Initialization Tool"
-BASE_TARGETS="$BASE_TARGETS toolsSettingsInit/stage"
+BASE_TARGETS="$BASE_TARGETS toolsSettingsInit/pack"
 
 # Build software manager
 echo "+ Pack Software Manager"
-BASE_TARGETS="$BASE_TARGETS toolsSoftwareManager/stage"
+BASE_TARGETS="$BASE_TARGETS toolsSoftwareManager/pack"
 
 # Build user tool
 echo "+ Pack User Tool"
-BASE_TARGETS="$BASE_TARGETS toolsUsers/stage"
+BASE_TARGETS="$BASE_TARGETS toolsUsers/pack"
+
+# Build migration tool for 0.7
+echo "+ Pack 0.7 Migration Tool"
+BASE_TARGETS="$BASE_TARGETS toolsMigrationStandardizedInfoIDs/pack"
 
 $SBT $CLEAN $BASE_TARGETS
 
diff --git a/bin/common.sh b/bin/common.sh
index 695770b..5bdd727 100644
--- a/bin/common.sh
+++ b/bin/common.sh
@@ -2,7 +2,7 @@
 
 # This script should be sourced with $BASE set to the base of the repository
 
-VERSION=0.6.8
+VERSION=0.7.0
 
 # Play framework related
 PLAY_OPTS=
@@ -26,8 +26,12 @@
 
 # Packaging related
 PACKAGE_NAME="PredictionIO-$VERSION"
+PACKAGE_NAME_LINUX32="PredictionIO-linux-i686-$VERSION"
+PACKAGE_NAME_LINUX64="PredictionIO-linux-x86_64-$VERSION"
 DIST_DIR="$BASE/dist"
 PACKAGE_DIR="$DIST_DIR/target/$PACKAGE_NAME"
+PACKAGE_DIR_LINUX32="$DIST_DIR/target/$PACKAGE_NAME_LINUX32"
+PACKAGE_DIR_LINUX64="$DIST_DIR/target/$PACKAGE_NAME_LINUX64"
 
 # Kill the whole shell when Ctrl+C is pressed
 trap "exit 1" INT
diff --git a/bin/package.sh b/bin/package.sh
index 6ebd1b7..faebc24 100755
--- a/bin/package.sh
+++ b/bin/package.sh
@@ -9,7 +9,7 @@
 # Get the absolute path of the build script
 SCRIPT="$0"
 while [ -h "$SCRIPT" ] ; do
-	SCRIPT=`readlink "$SCRIPT"`
+    SCRIPT=`readlink "$SCRIPT"`
 done
 
 # Get the base directory of the repo
@@ -36,7 +36,7 @@
 $PLAY stage
 
 # Packaging
-rm -rf "$PACKAGE_DIR"
+rm -rf $PACKAGE_DIR $PACKAGE_DIR_LINUX32 $PACKAGE_DIR_LINUX64
 mkdir -p "$PACKAGE_DIR/bin"
 mkdir -p "$PACKAGE_DIR/lib"
 
@@ -52,24 +52,93 @@
 cp -R $DIST_DIR/conf $PACKAGE_DIR
 
 cp "$BASE/process/target/scala-2.10/predictionio-process-hadoop-scalding-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp "$BASE/process/engines/commons/evaluations/scala/paramgen/target/scala-2.10/predictionio-process-commons-evaluations-paramgen-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp "$BASE/process/engines/commons/evaluations/scala/u2itrainingtestsplit/target/scala-2.10/predictionio-process-commons-evaluations-scala-u2itrainingtestsplittime-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp "$BASE/process/engines/itemrec/algorithms/scala/mahout/target/scala-2.10/predictionio-process-itemrec-algorithms-scala-mahout-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp "$BASE/process/engines/itemrec/evaluations/scala/topkitems/target/scala-2.10/predictionio-process-itemrec-evaluations-topkitems-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp "$BASE/process/engines/itemsim/evaluations/scala/topkitems/target/scala-2.10/predictionio-process-itemsim-evaluations-topkitems-assembly-$VERSION.jar" "$PACKAGE_DIR/lib"
-cp -n $BASE/tools/conncheck/target/universal/stage/bin/conncheck $PACKAGE_DIR/bin
-cp -n $BASE/tools/conncheck/target/universal/stage/lib/* $PACKAGE_DIR/lib
-cp -n $BASE/tools/settingsinit/target/universal/stage/bin/settingsinit $PACKAGE_DIR/bin
-cp -n $BASE/tools/settingsinit/target/universal/stage/lib/* $PACKAGE_DIR/lib
-cp -n $BASE/tools/softwaremanager/target/universal/stage/lib/* $PACKAGE_DIR/lib
-cp -n $BASE/tools/users/target/universal/stage/bin/users $PACKAGE_DIR/bin
-cp -n $BASE/tools/users/target/universal/stage/lib/* $PACKAGE_DIR/lib
 
-mkdir -p $PACKAGE_DIR/vendors/mahout-distribution-0.8
-cp $VENDOR_MAHOUT/mahout-core-0.8-job.jar $PACKAGE_DIR/vendors/mahout-distribution-0.8
+cp $BASE/process/engines/commons/algorithms/scala/random/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/algorithms/scala/random/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/commons/evaluations/scala/map/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/evaluations/scala/map/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/commons/evaluations/scala/paramgen/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/evaluations/scala/paramgen/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/commons/evaluations/scala/topkitems/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/evaluations/scala/topkitems/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/commons/evaluations/scala/u2isplit/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/evaluations/scala/u2isplit/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/commons/evaluations/scala/u2itrainingtestsplit/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/commons/evaluations/scala/u2itrainingtestsplit/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemrec/algorithms/scala/generic/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemrec/algorithms/scala/generic/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemrec/algorithms/scala/mahout/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemrec/algorithms/scala/mahout/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemrec/algorithms/scala/graphchi/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemrec/algorithms/scala/graphchi/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemsim/algorithms/scala/generic/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemsim/algorithms/scala/generic/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemsim/algorithms/scala/mahout/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemsim/algorithms/scala/mahout/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/process/engines/itemsim/algorithms/scala/graphchi/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/process/engines/itemsim/algorithms/scala/graphchi/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/tools/conncheck/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/tools/conncheck/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/tools/settingsinit/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/tools/settingsinit/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/tools/softwaremanager/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/tools/softwaremanager/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/tools/users/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/tools/users/target/pack/lib/* $PACKAGE_DIR/lib
+
+cp $BASE/tools/migration/0.7/infos/target/pack/bin/* $PACKAGE_DIR/bin
+cp -n $BASE/tools/migration/0.7/infos/target/pack/lib/* $PACKAGE_DIR/lib
+
+mkdir -p $PACKAGE_DIR/vendors/mahout-distribution-0.9
+cp $VENDOR_MAHOUT/mahout-core-0.9-job.jar $PACKAGE_DIR/vendors/mahout-distribution-0.9
 
 cd $DIST_DIR/target
-rm "$PACKAGE_NAME.zip"
-zip -q -r "$PACKAGE_NAME.zip" "$PACKAGE_NAME"
 
-echo "Packaging finished at $DIST_DIR/target/$PACKAGE_NAME.zip"
+# Multi-arch targets
+
+if test "$MULTI_ARCH" = "1" ; then
+    cp -R $PACKAGE_DIR $PACKAGE_DIR_LINUX64
+    mv $PACKAGE_DIR $PACKAGE_DIR_LINUX32
+
+    cd $PACKAGE_DIR_LINUX32
+    cp $VENDOR_GRAPHCHI_CPP_CF_LINUX32/* $PACKAGE_DIR_LINUX32/bin
+
+    cd $PACKAGE_DIR_LINUX64
+    cp $VENDOR_GRAPHCHI_CPP_CF_LINUX64/* $PACKAGE_DIR_LINUX64/bin
+
+    cd $DIST_DIR/target
+
+    if [ -e "$PACKAGE_NAME_LINUX32.zip" ] ; then
+        rm "$PACKAGE_NAME_LINUX32.zip"
+    fi
+    if [ -e "$PACKAGE_NAME_LINUX64.zip" ] ; then
+        rm "$PACKAGE_NAME_LINUX64.zip"
+    fi 
+
+    zip -q -r "$PACKAGE_NAME_LINUX32.zip" "$PACKAGE_NAME_LINUX32"
+    zip -q -r "$PACKAGE_NAME_LINUX64.zip" "$PACKAGE_NAME_LINUX64"
+
+    echo "Packaging finished:"
+    echo "- $DIST_DIR/target/$PACKAGE_NAME_LINUX32.zip"
+    echo "- $DIST_DIR/target/$PACKAGE_NAME_LINUX64.zip"
+else
+    rm "$PACKAGE_NAME.zip"
+    zip -q -r "$PACKAGE_NAME.zip" "$PACKAGE_NAME"
+  
+    echo "Packaging finished at $DIST_DIR/target/$PACKAGE_NAME.zip"
+fi
diff --git a/bin/vendors.sh b/bin/vendors.sh
index 486e8a1..b01e135 100644
--- a/bin/vendors.sh
+++ b/bin/vendors.sh
@@ -7,45 +7,67 @@
     command -v "$1" >/dev/null 2>&1
 }
 
+# Third party software
+VENDORS_PATH="$BASE/vendors"
+VENDOR_SBT="$VENDORS_PATH/sbt-0.13.1/sbt"
+VENDOR_PLAY_VERSION="2.2.2"
+VENDOR_PLAY="$VENDORS_PATH/play-$VENDOR_PLAY_VERSION/play"
+VENDOR_MAHOUT="$VENDORS_PATH/mahout-distribution-0.9"
+VENDOR_GRAPHCHI_CPP_CF_LINUX32="$VENDORS_PATH/graphchi-cpp-cf-linux-i686-0a6545ccb7"
+VENDOR_GRAPHCHI_CPP_CF_LINUX64="$VENDORS_PATH/graphchi-cpp-cf-linux-x86_64-0a6545ccb7"
+
 install_sbt () {
-    echo "Going to download and install sbt 0.13.0..."
-    local VENDORS_PATH=$1/sbt-0.13.0
+    echo "Going to download and install sbt 0.13.1..."
+    local VENDORS_PATH=$1/sbt-0.13.1
     mkdir -p $VENDORS_PATH
     cd $VENDORS_PATH
-    curl -O http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.0/sbt-launch.jar
+    curl -O http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.1/sbt-launch.jar
     echo 'java -Xms512M -Xmx1536M -Xss1M -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=512M -jar `dirname $0`/sbt-launch.jar "$@"' > sbt
     chmod a+x sbt
     cd $BASE
 }
 
 install_play () {
-    echo "Going to download and install Play Framework 2.2.0..."
+    echo "Going to download and install Play Framework $VENDOR_PLAY_VERSION..."
     local VENDORS_PATH=$1
     mkdir -p $VENDORS_PATH
     cd $VENDORS_PATH
-    curl -O http://downloads.typesafe.com/play/2.2.0/play-2.2.0.zip
-    unzip play-2.2.0.zip
+    curl -O http://downloads.typesafe.com/play/$VENDOR_PLAY_VERSION/play-$VENDOR_PLAY_VERSION.zip
+    unzip play-$VENDOR_PLAY_VERSION.zip
     cd $BASE
 }
 
 install_mahout () {
-    echo "Going to download and install Apache Mahout 0.8..."
+    echo "Going to download and install Apache Mahout 0.9..."
     mkdir -p $VENDORS_PATH
     cd $VENDORS_PATH
     echo "Retrieving Apache mirror list..."
-    curl -o apache_mahout_mirrors.txt http://www.apache.org/dyn/closer.cgi/mahout/0.8/mahout-distribution-0.8.tar.gz
+    curl -o apache_mahout_mirrors.txt http://www.apache.org/dyn/closer.cgi/mahout/0.9/mahout-distribution-0.9.tar.gz
     MAHOUT_URL=$(cat apache_mahout_mirrors.txt | grep -m 1 "<strong>.*</strong>" | sed 's/.*<strong>//' | sed 's/<\/strong>.*//')
     echo "Found mirror: $MAHOUT_URL"
     curl -O $MAHOUT_URL
-    tar zxvf mahout-distribution-0.8.tar.gz
+    #curl -O http://archive.apache.org/dist/mahout/0.8/mahout-distribution-0.8.tar.gz
+    tar zxvf mahout-distribution-0.9.tar.gz
     cd $BASE
 }
 
-# Third party software
-VENDORS_PATH="$BASE/vendors"
-VENDOR_SBT="$VENDORS_PATH/sbt-0.13.0/sbt"
-VENDOR_PLAY="$VENDORS_PATH/play-2.2.0/play"
-VENDOR_MAHOUT="$VENDORS_PATH/mahout-distribution-0.8"
+install_graphchi_cpp_cf_linux32 () {
+    echo "Going to download and install GraphChi C++ CF Toolbox for Linux 32-bit..."
+    mkdir -p $VENDORS_PATH
+    cd $VENDORS_PATH
+    curl -O http://download.prediction.io/graphchi-cpp-cf/graphchi-cpp-cf-linux-i686-0a6545ccb7.tar.gz
+    tar zxvf graphchi-cpp-cf-linux-i686-0a6545ccb7.tar.gz
+    cd $BASE
+}
+
+install_graphchi_cpp_cf_linux64 () {
+    echo "Going to download and install GraphChi C++ CF Toolbox for Linux 64-bit..."
+    mkdir -p $VENDORS_PATH
+    cd $VENDORS_PATH
+    curl -O http://download.prediction.io/graphchi-cpp-cf/graphchi-cpp-cf-linux-x86_64-0a6545ccb7.tar.gz
+    tar zxvf graphchi-cpp-cf-linux-x86_64-0a6545ccb7.tar.gz
+    cd $BASE
+}
 
 # Detect existing installations in search path
 # Do not use existing sbt to enforce JVM settings
@@ -53,12 +75,12 @@
 #   echo "Using sbt in search path. No additional JVM optimization will be set."
 #   SBT=sbt
 if [ -x "$VENDOR_SBT" ] ; then
-    echo "Using sbt 0.13.0 in vendors."
+    echo "Using sbt 0.13.1 in vendors."
     SBT="$VENDOR_SBT"
 elif install_sbt "$VENDORS_PATH" ; then
     SBT="$VENDOR_SBT"
 else
-    echo "Unable to locate sbt 0.13.0 and automatic installation failed. Aborting." >&2
+    echo "Unable to locate sbt 0.13.1 and automatic installation failed. Aborting." >&2
     exit 1
 fi
 
@@ -66,20 +88,39 @@
 #if command_exists "play" ; then
 #   PLAY=play
 if [ -x "$VENDOR_PLAY" ] ; then
-    echo "Using Play Framework 2.2.0 in vendors."
+    echo "Using Play Framework $VENDOR_PLAY_VERSION in vendors."
     PLAY="$VENDOR_PLAY"
 elif install_play "$VENDORS_PATH" ; then
     PLAY="$VENDOR_PLAY"
 else
-    echo "Unable to locate Play Framework 2.2.0 and automatic installation failed. Aborting." >&2
+    echo "Unable to locate Play Framework $VENDOR_PLAY_VERSION and automatic installation failed. Aborting." >&2
     exit 1
 fi
 
-if [ -r "$VENDOR_MAHOUT/mahout-core-0.8-job.jar" ] ; then
-    echo "Using Apache Mahout 0.8 in vendors."
+if [ -r "$VENDOR_MAHOUT/mahout-core-0.9-job.jar" ] ; then
+    echo "Using Apache Mahout 0.9 in vendors."
 elif install_mahout ; then
     echo ""
 else
-    echo "Unable to locate Apache Mahout 0.8 and automatic installation failed. Aborting." >&2
+    echo "Unable to locate Apache Mahout 0.9 and automatic installation failed. Aborting." >&2
     exit 1
 fi
+
+if [ -r "$VENDOR_GRAPHCHI_CPP_CF_LINUX32/als" ] ; then
+    echo "Using GraphChi C++ CF Toolbox for Linux 32-bit."
+elif install_graphchi_cpp_cf_linux32 ; then
+    echo ""
+else
+    echo "Unable to locate GraphChi C++ CF Toolbox for Linux 32-bit and automatic installation failed. Aborting." >&2
+    exit 1
+fi
+
+if [ -r "$VENDOR_GRAPHCHI_CPP_CF_LINUX64/als" ] ; then
+    echo "Using GraphChi C++ CF Toolbox for Linux 64-bit."
+elif install_graphchi_cpp_cf_linux64 ; then
+    echo ""
+else
+    echo "Unable to locate GraphChi C++ CF Toolbox for Linux 64-bit and automatic installation failed. Aborting." >&2
+    exit 1
+fi
+
diff --git a/build.sbt b/build.sbt
index 5c9de39..dd2fd3a 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,14 +1,10 @@
-import com.typesafe.sbt.SbtNativePackager.Universal
-
-import com.typesafe.sbt.packager.Keys._
-
 name := "predictionio"
 
-version in ThisBuild := "0.6.8"
+version in ThisBuild := "0.7.0"
 
 organization in ThisBuild := "io.prediction"
 
-scalaVersion in ThisBuild := "2.10.2"
+scalaVersion in ThisBuild := "2.10.3"
 
 scalacOptions in ThisBuild ++= Seq("-deprecation", "-unchecked", "-feature")
 
@@ -19,7 +15,7 @@
 libraryDependencies in ThisBuild ++= Seq(
   "com.github.nscala-time" %% "nscala-time" % "0.6.0",
   "org.slf4j" % "slf4j-log4j12" % "1.7.5" % "test",
-  "org.specs2" %% "specs2" % "1.14" % "test")
+  "org.specs2" %% "specs2" % "2.3.10" % "test")
 
 publishTo in ThisBuild := Some(Resolver.file("file",  new File(Path.userHome.absolutePath+"/.m2/repository")))
 
@@ -31,10 +27,12 @@
   commons,
   output,
   processHadoopScalding,
+  processEnginesCommonsAlgoScalaRandom,
+  processEnginesCommonsEvalScalaMetricsMAP,
   processEnginesCommonsEvalScalaParamGen,
+  processEnginesCommonsEvalScalaTopKItems,
   processEnginesCommonsEvalScalaU2ITrainingTestSplit,
   processEnginesItemRecAlgoScalaMahout,
-  processEnginesItemRecEvalScalaTopKItems,
   processEnginesItemSimEvalScalaTopKItems,
   toolsConncheck,
   toolsSettingsInit,
@@ -70,6 +68,11 @@
   .in(file("process/commons/hadoop/scalding")).dependsOn(commons)
   .settings(scalariformSettings: _*)
 
+lazy val processEnginesCommonsAlgoScalaRandom = project
+  .in(file("process/engines/commons/algorithms/scala/random"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
 lazy val processEnginesCommonsEvalHadoopScalding = project
   .in(file("process/engines/commons/evaluations/hadoop/scalding"))
   .aggregate(processEnginesCommonsEvalHadoopScaldingU2ITrainingTestSplit)
@@ -79,14 +82,30 @@
   .in(file("process/engines/commons/evaluations/hadoop/scalding/u2itrainingtestsplit"))
   .dependsOn(processCommonsHadoopScalding)
 
+lazy val processEnginesCommonsEvalScalaMetricsMAP = project
+  .in(file("process/engines/commons/evaluations/scala/map"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
 lazy val processEnginesCommonsEvalScalaParamGen = project
   .in(file("process/engines/commons/evaluations/scala/paramgen"))
   .dependsOn(commons)
   .settings(scalariformSettings: _*)
 
+lazy val processEnginesCommonsEvalScalaTopKItems = project
+  .in(file("process/engines/commons/evaluations/scala/topkitems"))
+  .dependsOn(commons, output)
+  .settings(scalariformSettings: _*)
+
+lazy val processEnginesCommonsEvalScalaU2ISplit = project
+  .in(file("process/engines/commons/evaluations/scala/u2isplit"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
 lazy val processEnginesCommonsEvalScalaU2ITrainingTestSplit = project
   .in(file("process/engines/commons/evaluations/scala/u2itrainingtestsplit"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemRecAlgoHadoopScalding = project
   .in(file("process/engines/itemrec/algorithms/hadoop/scalding"))
@@ -130,50 +149,18 @@
 
 lazy val processEnginesItemRecAlgoScalaMahout = project
   .in(file("process/engines/itemrec/algorithms/scala/mahout"))
-  .aggregate(
-    processEnginesItemRecAlgoScalaMahoutCommons,
-    processEnginesItemRecAlgoScalaMahoutALSWR,
-    processEnginesItemRecAlgoScalaMahoutKNNUserBased,
-    processEnginesItemRecAlgoScalaMahoutSlopeOne,
-    processEnginesItemRecAlgoScalaMahoutSVDPlusPlus,
-    processEnginesItemRecAlgoScalaMahoutSVDSGD,
-    processEnginesItemRecAlgoScalaMahoutThresholdUserBased)
-  .dependsOn(
-    processEnginesItemRecAlgoScalaMahoutCommons,
-    processEnginesItemRecAlgoScalaMahoutALSWR,
-    processEnginesItemRecAlgoScalaMahoutKNNUserBased,
-    processEnginesItemRecAlgoScalaMahoutSlopeOne,
-    processEnginesItemRecAlgoScalaMahoutSVDPlusPlus,
-    processEnginesItemRecAlgoScalaMahoutSVDSGD,
-    processEnginesItemRecAlgoScalaMahoutThresholdUserBased)
-
-lazy val processEnginesItemRecAlgoScalaMahoutCommons = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/commons"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
-lazy val processEnginesItemRecAlgoScalaMahoutALSWR = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/alswr"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
+lazy val processEnginesItemRecAlgoScalaGeneric = project
+  .in(file("process/engines/itemrec/algorithms/scala/generic"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
-lazy val processEnginesItemRecAlgoScalaMahoutKNNUserBased = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/knnuserbased"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
-
-lazy val processEnginesItemRecAlgoScalaMahoutSlopeOne = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/slopeone"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
-
-lazy val processEnginesItemRecAlgoScalaMahoutSVDPlusPlus = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/svdplusplus"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
-
-lazy val processEnginesItemRecAlgoScalaMahoutSVDSGD = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/svdsgd"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
-
-lazy val processEnginesItemRecAlgoScalaMahoutThresholdUserBased = project
-  .in(file("process/engines/itemrec/algorithms/scala/mahout/thresholduserbased"))
-  .dependsOn(processEnginesItemRecAlgoScalaMahoutCommons)
+lazy val processEnginesItemRecAlgoScalaGraphChi = project
+  .in(file("process/engines/itemrec/algorithms/scala/graphchi"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemRecEvalHadoopScalding = project
   .in(file("process/engines/itemrec/evaluations/hadoop/scalding"))
@@ -185,10 +172,7 @@
 lazy val processEnginesItemRecEvalHadoopScaldingMetricsMAP = project
   .in(file("process/engines/itemrec/evaluations/hadoop/scalding/metrics/map"))
   .dependsOn(processCommonsHadoopScalding)
-
-lazy val processEnginesItemRecEvalScalaTopKItems = project
-  .in(file("process/engines/itemrec/evaluations/scala/topkitems"))
-  .dependsOn(commons, output)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimAlgoHadoopScalding = project
   .in(file("process/engines/itemsim/algorithms/hadoop/scalding"))
@@ -206,18 +190,37 @@
 lazy val processEnginesItemSimAlgoHadoopScaldingItemSimCF = project
   .in(file("process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf"))
   .dependsOn(processCommonsHadoopScalding)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimAlgoHadoopScaldingLatestRank = project
   .in(file("process/engines/itemsim/algorithms/hadoop/scalding/latestrank"))
   .dependsOn(processCommonsHadoopScalding)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimAlgoHadoopScaldingMahout = project
   .in(file("process/engines/itemsim/algorithms/hadoop/scalding/mahout"))
   .dependsOn(processCommonsHadoopScalding)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimAlgoHadoopScaldingRandomRank = project
   .in(file("process/engines/itemsim/algorithms/hadoop/scalding/randomrank"))
   .dependsOn(processCommonsHadoopScalding)
+  .settings(scalariformSettings: _*)
+
+lazy val processEnginesItemSimAlgoScalaGeneric = project
+  .in(file("process/engines/itemsim/algorithms/scala/generic"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
+lazy val processEnginesItemSimAlgoScalaMahout = project
+  .in(file("process/engines/itemsim/algorithms/scala/mahout"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
+lazy val processEnginesItemSimAlgoScalaGraphChi = project
+  .in(file("process/engines/itemsim/algorithms/scala/graphchi"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimEvalHadoopScalding = project
   .in(file("process/engines/itemsim/evaluations/hadoop/scalding"))
@@ -229,21 +232,31 @@
 lazy val processEnginesItemSimEvalHadoopScaldingMetricsISMAP = project
   .in(file("process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap"))
   .dependsOn(processCommonsHadoopScalding)
+  .settings(scalariformSettings: _*)
 
 lazy val processEnginesItemSimEvalScalaTopKItems = project
   .in(file("process/engines/itemsim/evaluations/scala/topkitems"))
   .dependsOn(commons, output)
+  .settings(scalariformSettings: _*)
 
 // Tools Section
 
 lazy val toolsConncheck = project.in(file("tools/conncheck"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val toolsSettingsInit = project.in(file("tools/settingsinit"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val toolsSoftwareManager = project.in(file("tools/softwaremanager"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
 
 lazy val toolsUsers = project.in(file("tools/users"))
   .dependsOn(commons)
+  .settings(scalariformSettings: _*)
+
+lazy val toolsMigrationStandardizedInfoIDs = project.in(file("tools/migration/0.7/infos"))
+  .dependsOn(commons)
+  .settings(scalariformSettings: _*)
diff --git a/commons/build.sbt b/commons/build.sbt
index abe493d..11af6d9 100644
--- a/commons/build.sbt
+++ b/commons/build.sbt
@@ -4,6 +4,6 @@
 
 libraryDependencies ++= Seq(
   "com.typesafe" % "config" % "1.0.2",
-  "org.json4s" %% "json4s-native" % "3.2.6",
-  "org.json4s" %% "json4s-ext" % "3.2.6",
+  "org.json4s" %% "json4s-native" % "3.2.7",
+  "org.json4s" %% "json4s-ext" % "3.2.7",
   "org.mongodb" %% "casbah" % "2.6.2")
diff --git a/commons/src/main/scala/io/prediction/commons/Config.scala b/commons/src/main/scala/io/prediction/commons/Config.scala
index 932ca4f..b91c5d4 100644
--- a/commons/src/main/scala/io/prediction/commons/Config.scala
+++ b/commons/src/main/scala/io/prediction/commons/Config.scala
@@ -674,7 +674,7 @@
 
   /** Obtains a generic ModelData object for training with configured backend type. */
   def getModeldataTraining(engineinfoid: String): modeldata.ModelData = {
-    modeldataDbType match {
+    modeldataTrainingDbType match {
       case "mongodb" => {
         val thisObj = this
         engineinfoid match {
@@ -692,7 +692,7 @@
 
   /** Obtains an ItemRecScores object for training with configured backend type. */
   def getModeldataTrainingItemRecScores(): modeldata.ItemRecScores = {
-    modeldataDbType match {
+    modeldataTrainingDbType match {
       case "mongodb" => {
         new modeldata.mongodb.MongoItemRecScores(this, modeldataTrainingMongoDb.get)
       }
@@ -702,7 +702,7 @@
 
   /** Obtains an ItemSimScores object for training with configured backend type. */
   def getModeldataTrainingItemSimScores(): modeldata.ItemSimScores = {
-    modeldataDbType match {
+    modeldataTrainingDbType match {
       case "mongodb" => {
         new modeldata.mongodb.MongoItemSimScores(this, modeldataTrainingMongoDb.get)
       }
diff --git a/commons/src/main/scala/io/prediction/commons/appdata/Items.scala b/commons/src/main/scala/io/prediction/commons/appdata/Items.scala
index 33cdacb..40a4ba6 100644
--- a/commons/src/main/scala/io/prediction/commons/appdata/Items.scala
+++ b/commons/src/main/scala/io/prediction/commons/appdata/Items.scala
@@ -44,6 +44,9 @@
   /** Find items by App ID sorted by geolocation distance. */
   def getByAppidAndLatlng(appid: Int, latlng: Tuple2[Double, Double], within: Option[Double], unit: Option[String]): Iterator[Item]
 
+  /** Find items by App ID which belong to one of the itypes. */
+  def getByAppidAndItypes(appid: Int, itypes: Seq[String]): Iterator[Item]
+
   /** Get items by IDs. */
   def getByIds(appid: Int, ids: Seq[String]): Seq[Item]
 
diff --git a/commons/src/main/scala/io/prediction/commons/appdata/U2IActions.scala b/commons/src/main/scala/io/prediction/commons/appdata/U2IActions.scala
index 8902289..cfff17d 100644
--- a/commons/src/main/scala/io/prediction/commons/appdata/U2IActions.scala
+++ b/commons/src/main/scala/io/prediction/commons/appdata/U2IActions.scala
@@ -52,6 +52,9 @@
   /** Gets all user-to-item actions by App ID, User ID, and Item IDs. */
   def getAllByAppidAndUidAndIids(appid: Int, uid: String, iids: Seq[String]): Iterator[U2IAction]
 
+  /** Gets all user-to-item actions by App ID and Item ID, optionally sorted by User ID. */
+  def getAllByAppidAndIid(appid: Int, iid: String, sortedByUid: Boolean = true): Iterator[U2IAction]
+
   /** Delete all user-to-item actions by App ID */
   def deleteByAppid(appid: Int): Unit
 
diff --git a/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoItems.scala b/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoItems.scala
index d889f02..a923994 100644
--- a/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoItems.scala
+++ b/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoItems.scala
@@ -59,6 +59,12 @@
     new MongoItemsIterator(itemColl.find(MongoDBObject("appid" -> appid, "lnglat" -> (nearSphereObj ++ maxDistObj))))
   }
 
+  def getByAppidAndItypes(appid: Int, itypes: Seq[String]): Iterator[Item] = {
+    new MongoItemsIterator(itemColl.find(MongoDBObject(
+      "appid" -> appid,
+      "itypes" -> MongoDBObject("$in" -> itypes))))
+  }
+
   def getByIds(appid: Int, ids: Seq[String]) = {
     itemColl.find(MongoDBObject("_id" -> MongoDBObject("$in" -> ids.map(idWithAppid(appid, _))))).toList map { dbObjToItem(_) }
   }
diff --git a/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoU2IActions.scala b/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoU2IActions.scala
index 9b7d997..fb7bee4 100644
--- a/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoU2IActions.scala
+++ b/commons/src/main/scala/io/prediction/commons/appdata/mongodb/MongoU2IActions.scala
@@ -32,6 +32,13 @@
     u2iActionColl.find(MongoDBObject("appid" -> appid, "uid" -> idWithAppid(appid, uid), "iid" -> MongoDBObject("$in" -> iids.map(idWithAppid(appid, _)))))
   )
 
+  def getAllByAppidAndIid(appid: Int, iid: String, sortedByUid: Boolean = true): Iterator[U2IAction] = {
+    if (sortedByUid)
+      new MongoU2IActionIterator(u2iActionColl.find(MongoDBObject("appid" -> appid, "iid" -> idWithAppid(appid, iid))).sort(MongoDBObject("uid" -> 1)))
+    else
+      new MongoU2IActionIterator(u2iActionColl.find(MongoDBObject("appid" -> appid, "iid" -> idWithAppid(appid, iid))))
+  }
+
   def deleteByAppid(appid: Int): Unit = {
     u2iActionColl.remove(MongoDBObject("appid" -> appid))
   }
diff --git a/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemRecScores.scala b/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemRecScores.scala
index cfc7bcc..89656a5 100644
--- a/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemRecScores.scala
+++ b/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemRecScores.scala
@@ -79,7 +79,7 @@
   }
 
   def existByAlgo(algo: Algo) = {
-    db.collectionExists(collectionName(algo.id, algo.modelset))
+    db.collectionExists(collectionName(algo.id, algo.modelset)) && db(collectionName(algo.id, algo.modelset)).find().hasNext
   }
 
   override def after(algoid: Int, modelset: Boolean) = {
diff --git a/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemSimScores.scala b/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemSimScores.scala
index 7124879..1408edd 100644
--- a/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemSimScores.scala
+++ b/commons/src/main/scala/io/prediction/commons/modeldata/mongodb/MongoItemSimScores.scala
@@ -80,7 +80,7 @@
   }
 
   def existByAlgo(algo: Algo) = {
-    db.collectionExists(collectionName(algo.id, algo.modelset))
+    db.collectionExists(collectionName(algo.id, algo.modelset)) && db(collectionName(algo.id, algo.modelset)).find().hasNext
   }
 
   override def after(algoid: Int, modelset: Boolean) = {
diff --git a/commons/src/test/scala/io/prediction/commons/ConfigSpec.scala b/commons/src/test/scala/io/prediction/commons/ConfigSpec.scala
index 725a2e0..8718b3c 100644
--- a/commons/src/test/scala/io/prediction/commons/ConfigSpec.scala
+++ b/commons/src/test/scala/io/prediction/commons/ConfigSpec.scala
@@ -54,6 +54,6 @@
 
   def sharding() = {
     mongoConfig.modeldataDbSharding must beTrue and
-      (mongoConfig.modeldataDbShardKeys must beSome(Seq("foo", "bar")))
+      (mongoConfig.modeldataDbShardKeys must beSome(===(Seq("foo", "bar"))))
   }
 }
diff --git a/commons/src/test/scala/io/prediction/commons/appdata/ItemsSpec.scala b/commons/src/test/scala/io/prediction/commons/appdata/ItemsSpec.scala
index bcc8ff6..852a5d3 100644
--- a/commons/src/test/scala/io/prediction/commons/appdata/ItemsSpec.scala
+++ b/commons/src/test/scala/io/prediction/commons/appdata/ItemsSpec.scala
@@ -22,6 +22,7 @@
     t ^
       "inserting and getting an item" ! insert(items) ^
       "getting items by App ID and geo data" ! getByAppidAndLatlng(items) ^
+      "getting items by App ID and itypes" ! getByAppidAndItypes(items) ^
       "getting items by IDs" ! getByIds(items) ^
       "getting items by IDs sorted by start time" ! getRecentByIds(items) ^
       "updating an item" ! update(items) ^
@@ -129,6 +130,68 @@
       (items.getByAppidAndLatlng(appid, (37.3229978, -122.0321823), Some(2.2), Some("mi")).toSeq must beEqualTo(Seq(dac, hsh, mvh)))
   }
 
+  def getByAppidAndItypes(items: Items) = {
+    val id = "getByAppidAndItypes"
+    val appid = 56
+    val dac = Item(
+      id = id + "dac",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("type1", "type2"),
+      starttime = Some(DateTime.now.hour(14).minute(13)),
+      endtime = None,
+      price = Some(49.394),
+      profit = None,
+      latlng = Some((37.3197611, -122.0466141)),
+      inactive = None,
+      attributes = Some(Map("foo" -> "bar", "foo2" -> "bar2")))
+    val hsh = Item(
+      id = id + "hsh",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("type1"),
+      starttime = Some(DateTime.now.hour(23).minute(13)),
+      endtime = None,
+      price = Some(49.394),
+      profit = None,
+      latlng = Some((37.3370801, -122.0493201)),
+      inactive = None,
+      attributes = None)
+    val mvh = Item(
+      id = id + "mvh",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("type2", "type3"),
+      starttime = Some(DateTime.now.hour(17).minute(13)),
+      endtime = None,
+      price = Some(49.394),
+      profit = None,
+      latlng = Some((37.3154153, -122.0566829)),
+      inactive = None,
+      attributes = Some(Map("foo3" -> "bar3")))
+    val lbh = Item(
+      id = id + "lbh",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("type4"),
+      starttime = Some(DateTime.now.hour(3).minute(13)),
+      endtime = None,
+      price = Some(49.394),
+      profit = None,
+      latlng = Some((37.2997029, -122.0034684)),
+      inactive = None,
+      attributes = Some(Map("foo4" -> "bar4", "foo5" -> "bar5")))
+
+    val allItems = Seq(dac, hsh, lbh, mvh)
+    allItems foreach { items.insert(_) }
+
+    (items.getByAppidAndItypes(appid, Seq("type1", "type2", "type3", "type4"))).toSeq must beEqualTo(Seq(dac, hsh, lbh, mvh)) and
+      ((items.getByAppidAndItypes(appid, Seq("type1"))).toSeq must beEqualTo(Seq(dac, hsh))) and
+      ((items.getByAppidAndItypes(appid, Seq("type2"))).toSeq must beEqualTo(Seq(dac, mvh))) and
+      ((items.getByAppidAndItypes(appid, Seq("type3", "type4"))).toSeq must beEqualTo(Seq(lbh, mvh)))
+
+  }
+
   def getByIds(items: Items) = {
     val id = "getByIds"
     val appid = 4
diff --git a/commons/src/test/scala/io/prediction/commons/appdata/U2IActionsSpec.scala b/commons/src/test/scala/io/prediction/commons/appdata/U2IActionsSpec.scala
index 42a8d5a..1edb562 100644
--- a/commons/src/test/scala/io/prediction/commons/appdata/U2IActionsSpec.scala
+++ b/commons/src/test/scala/io/prediction/commons/appdata/U2IActionsSpec.scala
@@ -22,6 +22,7 @@
     t ^
       "inserting and getting 3 U2IAction's" ! insert(u2iActions) ^
       "getting U2IActions by App ID, User ID, and Item IDs" ! getAllByAppidAndUidAndIids(u2iActions) ^
+      "getting U2IActions by App ID, Item" ! getAllByAppidAndIid(u2iActions) ^
       "delete U2IActions by appid" ! deleteByAppid(u2iActions) ^
       "count U2IActions by appid" ! countByAppid(u2iActions) ^
       bt
@@ -108,6 +109,78 @@
       (results(1) must beEqualTo(actions(2)))
   }
 
+  def getAllByAppidAndIid(u2iActions: U2IActions) = {
+    val appid = 109
+    val actions = List(U2IAction(
+      appid = appid,
+      action = u2iActions.rate,
+      uid = "dead",
+      iid = "meat",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(3),
+      price = None
+    ), U2IAction(
+      appid = appid,
+      action = u2iActions.view,
+      uid = "dead",
+      iid = "creeper",
+      t = DateTime.now,
+      latlng = Some((94.3904, -29.4839)),
+      v = None,
+      price = None
+    ), U2IAction(
+      appid = appid,
+      action = u2iActions.like,
+      uid = "dead",
+      iid = "sub",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(1),
+      price = Some(49.40)
+    ), U2IAction(
+      appid = appid,
+      action = u2iActions.rate,
+      uid = "dead2",
+      iid = "meat",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(2),
+      price = None
+    ), U2IAction(
+      appid = appid,
+      action = u2iActions.rate,
+      uid = "dead3",
+      iid = "meat",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(5),
+      price = None
+    ), U2IAction(
+      appid = appid,
+      action = u2iActions.rate,
+      uid = "dead4",
+      iid = "meat",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(1),
+      price = None
+    ))
+    actions foreach { u2iActions.insert(_) }
+    val results = u2iActions.getAllByAppidAndIid(appid, "meat", sortedByUid = true).toList
+    val resultsNoSort = u2iActions.getAllByAppidAndIid(appid, "meat", sortedByUid = false).toList.sortWith((s, t) => s.uid < t.uid)
+
+    results.size must beEqualTo(4) and
+      (results(0) must beEqualTo(actions(0))) and
+      (results(1) must beEqualTo(actions(3))) and
+      (results(2) must beEqualTo(actions(4))) and
+      (results(3) must beEqualTo(actions(5))) and
+      (resultsNoSort(0) must beEqualTo(actions(0))) and
+      (resultsNoSort(1) must beEqualTo(actions(3))) and
+      (resultsNoSort(2) must beEqualTo(actions(4))) and
+      (resultsNoSort(3) must beEqualTo(actions(5)))
+  }
+
   def deleteByAppid(u2iActions: U2IActions) = {
     // insert a few u2iActions with appid1 and a few u2iActions with appid2.
     // delete all u2iActions of appid1.
diff --git a/dist/bin/backup b/dist/bin/backup
deleted file mode 100755
index a2e346c..0000000
--- a/dist/bin/backup
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/bin/bash
-
-###  ------------------------------- ###
-###  Helper methods for BASH scripts ###
-###  ------------------------------- ###
-
-realpath () {
-(
-  TARGET_FILE="$1"
-  CHECK_CYGWIN="$2"
-
-  cd $(dirname "$TARGET_FILE")
-  TARGET_FILE=$(basename "$TARGET_FILE")
-
-  COUNT=0
-  while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
-  do
-      TARGET_FILE=$(readlink "$TARGET_FILE")
-      cd $(dirname "$TARGET_FILE")
-      TARGET_FILE=$(basename "$TARGET_FILE")
-      COUNT=$(($COUNT + 1))
-  done
-
-  if [ "$TARGET_FILE" == "." -o "$TARGET_FILE" == ".." ]; then
-    cd "$TARGET_FILE"
-    TARGET_FILEPATH=
-  else
-    TARGET_FILEPATH=/$TARGET_FILE
-  fi
-
-  # make sure we grab the actual windows path, instead of cygwin's path.
-  if [[ "x$CHECK_CYGWIN" == "x" ]]; then
-    echo "$(pwd -P)/$TARGET_FILE"
-  else
-    echo $(cygwinpath "$(pwd -P)/$TARGET_FILE")
-  fi
-)
-}
-
-# TODO - Do we need to detect msys?
-
-# Uses uname to detect if we're in the odd cygwin environment.
-is_cygwin() {
-  local os=$(uname -s)
-  case "$os" in
-    CYGWIN*) return 0 ;;
-    *)  return 1 ;;
-  esac
-}
-
-# This can fix cygwin style /cygdrive paths so we get the
-# windows style paths.
-cygwinpath() {
-  local file="$1"
-  if is_cygwin; then
-    echo $(cygpath -w $file)
-  else
-    echo $file
-  fi
-}
-
-# Make something URI friendly
-make_url() {
-  url="$1"
-  local nospaces=${url// /%20}
-  if is_cygwin; then
-    echo "/${nospaces//\\//}"
-  else
-    echo "$nospaces"
-  fi
-}
-
-# This crazy function reads in a vanilla "linux" classpath string (only : are separators, and all /),
-# and returns a classpath with windows style paths, and ; separators.
-fixCygwinClasspath() {
-  OLDIFS=$IFS
-  IFS=":"
-  read -a classpath_members <<< "$1"
-  declare -a fixed_members
-  IFS=$OLDIFS
-  for i in "${!classpath_members[@]}"
-  do
-    fixed_members[i]=$(realpath "${classpath_members[i]}" "fix")
-  done
-  IFS=";"
-  echo "${fixed_members[*]}"
-  IFS=$OLDIFS
-}
-
-# Fix the classpath we use for cygwin.
-fix_classpath() {
-  cp="$1"
-  if is_cygwin; then
-    echo "$(fixCygwinClasspath "$cp")"
-  else
-    echo "$cp"
-  fi
-}
-# Detect if we should use JAVA_HOME or just try PATH.
-get_java_cmd() {
-  if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]];  then
-    echo "$JAVA_HOME/bin/java"
-  else
-    echo "java"
-  fi
-}
-
-echoerr () {
-  echo 1>&2 "$@"
-}
-vlog () {
-  [[ $verbose || $debug ]] && echoerr "$@"
-}
-dlog () {
-  [[ $debug ]] && echoerr "$@"
-}
-execRunner () {
-  # print the arguments one to a line, quoting any containing spaces
-  [[ $verbose || $debug ]] && echo "# Executing command line:" && {
-    for arg; do
-      if printf "%s\n" "$arg" | grep -q ' '; then
-        printf "\"%s\"\n" "$arg"
-      else
-        printf "%s\n" "$arg"
-      fi
-    done
-    echo ""
-  }
-
-  exec "$@"
-}
-addJava () {
-  dlog "[addJava] arg = '$1'"
-  java_args=( "${java_args[@]}" "$1" )
-}
-addApp () {
-  dlog "[addApp] arg = '$1'"
-  app_commands=( "${app_commands[@]}" "$1" )
-}
-addResidual () {
-  dlog "[residual] arg = '$1'"
-  residual_args=( "${residual_args[@]}" "$1" )
-}
-addDebugger () {
-  addJava "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
-}
-# a ham-fisted attempt to move some memory settings in concert
-# so they need not be messed around with individually.
-get_mem_opts () {
-  local mem=${1:-1024}
-  local perm=$(( $mem / 4 ))
-  (( $perm > 256 )) || perm=256
-  (( $perm < 1024 )) || perm=1024
-  local codecache=$(( $perm / 2 ))
-
-  # if we detect any of these settings in ${java_opts} we need to NOT output our settings.
-  # The reason is the Xms/Xmx, if they don't line up, cause errors.
-  if [[ "${java_opts}" == *-Xmx* ]] || [[ "${java_opts}" == *-Xms* ]] || [[ "${java_opts}" == *-XX:MaxPermSize* ]] || [[ "${java_opts}" == *-XX:ReservedCodeCacheSize* ]]; then
-     echo ""
-  else
-    echo "-Xms${mem}m -Xmx${mem}m -XX:MaxPermSize=${perm}m -XX:ReservedCodeCacheSize=${codecache}m"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-is_function_defined() {
-  declare -f "$1" > /dev/null
-}
-
-# Attempt to detect if the script is running via a GUI or not
-# TODO - Determine where/how we use this generically
-detect_terminal_for_ui() {
-  [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-  # SPECIAL TEST FOR MAC
-  [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-}
-
-# Processes incoming arguments and places them in appropriate global variables.  called by the run method.
-process_args () {
-  while [[ $# -gt 0 ]]; do
-    case "$1" in
-       -h|-help) usage; exit 1 ;;
-    -v|-verbose) verbose=1 && shift ;;
-      -d|-debug) debug=1 && shift ;;
-
-           -mem) require_arg integer "$1" "$2" && app_mem="$2" && shift 2 ;;
-     -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;;
-
-     -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;;
-
-            -D*) addJava "$1" && shift ;;
-            -J*) addJava "${1:2}" && shift ;;
-              *) addResidual "$1" && shift ;;
-    esac
-  done
-
-  is_function_defined process_my_args && {
-    myargs=("${residual_args[@]}")
-    residual_args=()
-    process_my_args "${myargs[@]}"
-  }
-}
-
-# Actually runs the script.
-run() {
-  # TODO - check for sane environment
-
-  # process the combined args, then reset "$@" to the residuals
-  process_args "$@"
-  set -- "${residual_args[@]}"
-  argumentCount=$#
-
-  #check for jline terminal fixes on cygwin
-  if is_cygwin; then
-    stty -icanon min 1 -echo > /dev/null 2>&1
-    addJava "-Djline.terminal=jline.UnixTerminal"
-    addJava "-Dsbt.cygwin=true"
-  fi
-
-  # Now we check to see if there are any java opts on the environemnt. These get listed first, with the script able to override them.
-  if [[ "$JAVA_OPTS" != "" ]]; then
-    java_opts="${JAVA_OPTS}"
-  fi
-
-  # run sbt
-  execRunner "$java_cmd" \
-    $(get_mem_opts $app_mem) \
-    ${java_opts} \
-    ${java_args[@]} \
-    -cp "$(fix_classpath "$app_classpath")" \
-    $app_mainclass \
-    "${app_commands[@]}" \
-    "${residual_args[@]}"
-
-  local exit_code=$?
-  if is_cygwin; then
-    stty icanon echo > /dev/null 2>&1
-  fi
-  exit $exit_code
-}
-
-# Loads a configuration file full of default command line options for this script.
-loadConfigFile() {
-  cat "$1" | sed '/^\#/d'
-}
-
-###  ------------------------------- ###
-###  Start of customized settings    ###
-###  ------------------------------- ###
-usage() {
- cat <<EOM
-Usage: $script_name [options]
-
-  -h | -help         print this message
-  -v | -verbose      this runner is chattier
-  -d | -debug        set sbt log level to debug
-  -mem    <integer>  set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem))
-  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
-
-  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
-  -java-home <path>         alternate JAVA_HOME
-
-  # jvm options and output control
-  JAVA_OPTS          environment variable, if unset uses "$java_opts"
-  -Dkey=val          pass -Dkey=val directly to the java runtime
-  -J-X               pass option -X directly to the java runtime
-                     (-J is stripped)
-
-In the case of duplicated or conflicting options, the order above
-shows precedence: JAVA_OPTS lowest, command line options highest.
-EOM
-}
-
-###  ------------------------------- ###
-###  Main script                     ###
-###  ------------------------------- ###
-
-declare -a residual_args
-declare -a java_args
-declare -a app_commands
-declare -r real_script_path="$(realpath "$0")"
-declare -r app_home="$(realpath "$(dirname "$real_script_path")")"
-# TODO - Check whether this is ok in cygwin...
-declare -r lib_dir="$(realpath "${app_home}/../lib")"
-declare -r app_mainclass="io.prediction.tools.softwaremanager.Backup"
-
-declare -r app_classpath="$lib_dir/com.github.nscala-time.nscala-time_2.10-0.6.0.jar:$lib_dir/com.github.scopt.scopt_2.10-3.1.0.jar:$lib_dir/com.thoughtworks.paranamer.paranamer-2.6.jar:$lib_dir/com.typesafe.config-1.0.2.jar:$lib_dir/commons-io.commons-io-2.4.jar:$lib_dir/io.prediction.predictionio-commons-0.6.8.jar:$lib_dir/io.prediction.softwaremanager-0.6.8.jar:$lib_dir/joda-time.joda-time-2.3.jar:$lib_dir/org.joda.joda-convert-1.5.jar:$lib_dir/org.json4s.json4s-ast_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-core_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-ext_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-native_2.10-3.2.6.jar:$lib_dir/org.mongodb.casbah-commons_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-core_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-gridfs_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-query_2.10-2.6.2.jar:$lib_dir/org.mongodb.mongo-java-driver-2.11.2.jar:$lib_dir/org.scala-lang.scala-compiler-2.10.0.jar:$lib_dir/org.scala-lang.scala-library-2.10.2.jar:$lib_dir/org.scala-lang.scala-reflect-2.10.0.jar:$lib_dir/org.scala-lang.scalap-2.10.0.jar:$lib_dir/org.slf4j.slf4j-api-1.6.0.jar:$lib_dir/org.slf4j.slf4j-nop-1.6.0.jar"
-
-addJava "-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/.."
-declare -r java_cmd=$(get_java_cmd)
-
-# Now check to see if it's a good enough version
-# TODO - Check to see if we have a configured default java version, otherwise use 1.6
-declare -r java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}')
-if [[ "$java_version" == "" ]]; then
-  echo
-  echo No java installations was detected.
-  echo Please go to http://www.java.com/getjava/ and download
-  echo
-  exit 1
-elif [[ ! "$java_version" > "1.6" ]]; then
-  echo
-  echo The java installation you have is not up to date
-  echo $app_name requires at least version 1.6+, you have
-  echo version $java_version
-  echo
-  echo Please go to http://www.java.com/getjava/ and download
-  echo a valid Java Runtime and install before running $app_name.
-  echo
-  exit 1
-fi
-
-
-# if configuration files exist, prepend their contents to $@ so it can be processed by this runner
-[[ -f "$script_conf_file" ]] && set -- $(loadConfigFile "$script_conf_file") "$@"
-
-run "$@"
diff --git a/dist/bin/common.sh b/dist/bin/common.sh
index d22c3de..030b8eb 100644
--- a/dist/bin/common.sh
+++ b/dist/bin/common.sh
@@ -1,8 +1,8 @@
 # PredictionIO Shared Shell Code
 
-# This script should be sourced with $BASE set to the base of the repository
+# This script should be sourced with $BASE set to the base of the binary package
 
-VERSION=0.6.8
+VERSION=0.7.0
 
 # Play framework related
 PLAY_OPTS=""
diff --git a/dist/bin/restore b/dist/bin/restore
deleted file mode 100755
index aaed7c2..0000000
--- a/dist/bin/restore
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/bin/bash
-
-###  ------------------------------- ###
-###  Helper methods for BASH scripts ###
-###  ------------------------------- ###
-
-realpath () {
-(
-  TARGET_FILE="$1"
-  CHECK_CYGWIN="$2"
-
-  cd $(dirname "$TARGET_FILE")
-  TARGET_FILE=$(basename "$TARGET_FILE")
-
-  COUNT=0
-  while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
-  do
-      TARGET_FILE=$(readlink "$TARGET_FILE")
-      cd $(dirname "$TARGET_FILE")
-      TARGET_FILE=$(basename "$TARGET_FILE")
-      COUNT=$(($COUNT + 1))
-  done
-
-  if [ "$TARGET_FILE" == "." -o "$TARGET_FILE" == ".." ]; then
-    cd "$TARGET_FILE"
-    TARGET_FILEPATH=
-  else
-    TARGET_FILEPATH=/$TARGET_FILE
-  fi
-
-  # make sure we grab the actual windows path, instead of cygwin's path.
-  if [[ "x$CHECK_CYGWIN" == "x" ]]; then
-    echo "$(pwd -P)/$TARGET_FILE"
-  else
-    echo $(cygwinpath "$(pwd -P)/$TARGET_FILE")
-  fi
-)
-}
-
-# TODO - Do we need to detect msys?
-
-# Uses uname to detect if we're in the odd cygwin environment.
-is_cygwin() {
-  local os=$(uname -s)
-  case "$os" in
-    CYGWIN*) return 0 ;;
-    *)  return 1 ;;
-  esac
-}
-
-# This can fix cygwin style /cygdrive paths so we get the
-# windows style paths.
-cygwinpath() {
-  local file="$1"
-  if is_cygwin; then
-    echo $(cygpath -w $file)
-  else
-    echo $file
-  fi
-}
-
-# Make something URI friendly
-make_url() {
-  url="$1"
-  local nospaces=${url// /%20}
-  if is_cygwin; then
-    echo "/${nospaces//\\//}"
-  else
-    echo "$nospaces"
-  fi
-}
-
-# This crazy function reads in a vanilla "linux" classpath string (only : are separators, and all /),
-# and returns a classpath with windows style paths, and ; separators.
-fixCygwinClasspath() {
-  OLDIFS=$IFS
-  IFS=":"
-  read -a classpath_members <<< "$1"
-  declare -a fixed_members
-  IFS=$OLDIFS
-  for i in "${!classpath_members[@]}"
-  do
-    fixed_members[i]=$(realpath "${classpath_members[i]}" "fix")
-  done
-  IFS=";"
-  echo "${fixed_members[*]}"
-  IFS=$OLDIFS
-}
-
-# Fix the classpath we use for cygwin.
-fix_classpath() {
-  cp="$1"
-  if is_cygwin; then
-    echo "$(fixCygwinClasspath "$cp")"
-  else
-    echo "$cp"
-  fi
-}
-# Detect if we should use JAVA_HOME or just try PATH.
-get_java_cmd() {
-  if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]];  then
-    echo "$JAVA_HOME/bin/java"
-  else
-    echo "java"
-  fi
-}
-
-echoerr () {
-  echo 1>&2 "$@"
-}
-vlog () {
-  [[ $verbose || $debug ]] && echoerr "$@"
-}
-dlog () {
-  [[ $debug ]] && echoerr "$@"
-}
-execRunner () {
-  # print the arguments one to a line, quoting any containing spaces
-  [[ $verbose || $debug ]] && echo "# Executing command line:" && {
-    for arg; do
-      if printf "%s\n" "$arg" | grep -q ' '; then
-        printf "\"%s\"\n" "$arg"
-      else
-        printf "%s\n" "$arg"
-      fi
-    done
-    echo ""
-  }
-
-  exec "$@"
-}
-addJava () {
-  dlog "[addJava] arg = '$1'"
-  java_args=( "${java_args[@]}" "$1" )
-}
-addApp () {
-  dlog "[addApp] arg = '$1'"
-  app_commands=( "${app_commands[@]}" "$1" )
-}
-addResidual () {
-  dlog "[residual] arg = '$1'"
-  residual_args=( "${residual_args[@]}" "$1" )
-}
-addDebugger () {
-  addJava "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
-}
-# a ham-fisted attempt to move some memory settings in concert
-# so they need not be messed around with individually.
-get_mem_opts () {
-  local mem=${1:-1024}
-  local perm=$(( $mem / 4 ))
-  (( $perm > 256 )) || perm=256
-  (( $perm < 1024 )) || perm=1024
-  local codecache=$(( $perm / 2 ))
-
-  # if we detect any of these settings in ${java_opts} we need to NOT output our settings.
-  # The reason is the Xms/Xmx, if they don't line up, cause errors.
-  if [[ "${java_opts}" == *-Xmx* ]] || [[ "${java_opts}" == *-Xms* ]] || [[ "${java_opts}" == *-XX:MaxPermSize* ]] || [[ "${java_opts}" == *-XX:ReservedCodeCacheSize* ]]; then
-     echo ""
-  else
-    echo "-Xms${mem}m -Xmx${mem}m -XX:MaxPermSize=${perm}m -XX:ReservedCodeCacheSize=${codecache}m"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-is_function_defined() {
-  declare -f "$1" > /dev/null
-}
-
-# Attempt to detect if the script is running via a GUI or not
-# TODO - Determine where/how we use this generically
-detect_terminal_for_ui() {
-  [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-  # SPECIAL TEST FOR MAC
-  [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-}
-
-# Processes incoming arguments and places them in appropriate global variables.  called by the run method.
-process_args () {
-  while [[ $# -gt 0 ]]; do
-    case "$1" in
-       -h|-help) usage; exit 1 ;;
-    -v|-verbose) verbose=1 && shift ;;
-      -d|-debug) debug=1 && shift ;;
-
-           -mem) require_arg integer "$1" "$2" && app_mem="$2" && shift 2 ;;
-     -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;;
-
-     -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;;
-
-            -D*) addJava "$1" && shift ;;
-            -J*) addJava "${1:2}" && shift ;;
-              *) addResidual "$1" && shift ;;
-    esac
-  done
-
-  is_function_defined process_my_args && {
-    myargs=("${residual_args[@]}")
-    residual_args=()
-    process_my_args "${myargs[@]}"
-  }
-}
-
-# Actually runs the script.
-run() {
-  # TODO - check for sane environment
-
-  # process the combined args, then reset "$@" to the residuals
-  process_args "$@"
-  set -- "${residual_args[@]}"
-  argumentCount=$#
-
-  #check for jline terminal fixes on cygwin
-  if is_cygwin; then
-    stty -icanon min 1 -echo > /dev/null 2>&1
-    addJava "-Djline.terminal=jline.UnixTerminal"
-    addJava "-Dsbt.cygwin=true"
-  fi
-
-  # Now we check to see if there are any java opts on the environemnt. These get listed first, with the script able to override them.
-  if [[ "$JAVA_OPTS" != "" ]]; then
-    java_opts="${JAVA_OPTS}"
-  fi
-
-  # run sbt
-  execRunner "$java_cmd" \
-    $(get_mem_opts $app_mem) \
-    ${java_opts} \
-    ${java_args[@]} \
-    -cp "$(fix_classpath "$app_classpath")" \
-    $app_mainclass \
-    "${app_commands[@]}" \
-    "${residual_args[@]}"
-
-  local exit_code=$?
-  if is_cygwin; then
-    stty icanon echo > /dev/null 2>&1
-  fi
-  exit $exit_code
-}
-
-# Loads a configuration file full of default command line options for this script.
-loadConfigFile() {
-  cat "$1" | sed '/^\#/d'
-}
-
-###  ------------------------------- ###
-###  Start of customized settings    ###
-###  ------------------------------- ###
-usage() {
- cat <<EOM
-Usage: $script_name [options]
-
-  -h | -help         print this message
-  -v | -verbose      this runner is chattier
-  -d | -debug        set sbt log level to debug
-  -mem    <integer>  set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem))
-  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
-
-  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
-  -java-home <path>         alternate JAVA_HOME
-
-  # jvm options and output control
-  JAVA_OPTS          environment variable, if unset uses "$java_opts"
-  -Dkey=val          pass -Dkey=val directly to the java runtime
-  -J-X               pass option -X directly to the java runtime
-                     (-J is stripped)
-
-In the case of duplicated or conflicting options, the order above
-shows precedence: JAVA_OPTS lowest, command line options highest.
-EOM
-}
-
-###  ------------------------------- ###
-###  Main script                     ###
-###  ------------------------------- ###
-
-declare -a residual_args
-declare -a java_args
-declare -a app_commands
-declare -r real_script_path="$(realpath "$0")"
-declare -r app_home="$(realpath "$(dirname "$real_script_path")")"
-# TODO - Check whether this is ok in cygwin...
-declare -r lib_dir="$(realpath "${app_home}/../lib")"
-declare -r app_mainclass="io.prediction.tools.softwaremanager.Restore"
-
-declare -r app_classpath="$lib_dir/com.github.nscala-time.nscala-time_2.10-0.6.0.jar:$lib_dir/com.github.scopt.scopt_2.10-3.1.0.jar:$lib_dir/com.thoughtworks.paranamer.paranamer-2.6.jar:$lib_dir/com.typesafe.config-1.0.2.jar:$lib_dir/commons-io.commons-io-2.4.jar:$lib_dir/io.prediction.predictionio-commons-0.6.8.jar:$lib_dir/io.prediction.softwaremanager-0.6.8.jar:$lib_dir/joda-time.joda-time-2.3.jar:$lib_dir/org.joda.joda-convert-1.5.jar:$lib_dir/org.json4s.json4s-ast_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-core_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-ext_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-native_2.10-3.2.6.jar:$lib_dir/org.mongodb.casbah-commons_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-core_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-gridfs_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-query_2.10-2.6.2.jar:$lib_dir/org.mongodb.mongo-java-driver-2.11.2.jar:$lib_dir/org.scala-lang.scala-compiler-2.10.0.jar:$lib_dir/org.scala-lang.scala-library-2.10.2.jar:$lib_dir/org.scala-lang.scala-reflect-2.10.0.jar:$lib_dir/org.scala-lang.scalap-2.10.0.jar:$lib_dir/org.slf4j.slf4j-api-1.6.0.jar:$lib_dir/org.slf4j.slf4j-nop-1.6.0.jar"
-
-addJava "-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/.."
-declare -r java_cmd=$(get_java_cmd)
-
-# Now check to see if it's a good enough version
-# TODO - Check to see if we have a configured default java version, otherwise use 1.6
-declare -r java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}')
-if [[ "$java_version" == "" ]]; then
-  echo
-  echo No java installations was detected.
-  echo Please go to http://www.java.com/getjava/ and download
-  echo
-  exit 1
-elif [[ ! "$java_version" > "1.6" ]]; then
-  echo
-  echo The java installation you have is not up to date
-  echo $app_name requires at least version 1.6+, you have
-  echo version $java_version
-  echo
-  echo Please go to http://www.java.com/getjava/ and download
-  echo a valid Java Runtime and install before running $app_name.
-  echo
-  exit 1
-fi
-
-
-# if configuration files exist, prepend their contents to $@ so it can be processed by this runner
-[[ -f "$script_conf_file" ]] && set -- $(loadConfigFile "$script_conf_file") "$@"
-
-run "$@"
diff --git a/dist/bin/setup-vendors.sh b/dist/bin/setup-vendors.sh
index 64cfba0..a9fc9f9 100755
--- a/dist/bin/setup-vendors.sh
+++ b/dist/bin/setup-vendors.sh
@@ -17,6 +17,14 @@
 . "$BASE/bin/vendors.sh"
 
 # Detect existing installations in search path
+if vendor_graphchi_exists ; then
+	echo "Found GraphChi C++ Collaborative Filtering Toolkit in vendors area. Assuming it has been installed."
+else
+	echo "Cannot find GraphChi C++ Collaborative Filtering Toolkit from vendors area. Installing it from the Internet."
+	install_graphchi "$VENDORS_PATH"
+fi
+
+# Detect existing installations in search path
 if [ $(process_exists "mongod") -gt "0" ] ; then
 	echo "mongod is running. Skipping MongoDB installation."
 elif command_exists "mongod" ; then
diff --git a/dist/bin/updatecheck b/dist/bin/updatecheck
deleted file mode 100755
index 5ac4e5d..0000000
--- a/dist/bin/updatecheck
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/bin/bash
-
-###  ------------------------------- ###
-###  Helper methods for BASH scripts ###
-###  ------------------------------- ###
-
-realpath () {
-(
-  TARGET_FILE="$1"
-  CHECK_CYGWIN="$2"
-
-  cd $(dirname "$TARGET_FILE")
-  TARGET_FILE=$(basename "$TARGET_FILE")
-
-  COUNT=0
-  while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
-  do
-      TARGET_FILE=$(readlink "$TARGET_FILE")
-      cd $(dirname "$TARGET_FILE")
-      TARGET_FILE=$(basename "$TARGET_FILE")
-      COUNT=$(($COUNT + 1))
-  done
-
-  if [ "$TARGET_FILE" == "." -o "$TARGET_FILE" == ".." ]; then
-    cd "$TARGET_FILE"
-    TARGET_FILEPATH=
-  else
-    TARGET_FILEPATH=/$TARGET_FILE
-  fi
-
-  # make sure we grab the actual windows path, instead of cygwin's path.
-  if [[ "x$CHECK_CYGWIN" == "x" ]]; then
-    echo "$(pwd -P)/$TARGET_FILE"
-  else
-    echo $(cygwinpath "$(pwd -P)/$TARGET_FILE")
-  fi
-)
-}
-
-# TODO - Do we need to detect msys?
-
-# Uses uname to detect if we're in the odd cygwin environment.
-is_cygwin() {
-  local os=$(uname -s)
-  case "$os" in
-    CYGWIN*) return 0 ;;
-    *)  return 1 ;;
-  esac
-}
-
-# This can fix cygwin style /cygdrive paths so we get the
-# windows style paths.
-cygwinpath() {
-  local file="$1"
-  if is_cygwin; then
-    echo $(cygpath -w $file)
-  else
-    echo $file
-  fi
-}
-
-# Make something URI friendly
-make_url() {
-  url="$1"
-  local nospaces=${url// /%20}
-  if is_cygwin; then
-    echo "/${nospaces//\\//}"
-  else
-    echo "$nospaces"
-  fi
-}
-
-# This crazy function reads in a vanilla "linux" classpath string (only : are separators, and all /),
-# and returns a classpath with windows style paths, and ; separators.
-fixCygwinClasspath() {
-  OLDIFS=$IFS
-  IFS=":"
-  read -a classpath_members <<< "$1"
-  declare -a fixed_members
-  IFS=$OLDIFS
-  for i in "${!classpath_members[@]}"
-  do
-    fixed_members[i]=$(realpath "${classpath_members[i]}" "fix")
-  done
-  IFS=";"
-  echo "${fixed_members[*]}"
-  IFS=$OLDIFS
-}
-
-# Fix the classpath we use for cygwin.
-fix_classpath() {
-  cp="$1"
-  if is_cygwin; then
-    echo "$(fixCygwinClasspath "$cp")"
-  else
-    echo "$cp"
-  fi
-}
-# Detect if we should use JAVA_HOME or just try PATH.
-get_java_cmd() {
-  if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]];  then
-    echo "$JAVA_HOME/bin/java"
-  else
-    echo "java"
-  fi
-}
-
-echoerr () {
-  echo 1>&2 "$@"
-}
-vlog () {
-  [[ $verbose || $debug ]] && echoerr "$@"
-}
-dlog () {
-  [[ $debug ]] && echoerr "$@"
-}
-execRunner () {
-  # print the arguments one to a line, quoting any containing spaces
-  [[ $verbose || $debug ]] && echo "# Executing command line:" && {
-    for arg; do
-      if printf "%s\n" "$arg" | grep -q ' '; then
-        printf "\"%s\"\n" "$arg"
-      else
-        printf "%s\n" "$arg"
-      fi
-    done
-    echo ""
-  }
-
-  exec "$@"
-}
-addJava () {
-  dlog "[addJava] arg = '$1'"
-  java_args=( "${java_args[@]}" "$1" )
-}
-addApp () {
-  dlog "[addApp] arg = '$1'"
-  app_commands=( "${app_commands[@]}" "$1" )
-}
-addResidual () {
-  dlog "[residual] arg = '$1'"
-  residual_args=( "${residual_args[@]}" "$1" )
-}
-addDebugger () {
-  addJava "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
-}
-# a ham-fisted attempt to move some memory settings in concert
-# so they need not be messed around with individually.
-get_mem_opts () {
-  local mem=${1:-1024}
-  local perm=$(( $mem / 4 ))
-  (( $perm > 256 )) || perm=256
-  (( $perm < 1024 )) || perm=1024
-  local codecache=$(( $perm / 2 ))
-
-  # if we detect any of these settings in ${java_opts} we need to NOT output our settings.
-  # The reason is the Xms/Xmx, if they don't line up, cause errors.
-  if [[ "${java_opts}" == *-Xmx* ]] || [[ "${java_opts}" == *-Xms* ]] || [[ "${java_opts}" == *-XX:MaxPermSize* ]] || [[ "${java_opts}" == *-XX:ReservedCodeCacheSize* ]]; then
-     echo ""
-  else
-    echo "-Xms${mem}m -Xmx${mem}m -XX:MaxPermSize=${perm}m -XX:ReservedCodeCacheSize=${codecache}m"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-is_function_defined() {
-  declare -f "$1" > /dev/null
-}
-
-# Attempt to detect if the script is running via a GUI or not
-# TODO - Determine where/how we use this generically
-detect_terminal_for_ui() {
-  [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-  # SPECIAL TEST FOR MAC
-  [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-}
-
-# Processes incoming arguments and places them in appropriate global variables.  called by the run method.
-process_args () {
-  while [[ $# -gt 0 ]]; do
-    case "$1" in
-       -h|-help) usage; exit 1 ;;
-    -v|-verbose) verbose=1 && shift ;;
-      -d|-debug) debug=1 && shift ;;
-
-           -mem) require_arg integer "$1" "$2" && app_mem="$2" && shift 2 ;;
-     -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;;
-
-     -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;;
-
-            -D*) addJava "$1" && shift ;;
-            -J*) addJava "${1:2}" && shift ;;
-              *) addResidual "$1" && shift ;;
-    esac
-  done
-
-  is_function_defined process_my_args && {
-    myargs=("${residual_args[@]}")
-    residual_args=()
-    process_my_args "${myargs[@]}"
-  }
-}
-
-# Actually runs the script.
-run() {
-  # TODO - check for sane environment
-
-  # process the combined args, then reset "$@" to the residuals
-  process_args "$@"
-  set -- "${residual_args[@]}"
-  argumentCount=$#
-
-  #check for jline terminal fixes on cygwin
-  if is_cygwin; then
-    stty -icanon min 1 -echo > /dev/null 2>&1
-    addJava "-Djline.terminal=jline.UnixTerminal"
-    addJava "-Dsbt.cygwin=true"
-  fi
-
-  # Now we check to see if there are any java opts on the environemnt. These get listed first, with the script able to override them.
-  if [[ "$JAVA_OPTS" != "" ]]; then
-    java_opts="${JAVA_OPTS}"
-  fi
-
-  # run sbt
-  execRunner "$java_cmd" \
-    $(get_mem_opts $app_mem) \
-    ${java_opts} \
-    ${java_args[@]} \
-    -cp "$(fix_classpath "$app_classpath")" \
-    $app_mainclass \
-    "${app_commands[@]}" \
-    "${residual_args[@]}"
-
-  local exit_code=$?
-  if is_cygwin; then
-    stty icanon echo > /dev/null 2>&1
-  fi
-  exit $exit_code
-}
-
-# Loads a configuration file full of default command line options for this script.
-loadConfigFile() {
-  cat "$1" | sed '/^\#/d'
-}
-
-###  ------------------------------- ###
-###  Start of customized settings    ###
-###  ------------------------------- ###
-usage() {
- cat <<EOM
-Usage: $script_name [options]
-
-  -h | -help         print this message
-  -v | -verbose      this runner is chattier
-  -d | -debug        set sbt log level to debug
-  -mem    <integer>  set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem))
-  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
-
-  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
-  -java-home <path>         alternate JAVA_HOME
-
-  # jvm options and output control
-  JAVA_OPTS          environment variable, if unset uses "$java_opts"
-  -Dkey=val          pass -Dkey=val directly to the java runtime
-  -J-X               pass option -X directly to the java runtime
-                     (-J is stripped)
-
-In the case of duplicated or conflicting options, the order above
-shows precedence: JAVA_OPTS lowest, command line options highest.
-EOM
-}
-
-###  ------------------------------- ###
-###  Main script                     ###
-###  ------------------------------- ###
-
-declare -a residual_args
-declare -a java_args
-declare -a app_commands
-declare -r real_script_path="$(realpath "$0")"
-declare -r app_home="$(realpath "$(dirname "$real_script_path")")"
-# TODO - Check whether this is ok in cygwin...
-declare -r lib_dir="$(realpath "${app_home}/../lib")"
-declare -r app_mainclass="io.prediction.tools.softwaremanager.UpdateCheck"
-
-declare -r app_classpath="$lib_dir/com.github.nscala-time.nscala-time_2.10-0.6.0.jar:$lib_dir/com.github.scopt.scopt_2.10-3.1.0.jar:$lib_dir/com.thoughtworks.paranamer.paranamer-2.6.jar:$lib_dir/com.typesafe.config-1.0.2.jar:$lib_dir/commons-io.commons-io-2.4.jar:$lib_dir/io.prediction.predictionio-commons-0.6.8.jar:$lib_dir/io.prediction.softwaremanager-0.6.8.jar:$lib_dir/joda-time.joda-time-2.3.jar:$lib_dir/org.joda.joda-convert-1.5.jar:$lib_dir/org.json4s.json4s-ast_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-core_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-ext_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-native_2.10-3.2.6.jar:$lib_dir/org.mongodb.casbah-commons_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-core_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-gridfs_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-query_2.10-2.6.2.jar:$lib_dir/org.mongodb.mongo-java-driver-2.11.2.jar:$lib_dir/org.scala-lang.scala-compiler-2.10.0.jar:$lib_dir/org.scala-lang.scala-library-2.10.2.jar:$lib_dir/org.scala-lang.scala-reflect-2.10.0.jar:$lib_dir/org.scala-lang.scalap-2.10.0.jar:$lib_dir/org.slf4j.slf4j-api-1.6.0.jar:$lib_dir/org.slf4j.slf4j-nop-1.6.0.jar"
-
-addJava "-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/.."
-declare -r java_cmd=$(get_java_cmd)
-
-# Now check to see if it's a good enough version
-# TODO - Check to see if we have a configured default java version, otherwise use 1.6
-declare -r java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}')
-if [[ "$java_version" == "" ]]; then
-  echo
-  echo No java installations was detected.
-  echo Please go to http://www.java.com/getjava/ and download
-  echo
-  exit 1
-elif [[ ! "$java_version" > "1.6" ]]; then
-  echo
-  echo The java installation you have is not up to date
-  echo $app_name requires at least version 1.6+, you have
-  echo version $java_version
-  echo
-  echo Please go to http://www.java.com/getjava/ and download
-  echo a valid Java Runtime and install before running $app_name.
-  echo
-  exit 1
-fi
-
-
-# if configuration files exist, prepend their contents to $@ so it can be processed by this runner
-[[ -f "$script_conf_file" ]] && set -- $(loadConfigFile "$script_conf_file") "$@"
-
-run "$@"
diff --git a/dist/bin/upgrade b/dist/bin/upgrade
deleted file mode 100755
index 2ca33ca..0000000
--- a/dist/bin/upgrade
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/bin/bash
-
-###  ------------------------------- ###
-###  Helper methods for BASH scripts ###
-###  ------------------------------- ###
-
-realpath () {
-(
-  TARGET_FILE="$1"
-  CHECK_CYGWIN="$2"
-
-  cd $(dirname "$TARGET_FILE")
-  TARGET_FILE=$(basename "$TARGET_FILE")
-
-  COUNT=0
-  while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
-  do
-      TARGET_FILE=$(readlink "$TARGET_FILE")
-      cd $(dirname "$TARGET_FILE")
-      TARGET_FILE=$(basename "$TARGET_FILE")
-      COUNT=$(($COUNT + 1))
-  done
-
-  if [ "$TARGET_FILE" == "." -o "$TARGET_FILE" == ".." ]; then
-    cd "$TARGET_FILE"
-    TARGET_FILEPATH=
-  else
-    TARGET_FILEPATH=/$TARGET_FILE
-  fi
-
-  # make sure we grab the actual windows path, instead of cygwin's path.
-  if [[ "x$CHECK_CYGWIN" == "x" ]]; then
-    echo "$(pwd -P)/$TARGET_FILE"
-  else
-    echo $(cygwinpath "$(pwd -P)/$TARGET_FILE")
-  fi
-)
-}
-
-# TODO - Do we need to detect msys?
-
-# Uses uname to detect if we're in the odd cygwin environment.
-is_cygwin() {
-  local os=$(uname -s)
-  case "$os" in
-    CYGWIN*) return 0 ;;
-    *)  return 1 ;;
-  esac
-}
-
-# This can fix cygwin style /cygdrive paths so we get the
-# windows style paths.
-cygwinpath() {
-  local file="$1"
-  if is_cygwin; then
-    echo $(cygpath -w $file)
-  else
-    echo $file
-  fi
-}
-
-# Make something URI friendly
-make_url() {
-  url="$1"
-  local nospaces=${url// /%20}
-  if is_cygwin; then
-    echo "/${nospaces//\\//}"
-  else
-    echo "$nospaces"
-  fi
-}
-
-# This crazy function reads in a vanilla "linux" classpath string (only : are separators, and all /),
-# and returns a classpath with windows style paths, and ; separators.
-fixCygwinClasspath() {
-  OLDIFS=$IFS
-  IFS=":"
-  read -a classpath_members <<< "$1"
-  declare -a fixed_members
-  IFS=$OLDIFS
-  for i in "${!classpath_members[@]}"
-  do
-    fixed_members[i]=$(realpath "${classpath_members[i]}" "fix")
-  done
-  IFS=";"
-  echo "${fixed_members[*]}"
-  IFS=$OLDIFS
-}
-
-# Fix the classpath we use for cygwin.
-fix_classpath() {
-  cp="$1"
-  if is_cygwin; then
-    echo "$(fixCygwinClasspath "$cp")"
-  else
-    echo "$cp"
-  fi
-}
-# Detect if we should use JAVA_HOME or just try PATH.
-get_java_cmd() {
-  if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]];  then
-    echo "$JAVA_HOME/bin/java"
-  else
-    echo "java"
-  fi
-}
-
-echoerr () {
-  echo 1>&2 "$@"
-}
-vlog () {
-  [[ $verbose || $debug ]] && echoerr "$@"
-}
-dlog () {
-  [[ $debug ]] && echoerr "$@"
-}
-execRunner () {
-  # print the arguments one to a line, quoting any containing spaces
-  [[ $verbose || $debug ]] && echo "# Executing command line:" && {
-    for arg; do
-      if printf "%s\n" "$arg" | grep -q ' '; then
-        printf "\"%s\"\n" "$arg"
-      else
-        printf "%s\n" "$arg"
-      fi
-    done
-    echo ""
-  }
-
-  exec "$@"
-}
-addJava () {
-  dlog "[addJava] arg = '$1'"
-  java_args=( "${java_args[@]}" "$1" )
-}
-addApp () {
-  dlog "[addApp] arg = '$1'"
-  app_commands=( "${app_commands[@]}" "$1" )
-}
-addResidual () {
-  dlog "[residual] arg = '$1'"
-  residual_args=( "${residual_args[@]}" "$1" )
-}
-addDebugger () {
-  addJava "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
-}
-# a ham-fisted attempt to move some memory settings in concert
-# so they need not be messed around with individually.
-get_mem_opts () {
-  local mem=${1:-1024}
-  local perm=$(( $mem / 4 ))
-  (( $perm > 256 )) || perm=256
-  (( $perm < 1024 )) || perm=1024
-  local codecache=$(( $perm / 2 ))
-
-  # if we detect any of these settings in ${java_opts} we need to NOT output our settings.
-  # The reason is the Xms/Xmx, if they don't line up, cause errors.
-  if [[ "${java_opts}" == *-Xmx* ]] || [[ "${java_opts}" == *-Xms* ]] || [[ "${java_opts}" == *-XX:MaxPermSize* ]] || [[ "${java_opts}" == *-XX:ReservedCodeCacheSize* ]]; then
-     echo ""
-  else
-    echo "-Xms${mem}m -Xmx${mem}m -XX:MaxPermSize=${perm}m -XX:ReservedCodeCacheSize=${codecache}m"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-require_arg () {
-  local type="$1"
-  local opt="$2"
-  local arg="$3"
-  if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
-    die "$opt requires <$type> argument"
-  fi
-}
-is_function_defined() {
-  declare -f "$1" > /dev/null
-}
-
-# Attempt to detect if the script is running via a GUI or not
-# TODO - Determine where/how we use this generically
-detect_terminal_for_ui() {
-  [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-  # SPECIAL TEST FOR MAC
-  [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && {
-    echo "true"
-  }
-}
-
-# Processes incoming arguments and places them in appropriate global variables.  called by the run method.
-process_args () {
-  while [[ $# -gt 0 ]]; do
-    case "$1" in
-       -h|-help) usage; exit 1 ;;
-    -v|-verbose) verbose=1 && shift ;;
-      -d|-debug) debug=1 && shift ;;
-
-           -mem) require_arg integer "$1" "$2" && app_mem="$2" && shift 2 ;;
-     -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;;
-
-     -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;;
-
-            -D*) addJava "$1" && shift ;;
-            -J*) addJava "${1:2}" && shift ;;
-              *) addResidual "$1" && shift ;;
-    esac
-  done
-
-  is_function_defined process_my_args && {
-    myargs=("${residual_args[@]}")
-    residual_args=()
-    process_my_args "${myargs[@]}"
-  }
-}
-
-# Actually runs the script.
-run() {
-  # TODO - check for sane environment
-
-  # process the combined args, then reset "$@" to the residuals
-  process_args "$@"
-  set -- "${residual_args[@]}"
-  argumentCount=$#
-
-  #check for jline terminal fixes on cygwin
-  if is_cygwin; then
-    stty -icanon min 1 -echo > /dev/null 2>&1
-    addJava "-Djline.terminal=jline.UnixTerminal"
-    addJava "-Dsbt.cygwin=true"
-  fi
-
-  # Now we check to see if there are any java opts on the environemnt. These get listed first, with the script able to override them.
-  if [[ "$JAVA_OPTS" != "" ]]; then
-    java_opts="${JAVA_OPTS}"
-  fi
-
-  # run sbt
-  execRunner "$java_cmd" \
-    $(get_mem_opts $app_mem) \
-    ${java_opts} \
-    ${java_args[@]} \
-    -cp "$(fix_classpath "$app_classpath")" \
-    $app_mainclass \
-    "${app_commands[@]}" \
-    "${residual_args[@]}"
-
-  local exit_code=$?
-  if is_cygwin; then
-    stty icanon echo > /dev/null 2>&1
-  fi
-  exit $exit_code
-}
-
-# Loads a configuration file full of default command line options for this script.
-loadConfigFile() {
-  cat "$1" | sed '/^\#/d'
-}
-
-###  ------------------------------- ###
-###  Start of customized settings    ###
-###  ------------------------------- ###
-usage() {
- cat <<EOM
-Usage: $script_name [options]
-
-  -h | -help         print this message
-  -v | -verbose      this runner is chattier
-  -d | -debug        set sbt log level to debug
-  -mem    <integer>  set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem))
-  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
-
-  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
-  -java-home <path>         alternate JAVA_HOME
-
-  # jvm options and output control
-  JAVA_OPTS          environment variable, if unset uses "$java_opts"
-  -Dkey=val          pass -Dkey=val directly to the java runtime
-  -J-X               pass option -X directly to the java runtime
-                     (-J is stripped)
-
-In the case of duplicated or conflicting options, the order above
-shows precedence: JAVA_OPTS lowest, command line options highest.
-EOM
-}
-
-###  ------------------------------- ###
-###  Main script                     ###
-###  ------------------------------- ###
-
-declare -a residual_args
-declare -a java_args
-declare -a app_commands
-declare -r real_script_path="$(realpath "$0")"
-declare -r app_home="$(realpath "$(dirname "$real_script_path")")"
-# TODO - Check whether this is ok in cygwin...
-declare -r lib_dir="$(realpath "${app_home}/../lib")"
-declare -r app_mainclass="io.prediction.tools.softwaremanager.Upgrade"
-
-declare -r app_classpath="$lib_dir/com.github.nscala-time.nscala-time_2.10-0.6.0.jar:$lib_dir/com.github.scopt.scopt_2.10-3.1.0.jar:$lib_dir/com.thoughtworks.paranamer.paranamer-2.6.jar:$lib_dir/com.typesafe.config-1.0.2.jar:$lib_dir/commons-io.commons-io-2.4.jar:$lib_dir/io.prediction.predictionio-commons-0.6.8.jar:$lib_dir/io.prediction.softwaremanager-0.6.8.jar:$lib_dir/joda-time.joda-time-2.3.jar:$lib_dir/org.joda.joda-convert-1.5.jar:$lib_dir/org.json4s.json4s-ast_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-core_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-ext_2.10-3.2.6.jar:$lib_dir/org.json4s.json4s-native_2.10-3.2.6.jar:$lib_dir/org.mongodb.casbah-commons_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-core_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-gridfs_2.10-2.6.2.jar:$lib_dir/org.mongodb.casbah-query_2.10-2.6.2.jar:$lib_dir/org.mongodb.mongo-java-driver-2.11.2.jar:$lib_dir/org.scala-lang.scala-compiler-2.10.0.jar:$lib_dir/org.scala-lang.scala-library-2.10.2.jar:$lib_dir/org.scala-lang.scala-reflect-2.10.0.jar:$lib_dir/org.scala-lang.scalap-2.10.0.jar:$lib_dir/org.slf4j.slf4j-api-1.6.0.jar:$lib_dir/org.slf4j.slf4j-nop-1.6.0.jar"
-
-addJava "-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/.."
-declare -r java_cmd=$(get_java_cmd)
-
-# Now check to see if it's a good enough version
-# TODO - Check to see if we have a configured default java version, otherwise use 1.6
-declare -r java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}')
-if [[ "$java_version" == "" ]]; then
-  echo
-  echo No java installations was detected.
-  echo Please go to http://www.java.com/getjava/ and download
-  echo
-  exit 1
-elif [[ ! "$java_version" > "1.6" ]]; then
-  echo
-  echo The java installation you have is not up to date
-  echo $app_name requires at least version 1.6+, you have
-  echo version $java_version
-  echo
-  echo Please go to http://www.java.com/getjava/ and download
-  echo a valid Java Runtime and install before running $app_name.
-  echo
-  exit 1
-fi
-
-
-# if configuration files exist, prepend their contents to $@ so it can be processed by this runner
-[[ -f "$script_conf_file" ]] && set -- $(loadConfigFile "$script_conf_file") "$@"
-
-run "$@"
diff --git a/dist/bin/vendors.sh b/dist/bin/vendors.sh
index 7706929..ada3039 100644
--- a/dist/bin/vendors.sh
+++ b/dist/bin/vendors.sh
@@ -2,30 +2,74 @@
 
 # PredictionIO Third Party Software Utilities
 
-# Third party software
-VENDORS_PATH="$BASE/vendors"
-VENDOR_HADOOP_PATH="$VENDORS_PATH/hadoop-1.2.1"
-VENDOR_MONGODB_PATH="$VENDORS_PATH/mongodb-linux-x86_64-2.4.6"
-
-VENDOR_HADOOP_NAME="Apache Hadoop 1.2.1"
-VENDOR_MONGODB_NAME="MongoDB 2.4.6 (64-bit Linux)"
+# This script should be sourced with $BASE set to the base of the binary package
 
 # Utilities
 command_exists () {
 	command -v "$1" >/dev/null 2>&1
 }
 
+ostype () {
+	case $(uname) in
+		Linux*)
+			echo "linux";;
+		Darwin*)
+			echo "osx";;
+	esac
+}
+
+hosttype () {
+	case $(uname -m) in
+		i[3456]86)
+			echo "i686";;
+		x86_64)
+			echo "x86_64";;
+	esac
+}
+
+OS=$(ostype)
+ARCH=$(hosttype)
+
 process_exists () {
 	echo $(ps -ef | grep "$1" | grep -v "grep" | wc -l)
 }
 
-install_mongodb () {
-	echo "Going to download and install $VENDOR_MONGODB_NAME..."
+# Third party software
+VENDORS_PATH="$BASE/vendors"
+
+VENDOR_GRAPHCHI_VERSION="0a6545ccb7"
+VENDOR_HADOOP_VERSION="1.2.1"
+VENDOR_MONGODB_VERSION="2.4.9"
+
+VENDOR_GRAPHCHI_PATH="$VENDORS_PATH/graphchi-cpp-cf-$OS-$ARCH-$VENDOR_GRAPHCHI_VERSION"
+VENDOR_HADOOP_PATH="$VENDORS_PATH/hadoop-$VENDOR_HADOOP_VERSION"
+VENDOR_MONGODB_PATH="$VENDORS_PATH/mongodb-$OS-$ARCH-$VENDOR_MONGODB_VERSION"
+
+VENDOR_GRAPHCHI_NAME="GraphChi C++ Collaborative Filtering Toolkit $VENDOR_GRAPHCHI_VERSION"
+VENDOR_HADOOP_NAME="Apache Hadoop $VENDOR_HADOOP_VERSION"
+VENDOR_MONGODB_NAME="MongoDB $VENDOR_MONGODB_VERSION"
+
+install_graphchi () {
+	local FN="graphchi-cpp-cf-$OS-$ARCH-$VENDOR_GRAPHCHI_VERSION.tar.gz"
+	local URL="http://download.prediction.io/graphchi-cpp-cf/$FN"
+	echo "Going to download and install $VENDOR_GRAPHCHI_NAME ($URL)..."
 	local VENDORS_PATH=$1
 	mkdir -p $VENDORS_PATH
 	cd $VENDORS_PATH
-	curl -O http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-2.4.6.tgz
-	tar zxvf mongodb-linux-x86_64-2.4.6.tgz
+	curl -O $URL
+	tar zxvf $FN
+	cp $VENDOR_GRAPHCHI_PATH/* $BASE/bin
+}
+
+install_mongodb () {
+	local FN="mongodb-$OS-$ARCH-$VENDOR_MONGODB_VERSION.tgz"
+	local URL="http://fastdl.mongodb.org/$OS/$FN"
+	echo "Going to download and install $VENDOR_MONGODB_NAME ($URL)..."
+	local VENDORS_PATH=$1
+	mkdir -p $VENDORS_PATH
+	cd $VENDORS_PATH
+	curl -O $URL
+	tar zxvf $FN
 }
 
 install_hadoop () {
@@ -42,8 +86,8 @@
 	local VENDORS_PATH=$1
 	mkdir -p $VENDORS_PATH
 	cd $VENDORS_PATH
-	curl -O http://archive.apache.org/dist/hadoop/common/hadoop-1.2.1/hadoop-1.2.1-bin.tar.gz
-	tar zxvf hadoop-1.2.1-bin.tar.gz
+	curl -O http://archive.apache.org/dist/hadoop/common/hadoop-$VENDOR_HADOOP_VERSION/hadoop-$VENDOR_HADOOP_VERSION-bin.tar.gz
+	tar zxvf hadoop-$VENDOR_HADOOP_VERSION-bin.tar.gz
 	echo "Configuring Hadoop in pseudo-distributed mode..."
 	cp ../conf/hadoop/* $VENDOR_HADOOP_PATH/conf
 	echo "export JAVA_HOME=$JAVA_HOME" >> $VENDOR_HADOOP_PATH/conf/hadoop-env.sh
@@ -70,6 +114,10 @@
 	$VENDOR_HADOOP_PATH/bin/stop-all.sh
 }
 
+vendor_graphchi_exists () {
+	[ -e "$VENDOR_GRAPHCHI_PATH/als" ]
+}
+
 vendor_mongodb_exists () {
 	[ -e "$VENDOR_MONGODB_PATH/bin/mongod" ]
 }
diff --git a/dist/conf/graphchi.cnf b/dist/conf/graphchi.cnf
new file mode 100644
index 0000000..8274e64
--- /dev/null
+++ b/dist/conf/graphchi.cnf
@@ -0,0 +1,29 @@
+
+# GraphChi configuration.
+# Commandline parameters override values in the configuration file.
+# execthreads=2
+loadthreads = 4
+niothreads = 2
+
+# Good for 8gigs
+#membudget_mb = 2000
+#cachesize_mb = 1000
+
+# Good for 4 gigs
+membudget_mb = 800
+cachesize_mb = 0
+
+# I/O settings
+io.blocksize = 1048576 
+mmap = 0  # Use mmaped files where applicable
+
+
+# Comma-delimited list of metrics output reporters.
+# Can be "console", "file" or "html"
+metrics.reporter = console,file,html
+metrics.reporter.filename = graphchi_metrics.txt
+metrics.reporter.htmlfile = graphchi_metrics.html
+
+
+
+
diff --git a/dist/conf/init.json b/dist/conf/init.json
index 74041f0..1909fd1 100644
--- a/dist/conf/init.json
+++ b/dist/conf/init.json
@@ -1,47 +1,32 @@
 {
     "systeminfos": {
         "version": {
-            "value": "0.6.8",
+            "value": "0.7.0",
             "description": "PredictionIO version"
         },
         "jars.pdioItemrecAlgo": {
-            "value": "predictionio-process-hadoop-scalding-assembly-0.6.8.jar"
+            "value": "predictionio-process-hadoop-scalding-assembly-0.7.0.jar"
         },
         "jars.pdioItemsimAlgo": {
-            "value": "predictionio-process-hadoop-scalding-assembly-0.6.8.jar"
-        },
-        "jars.mahoutItemrecAlgo": {
-            "value": "predictionio-process-itemrec-algorithms-scala-mahout-assembly-0.6.8.jar"
+            "value": "predictionio-process-hadoop-scalding-assembly-0.7.0.jar"
         },
         "jars.pdioItemrecEval": {
-            "value": "predictionio-process-hadoop-scalding-assembly-0.6.8.jar"
+            "value": "predictionio-process-hadoop-scalding-assembly-0.7.0.jar"
         },
         "jars.pdioItemsimEval": {
-            "value": "predictionio-process-hadoop-scalding-assembly-0.6.8.jar"
-        },
-        "jars.pdioItemrecTopK": {
-            "value": "predictionio-process-itemrec-evaluations-topkitems-assembly-0.6.8.jar"
-        },
-        "jars.pdioItemsimTopK": {
-            "value": "predictionio-process-itemsim-evaluations-topkitems-assembly-0.6.8.jar"
+            "value": "predictionio-process-hadoop-scalding-assembly-0.7.0.jar"
         },
         "jars.pdioCommonsEval": {
-            "value": "predictionio-process-hadoop-scalding-assembly-0.6.8.jar"
-        },
-        "jars.pdioCommonsParamGen": {
-            "value": "predictionio-process-commons-evaluations-paramgen-assembly-0.6.8.jar"
-        },
-        "jars.pdioCommonsU2ITrainingTestSplit": {
-            "value": "predictionio-process-commons-evaluations-scala-u2itrainingtestsplittime-assembly-0.6.8.jar"
+            "value": "predictionio-process-hadoop-scalding-assembly-0.7.0.jar"
         }
     },
     "engineinfos": {
         "itemrec": {
             "name": "Item Recommendation Engine",
             "description": "<h6>Recommend interesting items to each user personally.</h6><p>Sample Use Cases</p><ul><li>recommend top N items to users personally</li><li>predict users' future preferences</li><li>help users to discover new topics they may be interested in</li><li>personalize content</li><li>optimize sales</li></ul>",
-            "defaultalgoinfoid": "mahout-itembased",
-            "defaultofflineevalmetricinfoid": "map_k",
-            "defaultofflineevalsplitterinfoid": "trainingtestsplit",
+            "defaultalgoinfoid": "pio-itemrec-single-mahout-knnitembased",
+            "defaultofflineevalmetricinfoid": "pio-itemrec-single-map_k",
+            "defaultofflineevalsplitterinfoid": "pio-single-trainingtestsplit",
             "params": {
                 "serendipity": {
                     "name": "Serendipity",
@@ -180,9 +165,9 @@
         "itemsim": {
             "name": "Item Similarity Engine",
             "description": "<h6>Discover similar items.</h6><p>Sample Use Cases</p><ul><li>predict what else would a user like if this user likes a, i.e. \"People who like this also like....\"</li><li>automatic item grouping</li></ul>",
-            "defaultalgoinfoid": "mahout-itemsimcf",
-            "defaultofflineevalmetricinfoid": "ismap_k",
-            "defaultofflineevalsplitterinfoid": "trainingtestsplit",
+            "defaultalgoinfoid": "pio-itemsim-single-mahout-itemsimcf",
+            "defaultofflineevalmetricinfoid": "pio-itemsim-single-ismap_k",
+            "defaultofflineevalsplitterinfoid": "pio-single-trainingtestsplit",
             "params": {
                 "serendipity": {
                     "name": "Serendipity",
@@ -292,7 +277,43 @@
         }
     },
     "algoinfos": {
-        "pdio-randomrank": {
+        "pio-itemrec-single-random": {
+            "name": "Random Rank (Single Machine)",
+            "description": "Predict user preferences randomly.",
+            "batchcommands": [
+                "$base$/bin/random --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numPredictions $numRecommendations$ --modelSet $modelset$ --recommendationTime $recommendationTime$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/random --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numPredictions $numRecommendations$ --modelSet false --recommendationTime $recommendationTime$ --evalid $evalid$"
+            ],
+            "paramorder": [],
+            "engineinfoid": "itemrec",
+            "techreq": [],
+            "datareq": [
+                "Users and Items."
+            ],
+            "params": {},
+            "paramsections": []
+        },
+        "pio-itemsim-single-random": {
+            "name": "Random Rank (Single Machine)",
+            "description": "Predict user preferences randomly.",
+            "batchcommands": [
+                "$base$/bin/random --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numPredictions $numSimilarItems$ --modelSet $modelset$ --recommendationTime $recommendationTime$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/random --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numPredictions $numSimilarItems$ --modelSet false --recommendationTime $recommendationTime$ --evalid $evalid$"
+            ],
+            "paramorder": [],
+            "engineinfoid": "itemsim",
+            "techreq": [],
+            "datareq": [
+                "Users and Items."
+            ],
+            "params": {},
+            "paramsections": []
+        },
+        "pio-itemrec-distributed-random": {
             "name": "Random Rank",
             "description": "Predict user preferences randomly.",
             "batchcommands": [
@@ -312,7 +333,7 @@
             "params": {},
             "paramsections": []
         },
-        "pdio-latestrank": {
+        "pio-itemrec-distributed-latest": {
             "name": "Latest Rank",
             "description": "Recommend latest items to users.",
             "batchcommands": [
@@ -332,27 +353,24 @@
             "params": {},
             "paramsections": []
         },
-        "pdio-knnitembased": {
-            "name": "kNN Item Based Collaborative Filtering",
-            "description": "This item-based k-NearestNeighbor algorithm predicts user preferences based on previous behaviors of users on similar items.",
+        "pio-itemrec-single-graphchi-als": {
+            "name": "GraphChi's Alternating Least Squares Collaborative Filtering",
+            "description": "Predict user preferences based on matrix factorization.",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.KNNItemBased --hdfs --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --measureParam $measureParam$ --priorCountParam $priorCountParam$ --priorCorrelParam $priorCorrelParam$ --minNumRatersParam $minNumRatersParam$ --maxNumRatersParam $maxNumRatersParam$ --minIntersectionParam $minIntersectionParam$ --minNumRatedSimParam $minNumRatedSimParam$ --numRecommendations $numRecommendations$ --unseenOnly $unseenOnly$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/als --training=$localTempDir$ratings.mm --D=$d$ --lambda=$lambda$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.KNNItemBased --hdfs --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --measureParam $measureParam$ --priorCountParam $priorCountParam$ --priorCorrelParam $priorCorrelParam$ --minNumRatersParam $minNumRatersParam$ --maxNumRatersParam $maxNumRatersParam$ --minIntersectionParam $minIntersectionParam$ --minNumRatedSimParam $minNumRatedSimParam$ --numRecommendations $numRecommendations$ --unseenOnly $unseenOnly$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.itemrec.knnitembased.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/als --training=$localTempDir$ratings.mm --D=$d$ --lambda=$lambda$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "paramorder": [
-                "measureParam",
-                "priorCountParam",
-                "priorCorrelParam",
-                "minNumRatersParam",
-                "maxNumRatersParam",
-                "minIntersectionParam",
-                "minNumRatedSimParam",
+                "d",
+                "lambda",
+                "maxIter",
+                "haltRMSEInc",
                 "viewParam",
                 "likeParam",
                 "dislikeParam",
@@ -361,40 +379,15 @@
             ],
             "engineinfoid": "itemrec",
             "techreq": [
-                "Hadoop"
+                "GraphChi"
             ],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
             "params": {
-                "measureParam": {
-                    "name": "Distance Function",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "defaultvalue": "correl",
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "Pearson Correlation Similarity",
-                                "value": "correl"
-                            },
-                            {
-                                "name": "Cosine Similarity",
-                                "value": "cosine"
-                            },
-                            {
-                                "name": "Jaccard Similarity",
-                                "value": "jaccard"
-                            }
-                        ]
-                    }
-                },
-                "priorCountParam": {
-                    "name": "Virtual Count",
-                    "description": "Suggested range: 0 to 100.",
+                "d": {
+                    "name": "Number of Factorized Features",
+                    "description": "Width of the factorized matrix.",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -403,9 +396,91 @@
                     },
                     "defaultvalue": 20
                 },
-                "priorCorrelParam": {
-                    "name": "Prior Correlation",
-                    "description": "",
+                "dMin": {
+                    "name": "Number of Factorized Features (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 2
+                },
+                "dMax": {
+                    "name": "Number of Factorized Features (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 30
+                },
+                "lambda": {
+                    "name": "Lambda",
+                    "description": "Regularization parameter to avoid overfitting.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.065
+                },
+                "lambdaMin": {
+                    "name": "Lambda (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.01
+                },
+                "lambdaMax": {
+                    "name": "Lambda (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.1
+                },
+                "maxIter": {
+                    "name": "Maximum Number of Iterations",
+                    "description": "Maximum number of training iterations allowed.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 6
+                },
+                "maxIterMin": {
+                    "name": "Maximum Number of Iterations (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1
+                },
+                "maxIterMax": {
+                    "name": "Maximum Number of Iterations (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "haltRMSEInc": {
+                    "name": "Halt on RMSE Increase",
+                    "description": "Stop execution when the validation error increases, after running at least the specified number of iterations.",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -414,49 +489,25 @@
                     },
                     "defaultvalue": 0
                 },
-                "minNumRatersParam": {
-                    "name": "Minimum Number of Raters",
-                    "description": "",
+                "haltRMSEIncMin": {
+                    "name": "Halt on RMSE Increase (Min)",
                     "constraint": {
                         "paramtype": "integer"
                     },
                     "ui": {
                         "uitype": "text"
                     },
-                    "defaultvalue": 1
+                    "defaultvalue": 0
                 },
-                "maxNumRatersParam": {
-                    "name": "Maximum Number of Raters",
-                    "description": "",
+                "haltRMSEIncMax": {
+                    "name": "Halt on RMSE Increase (Max)",
                     "constraint": {
                         "paramtype": "integer"
                     },
                     "ui": {
                         "uitype": "text"
                     },
-                    "defaultvalue": 10000
-                },
-                "minIntersectionParam": {
-                    "name": "Minimum Intersection",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minNumRatedSimParam": {
-                    "name": "Minimum Number of Rated Similar Items",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
+                    "defaultvalue": 20
                 },
                 "viewParam": {
                     "name": "View Score",
@@ -630,126 +681,6 @@
                         ]
                     },
                     "defaultvalue": "latest"
-                },
-                "priorCountParamMin": {
-                    "name": "priorCountParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10
-                },
-                "priorCountParamMax": {
-                    "name": "priorCountParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 30
-                },
-                "minNumRatersParamMin": {
-                    "name": "minNumRatersParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minNumRatersParamMax": {
-                    "name": "minNumRatersParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 5
-                },
-                "maxNumRatersParamMin": {
-                    "name": "maxNumRatersParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10000
-                },
-                "maxNumRatersParamMax": {
-                    "name": "maxNumRatersParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10000
-                },
-                "minIntersectionParamMin": {
-                    "name": "minIntersectionParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minIntersectionParamMax": {
-                    "name": "minIntersectionParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 5
-                },
-                "minNumRatedSimParamMin": {
-                    "name": "minNumRatedSimParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minNumRatedSimParamMax": {
-                    "name": "minNumRatedSimParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 5
-                },
-                "priorCorrelParamMin": {
-                    "name": "priorCorrelParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 0
-                },
-                "priorCorrelParamMax": {
-                    "name": "priorCorrelParamMax",
-                    "constraint": {
-                        "paramtype": "double"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 0.1
                 }
             },
             "paramsections": [
@@ -758,36 +689,13 @@
                     "sectiontype": "normal",
                     "subsections": [
                         {
-                            "name": "Item Similarity Measurement",
-                            "sectiontype": "normal",
-                            "params": [
-                                "measureParam"
-                            ]
-                        },
-                        {
                             "name": "Numeric Parameters",
                             "sectiontype": "tuning",
-                            "subsections": [
-                                {
-                                    "name": "Regularization",
-                                    "sectiontype": "normal",
-                                    "description": "Add virtual item pairs that have zero correlation. This helps avoid noise if some item pairs have very few user actions in common.",
-                                    "params": [
-                                        "priorCountParam",
-                                        "priorCorrelParam"
-                                    ]
-                                },
-                                {
-                                    "name": "Other Parameters",
-                                    "sectiontype": "normal",
-                                    "description": "Filters to speed up computation and reduce noise.",
-                                    "params": [
-                                        "minNumRatersParam",
-                                        "maxNumRatersParam",
-                                        "minIntersectionParam",
-                                        "minNumRatedSimParam"
-                                    ]
-                                }
+                            "params": [
+                                "d",
+                                "lambda",
+                                "maxIter",
+                                "haltRMSEInc"
                             ]
                         }
                     ]
@@ -819,7 +727,1323 @@
                 }
             ]
         },
-        "mahout-itembased": {
+        "pio-itemrec-single-graphchi-als_coord": {
+            "name": "GraphChi's CCD++ Alternating Least Squares Collaborative Filtering with Parallel Coordinate Descent",
+            "description": "Predict user preferences based on matrix factorization.",
+            "batchcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/als_coord --training=$localTempDir$ratings.mm --D=$d$ --lambda=$lambda$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/als_coord --training=$localTempDir$ratings.mm --D=$d$ --lambda=$lambda$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "paramorder": [
+                "d",
+                "lambda",
+                "maxIter",
+                "haltRMSEInc",
+                "viewParam",
+                "likeParam",
+                "dislikeParam",
+                "conversionParam",
+                "conflictParam"
+            ],
+            "engineinfoid": "itemrec",
+            "techreq": [
+                "GraphChi"
+            ],
+            "datareq": [
+                "Users, Items, and U2I Actions such as Like, Conversion and Rate."
+            ],
+            "params": {
+                "d": {
+                    "name": "Number of Factorized Features",
+                    "description": "Width of the factorized matrix.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "dMin": {
+                    "name": "Number of Factorized Features (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 2
+                },
+                "dMax": {
+                    "name": "Number of Factorized Features (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 30
+                },
+                "lambda": {
+                    "name": "Lambda",
+                    "description": "Regularization parameter to avoid overfitting.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.065
+                },
+                "lambdaMin": {
+                    "name": "Lambda (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.01
+                },
+                "lambdaMax": {
+                    "name": "Lambda (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.1
+                },
+                "maxIter": {
+                    "name": "Maximum Number of Iterations",
+                    "description": "Maximum number of training iterations allowed.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 6
+                },
+                "maxIterMin": {
+                    "name": "Maximum Number of Iterations (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1
+                },
+                "maxIterMax": {
+                    "name": "Maximum Number of Iterations (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "haltRMSEInc": {
+                    "name": "Halt on RMSE Increase",
+                    "description": "Stop execution when the validation error increases, after running at least the specified number of iterations.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "haltRMSEIncMin": {
+                    "name": "Halt on RMSE Increase (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "haltRMSEIncMax": {
+                    "name": "Halt on RMSE Increase (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "viewParam": {
+                    "name": "View Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "3"
+                },
+                "likeParam": {
+                    "name": "Like Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "5"
+                },
+                "dislikeParam": {
+                    "name": "Dislike Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "1"
+                },
+                "conversionParam": {
+                    "name": "Conversion Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "4"
+                },
+                "conflictParam": {
+                    "name": "Override",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "Use the latest action",
+                                "value": "latest"
+                            },
+                            {
+                                "name": "Use the highest preference score one",
+                                "value": "highest"
+                            },
+                            {
+                                "name": "Use the lowest preference score one",
+                                "value": "lowest"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "latest"
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameter Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "Numeric Parameters",
+                            "sectiontype": "tuning",
+                            "params": [
+                                "d",
+                                "lambda",
+                                "maxIter",
+                                "haltRMSEInc"
+                            ]
+                        }
+                    ]
+                },
+                {
+                    "name": "User Actions Representation Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "User Action Scores",
+                            "sectiontype": "normal",
+                            "description": "Define the preference score represented by each user action from 1 to 5. 5 is the most preferred, 1 is the least preferred. 3 is neutral.",
+                            "params": [
+                                "viewParam",
+                                "likeParam",
+                                "dislikeParam",
+                                "conversionParam"
+                            ]
+                        },
+                        {
+                            "name": "Overriding",
+                            "sectiontype": "normal",
+                            "description": "When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, determine which action will be considered as final preference.",
+                            "params": [
+                                "conflictParam"
+                            ]
+                        }
+                    ]
+                }
+            ]
+        },
+        "pio-itemrec-single-graphchi-sgd": {
+            "name": "GraphChi's Stochastic Gradient Descent",
+            "description": "Predict user preferences based on matrix factorization.",
+            "batchcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/sgd --training=$localTempDir$ratings.mm --D=$d$ --sgd_lambda=$lambda$ --sgd_gamma=$gamma$ --sgd_step_dec=$stepDec$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/sgd --training=$localTempDir$ratings.mm --D=$d$ --sgd_lambda=$lambda$ --sgd_gamma=$gamma$ --sgd_step_dec=$stepDec$ --minval=1 --maxval=5 --max_iter=$maxIter$ --halt_on_rmse_increase=$haltRMSEInc$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "paramorder": [
+                "d",
+                "lambda",
+                "gamma",
+                "stepDec",
+                "maxIter",
+                "haltRMSEInc",
+                "viewParam",
+                "likeParam",
+                "dislikeParam",
+                "conversionParam",
+                "conflictParam"
+            ],
+            "engineinfoid": "itemrec",
+            "techreq": [
+                "GraphChi"
+            ],
+            "datareq": [
+                "Users, Items, and U2I Actions such as Like, Conversion and Rate."
+            ],
+            "params": {
+                "d": {
+                    "name": "Number of Factorized Features",
+                    "description": "Width of the factorized matrix.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "dMin": {
+                    "name": "Number of Factorized Features (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 2
+                },
+                "dMax": {
+                    "name": "Number of Factorized Features (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 30
+                },
+                "lambda": {
+                    "name": "Lambda",
+                    "description": "Regularization parameter to avoid overfitting.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-4
+                },
+                "lambdaMin": {
+                    "name": "Lambda (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-5
+                },
+                "lambdaMax": {
+                    "name": "Lambda (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-3
+                },
+                "gamma": {
+                    "name": "Gamma",
+                    "description": "Gradient step size.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-4
+                },
+                "gammaMin": {
+                    "name": "Gamma (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-5
+                },
+                "gammaMax": {
+                    "name": "Gamma (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-3
+                },
+                "stepDec": {
+                    "name": "Step Decrement",
+                    "description": "Multiplicative step decrement.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.9
+                },
+                "stepDecMin": {
+                    "name": "Step Decrement (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.8
+                },
+                "stepDecMax": {
+                    "name": "Step Decrement (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.99
+                },
+                "maxIter": {
+                    "name": "Maximum Number of Iterations",
+                    "description": "Maximum number of training iterations allowed.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 6
+                },
+                "maxIterMin": {
+                    "name": "Maximum Number of Iterations (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1
+                },
+                "maxIterMax": {
+                    "name": "Maximum Number of Iterations (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "haltRMSEInc": {
+                    "name": "Halt on RMSE Increase",
+                    "description": "Stop execution when validation error goes up. Run at least the number of iterations specified in the flag.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "haltRMSEIncMin": {
+                    "name": "Halt on RMSE Increase (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "haltRMSEIncMax": {
+                    "name": "Halt on RMSE Increase (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "viewParam": {
+                    "name": "View Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "3"
+                },
+                "likeParam": {
+                    "name": "Like Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "5"
+                },
+                "dislikeParam": {
+                    "name": "Dislike Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "1"
+                },
+                "conversionParam": {
+                    "name": "Conversion Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "4"
+                },
+                "conflictParam": {
+                    "name": "Override",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "Use the latest action",
+                                "value": "latest"
+                            },
+                            {
+                                "name": "Use the highest preference score one",
+                                "value": "highest"
+                            },
+                            {
+                                "name": "Use the lowest preference score one",
+                                "value": "lowest"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "latest"
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameter Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "Numeric Parameters",
+                            "sectiontype": "tuning",
+                            "params": [
+                                "d",
+                                "lambda",
+                                "gamma",
+                                "stepDec",
+                                "maxIter",
+                                "haltRMSEInc"
+                            ]
+                        }
+                    ]
+                },
+                {
+                    "name": "User Actions Representation Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "User Action Scores",
+                            "sectiontype": "normal",
+                            "description": "Define the preference score represented by each user action from 1 to 5. 5 is the most preferred, 1 is the least preferred. 3 is neutral.",
+                            "params": [
+                                "viewParam",
+                                "likeParam",
+                                "dislikeParam",
+                                "conversionParam"
+                            ]
+                        },
+                        {
+                            "name": "Overriding",
+                            "sectiontype": "normal",
+                            "description": "When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, determine which action will be considered as final preference.",
+                            "params": [
+                                "conflictParam"
+                            ]
+                        }
+                    ]
+                }
+            ]
+        },
+        "pio-itemrec-single-graphchi-climf": {
+            "name": "GraphChi's CLiMF algorithm",
+            "description": "Predict user preferences based on matrix factorization, optimizing MRR (mean reciprocal rank).",
+            "batchcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/climf --training=$localTempDir$ratings.mm --D=$d$ --binary_relevance_thresh=$relevanceThresh$ --halt_on_mrr_decrease=$haltMRRDec$ --num_ratings=$numRatings$ --sgd_lambda=$lambda$ --sgd_gamma=$gamma$ --sgd_step_dec=$stepDec$ --minval=1 --maxval=5 --max_iter=$maxIter$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket true",
+                "$base$/bin/climf --training=$localTempDir$ratings.mm --D=$d$ --binary_relevance_thresh=$relevanceThresh$ --halt_on_mrr_decrease=$haltMRRDec$ --num_ratings=$numRatings$ --sgd_lambda=$lambda$ --sgd_gamma=$gamma$ --sgd_step_dec=$stepDec$ --minval=1 --maxval=5 --max_iter=$maxIter$ --quiet=1",
+                "$base$/bin/itemrec.graphchi.modelcon --inputDir $localTempDir$ --appid $appid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
+            ],
+            "paramorder": [
+                "d",
+                "relevanceThresh",
+                "haltMRRDec",
+                "numRatings",
+                "lambda",
+                "gamma",
+                "stepDec",
+                "maxIter",
+                "viewParam",
+                "likeParam",
+                "dislikeParam",
+                "conversionParam",
+                "conflictParam"
+            ],
+            "engineinfoid": "itemrec",
+            "techreq": [
+                "GraphChi"
+            ],
+            "datareq": [
+                "Users, Items, and U2I Actions such as Like, Conversion and Rate."
+            ],
+            "params": {
+                "d": {
+                    "name": "Number of Factorized Features",
+                    "description": "Width of the factorized matrix.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "dMin": {
+                    "name": "Number of Factorized Features (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 2
+                },
+                "dMax": {
+                    "name": "Number of Factorized Features (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 30
+                },
+                "relevanceThresh": {
+                    "name": "Binary Relevance Threshold",
+                    "description": "Consider the item liked/relevant if rating is at least this value.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "relevanceThreshMin": {
+                    "name": "Binary Relevance Threshold (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0
+                },
+                "relevanceThreshMax": {
+                    "name": "Binary Relevance Threshold (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 3
+                },
+                "haltMRRDec": {
+                    "name": "Halt on MRR Decrease",
+                    "description": "Halt if the training set objective (smoothed MRR) decreases.",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
+                    },
+                    "defaultvalue": false
+                },
+                "numRatings": {
+                    "name": "Number of Ratings",
+                    "description": "Consider this many top predicted items when computing actual MRR on validation set.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 10000
+                },
+                "numRatingsMin": {
+                    "name": "Number of Ratings (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 10
+                },
+                "numRatingsMax": {
+                    "name": "Number of Ratings (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 10000
+                },
+                "lambda": {
+                    "name": "Lambda",
+                    "description": "Regularization parameter to avoid overfitting.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-6
+                },
+                "lambdaMin": {
+                    "name": "Lambda (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-7
+                },
+                "lambdaMax": {
+                    "name": "Lambda (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-5
+                },
+                "gamma": {
+                    "name": "Gamma",
+                    "description": "Gradient step size.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-6
+                },
+                "gammaMin": {
+                    "name": "Gamma (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-7
+                },
+                "gammaMax": {
+                    "name": "Gamma (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1E-5
+                },
+                "maxIter": {
+                    "name": "Maximum Number of Iterations",
+                    "description": "Maximum number of training iterations allowed.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 6
+                },
+                "maxIterMin": {
+                    "name": "Maximum Number of Iterations (Min)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 1
+                },
+                "maxIterMax": {
+                    "name": "Maximum Number of Iterations (Max)",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 20
+                },
+                "stepDec": {
+                    "name": "Step Decrement",
+                    "description": "Multiplicative step decrement.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.9
+                },
+                "stepDecMin": {
+                    "name": "Step Decrement (Min)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.8
+                },
+                "stepDecMax": {
+                    "name": "Step Decrement (Max)",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.99
+                },
+                "viewParam": {
+                    "name": "View Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "3"
+                },
+                "likeParam": {
+                    "name": "Like Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "5"
+                },
+                "dislikeParam": {
+                    "name": "Dislike Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "1"
+                },
+                "conversionParam": {
+                    "name": "Conversion Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "4"
+                },
+                "conflictParam": {
+                    "name": "Override",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "Use the latest action",
+                                "value": "latest"
+                            },
+                            {
+                                "name": "Use the highest preference score one",
+                                "value": "highest"
+                            },
+                            {
+                                "name": "Use the lowest preference score one",
+                                "value": "lowest"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "latest"
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameter Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "Boolean Parameter",
+                            "sectiontype": "normal",
+                            "params": [
+                                "haltMRRDec"
+                            ]
+                        },
+                        {
+                            "name": "Numeric Parameters",
+                            "sectiontype": "tuning",
+                            "params": [
+                                "d",
+                                "relevanceThresh",
+                                "numRatings",
+                                "lambda",
+                                "gamma",
+                                "stepDec",
+                                "maxIter"
+                            ]
+                        }
+                    ]
+                },
+                {
+                    "name": "User Actions Representation Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "User Action Scores",
+                            "sectiontype": "normal",
+                            "description": "Define the preference score represented by each user action from 1 to 5. 5 is the most preferred, 1 is the least preferred. 3 is neutral.",
+                            "params": [
+                                "viewParam",
+                                "likeParam",
+                                "dislikeParam",
+                                "conversionParam"
+                            ]
+                        },
+                        {
+                            "name": "Overriding",
+                            "sectiontype": "normal",
+                            "description": "When there are conflicting actions, e.g. a user gives an item a rating of 5 but later dislikes it, determine which action will be considered as the final preference.",
+                            "params": [
+                                "conflictParam"
+                            ]
+                        }
+                    ]
+                }
+            ]
+        },
+        "pio-itemrec-distributed-mahout-itembased": {
             "name": "Mahout's Threshold Item Based Collaborative Filtering",
             "description": "Predicts user preferences based on previous behaviors of users on similar items.",
             "batchcommands": [
@@ -830,7 +2054,7 @@
                 "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$",
                 "$hadoop$ fs -getmerge $dataFilePrefix$recommendItems.csv $localTempDir$recommendItemsM.csv",
                 "$hadoop$ fs -copyFromLocal $localTempDir$recommendItemsM.csv $dataFilePrefix$recommendItemsM.csv",
-                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.item.RecommenderJob --input $dataFilePrefix$ratings.csv --itemsFile $dataFilePrefix$recommendItemsM.csv --output $algoFilePrefix$predicted.tsv --tempDir $mahoutTempDir$ --numRecommendations $numRecommendations$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --maxSimilaritiesPerItem $maxSimilaritiesPerItem$ --maxPrefsPerUserInItemSimilarity $maxPrefsPerUserInItemSimilarity$ --similarityClassname $similarityClassname$ --threshold $threshold$",
+                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.item.RecommenderJob --input $dataFilePrefix$ratings.csv --itemsFile $dataFilePrefix$recommendItemsM.csv --output $algoFilePrefix$predicted.tsv --tempDir $mahoutTempDir$ --numRecommendations $numRecommendations$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --maxSimilaritiesPerItem $maxSimilaritiesPerItem$ --maxPrefsInItemSimilarity $maxPrefsInItemSimilarity$ --similarityClassname $similarityClassname$ --threshold $threshold$",
                 "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
             ],
             "offlineevalcommands": [
@@ -841,7 +2065,7 @@
                 "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$",
                 "$hadoop$ fs -getmerge $dataFilePrefix$recommendItems.csv $localTempDir$recommendItemsM.csv",
                 "$hadoop$ fs -copyFromLocal $localTempDir$recommendItemsM.csv $dataFilePrefix$recommendItemsM.csv",
-                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.item.RecommenderJob --input $dataFilePrefix$ratings.csv --itemsFile $dataFilePrefix$recommendItemsM.csv --output $algoFilePrefix$predicted.tsv --tempDir $mahoutTempDir$ --numRecommendations $numRecommendations$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --maxSimilaritiesPerItem $maxSimilaritiesPerItem$ --maxPrefsPerUserInItemSimilarity $maxPrefsPerUserInItemSimilarity$ --similarityClassname $similarityClassname$ --threshold $threshold$",
+                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.item.RecommenderJob --input $dataFilePrefix$ratings.csv --itemsFile $dataFilePrefix$recommendItemsM.csv --output $algoFilePrefix$predicted.tsv --tempDir $mahoutTempDir$ --numRecommendations $numRecommendations$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --maxSimilaritiesPerItem $maxSimilaritiesPerItem$ --maxPrefsInItemSimilarity $maxPrefsInItemSimilarity$ --similarityClassname $similarityClassname$ --threshold $threshold$",
                 "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
             ],
             "paramorder": [
@@ -849,7 +2073,7 @@
                 "maxPrefsPerUser",
                 "minPrefsPerUser",
                 "maxSimilaritiesPerItem",
-                "maxPrefsPerUserInItemSimilarity",
+                "maxPrefsInItemSimilarity",
                 "similarityClassname",
                 "threshold",
                 "viewParam",
@@ -920,7 +2144,7 @@
                     },
                     "defaultvalue": 100
                 },
-                "maxPrefsPerUserInItemSimilarity": {
+                "maxPrefsInItemSimilarity": {
                     "name": "Max Num of Preferences per User in Item Similarity",
                     "description": "Max number of preferences to consider per user in the item similarity computation phase, users with more preferences will be sampled down.",
                     "constraint": {
@@ -1236,8 +2460,8 @@
                     },
                     "defaultvalue": 150
                 },
-                "maxPrefsPerUserInItemSimilarityMin": {
-                    "name": "maxPrefsPerUserInItemSimilarityMin",
+                "maxPrefsInItemSimilarityMin": {
+                    "name": "maxPrefsInItemSimilarityMin",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -1246,8 +2470,8 @@
                     },
                     "defaultvalue": 500
                 },
-                "maxPrefsPerUserInItemSimilarityMax": {
-                    "name": "maxPrefsPerUserInItemSimilarityMax",
+                "maxPrefsInItemSimilarityMax": {
+                    "name": "maxPrefsInItemSimilarityMax",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -1284,7 +2508,7 @@
                                 "maxPrefsPerUser",
                                 "minPrefsPerUser",
                                 "maxSimilaritiesPerItem",
-                                "maxPrefsPerUserInItemSimilarity"
+                                "maxPrefsInItemSimilarity"
                             ]
                         }
                     ]
@@ -1316,7 +2540,7 @@
                 }
             ]
         },
-        "mahout-parallelals": {
+        "pio-itemrec-distributed-mahout-parallelals": {
             "name": "Mahout's Parallel ALS-WR",
             "description": "Predicts user preferences based on previous behaviors of users.",
             "batchcommands": [
@@ -1723,20 +2947,457 @@
                 }
             ]
         },
-        "mahout-knnuserbased": {
-            "name": "Mahout's kNN User Based Collaborative Filtering (Non-distributed)",
-            "description": "Predicts user preferences based on previous behaviors of users who are the k-nearest neighbors (Non-distributed).",
+        "pio-itemrec-single-mahout-knnitembased": {
+            "name": "Mahout's kNN Item Based Collaborative Filtering (extension to GenericItemBased) (Single Machine)",
+            "description": "Predicts user preferences based on previous behaviors of users on similar items (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.knnuserbased.KNNUserBasedJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --nearestN $nearestN$ --userSimilarity $userSimilarity$ --weighted $weighted$ --minSimilarity $minSimilarity$ --samplingRate $samplingRate$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.knnitembased.KNNItemBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --outputSim $localTempDir$itemSim.csv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --itemSimilarity $itemSimilarity$ --weighted $weighted$ --nearestN $nearestN$ --threshold $threshold$ --preComputeItemSim $preComputeItemSim$ --similarItemsPerItem $similarItemsPerItem$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.knnuserbased.KNNUserBasedJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --nearestN $nearestN$ --userSimilarity $userSimilarity$ --weighted $weighted$ --minSimilarity $minSimilarity$ --samplingRate $samplingRate$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.knnitembased.KNNItemBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --outputSim $localTempDir$itemSim.csv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --itemSimilarity $itemSimilarity$ --weighted $weighted$ --nearestN $nearestN$ --threshold $threshold$ --preComputeItemSim $preComputeItemSim$ --similarItemsPerItem $similarItemsPerItem$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
+            ],
+            "paramorder": [
+                "booleanData",
+                "itemSimilarity",
+                "weighted",
+                "nearestN",
+                "threshold",
+                "preComputeItemSim",
+                "similarItemsPerItem",
+                "viewParam",
+                "likeParam",
+                "dislikeParam",
+                "conversionParam",
+                "conflictParam"
+            ],
+            "engineinfoid": "itemrec",
+            "techreq": [],
+            "datareq": [
+                "Users, Items, and U2I Actions such as Like, Conversion and Rate."
+            ],
+            "params": {
+                "booleanData": {
+                    "name": "Boolean Data",
+                    "description": "Treat input data as having no preference values.",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
+                    },
+                    "defaultvalue": false
+                },
+                "itemSimilarity": {
+                    "name": "Item Similarity",
+                    "description": "Item Similarity Measure.",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "City Block",
+                                "value": "CityBlockSimilarity"
+                            },
+                            {
+                                "name": "Euclidean Distance",
+                                "value": "EuclideanDistanceSimilarity"
+                            },
+                            {
+                                "name": "Log-Likelihood",
+                                "value": "LogLikelihoodSimilarity"
+                            },
+                            {
+                                "name": "Pearson Correlation",
+                                "value": "PearsonCorrelationSimilarity"
+                            },
+                            {
+                                "name": "Tanimoto Coefficient",
+                                "value": "TanimotoCoefficientSimilarity"
+                            },
+                            {
+                                "name": "Uncentered Cosine",
+                                "value": "UncenteredCosineSimilarity"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "LogLikelihoodSimilarity"
+                },
+                "weighted": {
+                    "name": "Weighted",
+                    "description": "The Similarity score is weighted (only applied to Euclidean Distance, Pearson Correlation, Uncentered Cosine item similarity).",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
+                    },
+                    "defaultvalue": false
+                },
+                "viewParam": {
+                    "name": "View Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "3"
+                },
+                "nearestN": {
+                    "name": "Nearest K",
+                    "description": "K-nearest rated item neighbors.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 10
+                },
+                "threshold": {
+                    "name": "Threshold",
+                    "description": "Similarity threshold.",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 5e-324
+                },
+                "preComputeItemSim": {
+                    "name": "Pre-Compute Item Similarity",
+                    "description": "Pre-compute item similarity for better run time performance (but use more storage space and memory).",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
+                    },
+                    "defaultvalue": false
+                },
+                "similarItemsPerItem": {
+                    "name": "Number of Similar Items per Item",
+                    "description": "Number of similar items in pre-computed item similarity. Only applicable when Pre-Compute Item Similarity = true. Set this cautiously if you have a large number of items (>100K). Storage space needed is about number of items * number of similar items * 30 bytes. Also, the pre-computed similarity will be loaded into memory.",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 100
+                },
+                "likeParam": {
+                    "name": "Like Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "5"
+                },
+                "dislikeParam": {
+                    "name": "Dislike Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "1"
+                },
+                "conversionParam": {
+                    "name": "Conversion Score",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "1",
+                                "value": "1"
+                            },
+                            {
+                                "name": "2",
+                                "value": "2"
+                            },
+                            {
+                                "name": "3",
+                                "value": "3"
+                            },
+                            {
+                                "name": "4",
+                                "value": "4"
+                            },
+                            {
+                                "name": "5",
+                                "value": "5"
+                            },
+                            {
+                                "name": "Ignore",
+                                "value": "ignore"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "4"
+                },
+                "conflictParam": {
+                    "name": "Override",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "string"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "Use the latest action",
+                                "value": "latest"
+                            },
+                            {
+                                "name": "Use the highest preference score one",
+                                "value": "highest"
+                            },
+                            {
+                                "name": "Use the lowest preference score one",
+                                "value": "lowest"
+                            }
+                        ]
+                    },
+                    "defaultvalue": "latest"
+                },
+                "nearestNMin": {
+                    "name": "nearestNMin",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 5
+                },
+                "nearestNMax": {
+                    "name": "nearestNMax",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 15
+                },
+                "thresholdMin": {
+                    "name": "thresholdMin",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 5e-324
+                },
+                "thresholdMax": {
+                    "name": "thresholdMax",
+                    "constraint": {
+                        "paramtype": "double"
+                    },
+                    "ui": {
+                        "uitype": "text"
+                    },
+                    "defaultvalue": 0.2
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameter Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "Item Similarity Measurement",
+                            "sectiontype": "normal",
+                            "params": [
+                                "itemSimilarity"
+                            ]
+                        },
+                        {
+                            "name": "Advanced Parameters",
+                            "sectiontype": "normal",
+                            "params": [
+                                "booleanData",
+                                "weighted",
+                                "preComputeItemSim",
+                                "similarItemsPerItem"
+                            ]
+                        },
+                        {
+                            "name": "Numeric Parameters",
+                            "sectiontype": "tuning",
+                            "params": [
+                                "nearestN",
+                                "threshold"
+                            ]
+                        }
+                    ]
+                },
+                {
+                    "name": "User Actions Representation Settings",
+                    "sectiontype": "normal",
+                    "subsections": [
+                        {
+                            "name": "User Action Scores",
+                            "sectiontype": "normal",
+                            "description": "Define the preference score represented by each user action from 1 to 5. 5 is the most preferred, 1 is the least preferred. 3 is neutral.",
+                            "params": [
+                                "viewParam",
+                                "likeParam",
+                                "dislikeParam",
+                                "conversionParam"
+                            ]
+                        },
+                        {
+                            "name": "Overriding",
+                            "sectiontype": "normal",
+                            "description": "When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, determine which action will be considered as final preference.",
+                            "params": [
+                                "conflictParam"
+                            ]
+                        }
+                    ]
+                }
+            ]
+        },
+        "pio-itemrec-single-mahout-knnuserbased": {
+            "name": "Mahout's kNN User Based Collaborative Filtering (Single Machine)",
+            "description": "Predicts user preferences based on previous behaviors of users who are the k-nearest neighbors (Single Machine).",
+            "batchcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.knnuserbased.KNNUserBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --nearestN $nearestN$ --userSimilarity $userSimilarity$ --weighted $weighted$ --minSimilarity $minSimilarity$ --samplingRate $samplingRate$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
+            ],
+            "offlineevalcommands": [
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.knnuserbased.KNNUserBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --nearestN $nearestN$ --userSimilarity $userSimilarity$ --weighted $weighted$ --minSimilarity $minSimilarity$ --samplingRate $samplingRate$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
             ],
             "paramorder": [
                 "booleanData",
@@ -1752,9 +3413,7 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
@@ -2167,20 +3826,18 @@
                 }
             ]
         },
-        "mahout-thresholduserbased": {
-            "name": "Mahout's Threshold User Based Collaborative Filtering (Non-distributed)",
-            "description": "Predicts user preferences based on previous behaviors of users whose similarity meets or exceeds a certain threshold (Non-distributed).",
+        "pio-itemrec-single-mahout-thresholduserbased": {
+            "name": "Mahout's Threshold User Based Collaborative Filtering (Single Machine)",
+            "description": "Predicts user preferences based on previous behaviors of users whose similarity meets or exceeds a certain threshold (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.thresholduserbased.ThresholdUserBasedJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --threshold $threshold$ --userSimilarity $userSimilarity$ --weighted $weighted$ --samplingRate $samplingRate$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.thresholduserbased.ThresholdUserBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --threshold $threshold$ --userSimilarity $userSimilarity$ --weighted $weighted$ --samplingRate $samplingRate$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.thresholduserbased.ThresholdUserBasedJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --threshold $threshold$ --userSimilarity $userSimilarity$ --weighted $weighted$ --samplingRate $samplingRate$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$ --booleanData $booleanData$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.thresholduserbased.ThresholdUserBasedJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numRecommendations $numRecommendations$ --threshold $threshold$ --userSimilarity $userSimilarity$ --weighted $weighted$ --samplingRate $samplingRate$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --booleanData $booleanData$"
             ],
             "paramorder": [
                 "booleanData",
@@ -2195,9 +3852,7 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
@@ -2578,291 +4233,18 @@
                 }
             ]
         },
-        "mahout-slopeone": {
-            "name": "Mahout's SlopeOne Rating Based Collaborative Filtering (Non-distributed)",
-            "description": "Predicts user preferences based on average difference in preference values between new items and the items for which the user has indicated preferences (Non-distributed).",
+        "pio-itemrec-single-mahout-alswr": {
+            "name": "Mahout's ALS-WR (Single Machine)",
+            "description": "Predicts user preferences using matrix factorization (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.slopeone.SlopeOneJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --weighting $weighting$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.alswr.ALSWRJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --lambda $lambda$ --numIterations $numIterations$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.slopeone.SlopeOneJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --weighting $weighting$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
-            ],
-            "paramorder": [
-                "weighting",
-                "viewParam",
-                "likeParam",
-                "dislikeParam",
-                "conversionParam",
-                "conflictParam"
-            ],
-            "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
-            "datareq": [
-                "Users, Items, and U2I Actions such as Like, Conversion and Rate."
-            ],
-            "params": {
-                "weighting": {
-                    "name": "Weighting",
-                    "description": "Weighted preference difference.",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "No Weighting",
-                                "value": "No_Weighting"
-                            },
-                            {
-                                "name": "Count",
-                                "value": "Count"
-                            },
-                            {
-                                "name": "Standard Deviation",
-                                "value": "Standard_Deviation"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "Standard_Deviation"
-                },
-                "viewParam": {
-                    "name": "View Score",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "1",
-                                "value": "1"
-                            },
-                            {
-                                "name": "2",
-                                "value": "2"
-                            },
-                            {
-                                "name": "3",
-                                "value": "3"
-                            },
-                            {
-                                "name": "4",
-                                "value": "4"
-                            },
-                            {
-                                "name": "5",
-                                "value": "5"
-                            },
-                            {
-                                "name": "Ignore",
-                                "value": "ignore"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "3"
-                },
-                "likeParam": {
-                    "name": "Like Score",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "1",
-                                "value": "1"
-                            },
-                            {
-                                "name": "2",
-                                "value": "2"
-                            },
-                            {
-                                "name": "3",
-                                "value": "3"
-                            },
-                            {
-                                "name": "4",
-                                "value": "4"
-                            },
-                            {
-                                "name": "5",
-                                "value": "5"
-                            },
-                            {
-                                "name": "Ignore",
-                                "value": "ignore"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "5"
-                },
-                "dislikeParam": {
-                    "name": "Dislike Score",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "1",
-                                "value": "1"
-                            },
-                            {
-                                "name": "2",
-                                "value": "2"
-                            },
-                            {
-                                "name": "3",
-                                "value": "3"
-                            },
-                            {
-                                "name": "4",
-                                "value": "4"
-                            },
-                            {
-                                "name": "5",
-                                "value": "5"
-                            },
-                            {
-                                "name": "Ignore",
-                                "value": "ignore"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "1"
-                },
-                "conversionParam": {
-                    "name": "Conversion Score",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "1",
-                                "value": "1"
-                            },
-                            {
-                                "name": "2",
-                                "value": "2"
-                            },
-                            {
-                                "name": "3",
-                                "value": "3"
-                            },
-                            {
-                                "name": "4",
-                                "value": "4"
-                            },
-                            {
-                                "name": "5",
-                                "value": "5"
-                            },
-                            {
-                                "name": "Ignore",
-                                "value": "ignore"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "4"
-                },
-                "conflictParam": {
-                    "name": "Override",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "string"
-                    },
-                    "ui": {
-                        "uitype": "selection",
-                        "selections": [
-                            {
-                                "name": "Use the latest action",
-                                "value": "latest"
-                            },
-                            {
-                                "name": "Use the highest preference score one",
-                                "value": "highest"
-                            },
-                            {
-                                "name": "Use the lowest preference score one",
-                                "value": "lowest"
-                            }
-                        ]
-                    },
-                    "defaultvalue": "latest"
-                }
-            },
-            "paramsections": [
-                {
-                    "name": "Parameter Settings",
-                    "sectiontype": "normal",
-                    "subsections": [
-                        {
-                            "name": "Weighting Settings",
-                            "sectiontype": "normal",
-                            "params": [
-                                "weighting"
-                            ]
-                        }
-                    ]
-                },
-                {
-                    "name": "User Actions Representation Settings",
-                    "sectiontype": "normal",
-                    "subsections": [
-                        {
-                            "name": "User Action Scores",
-                            "sectiontype": "normal",
-                            "description": "Define the preference score represented by each user action from 1 to 5. 5 is the most preferred, 1 is the least preferred. 3 is neutral.",
-                            "params": [
-                                "viewParam",
-                                "likeParam",
-                                "dislikeParam",
-                                "conversionParam"
-                            ]
-                        },
-                        {
-                            "name": "Overriding",
-                            "sectiontype": "normal",
-                            "description": "When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, determine which action will be considered as final preference.",
-                            "params": [
-                                "conflictParam"
-                            ]
-                        }
-                    ]
-                }
-            ]
-        },
-        "mahout-alswr": {
-            "name": "Mahout's ALS-WR (Non-distributed)",
-            "description": "Predict user preferences using matrix factorization (Non-distributed).",
-            "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.alswr.ALSWRJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --lambda $lambda$ --numIterations $numIterations$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
-            ],
-            "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.alswr.ALSWRJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --lambda $lambda$ --numIterations $numIterations$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.alswr.ALSWRJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --lambda $lambda$ --numIterations $numIterations$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "paramorder": [
                 "numFeatures",
@@ -2875,9 +4257,7 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
@@ -3192,20 +4572,18 @@
                 }
             ]
         },
-        "mahout-svdsgd": {
-            "name": "Mahout's SVD-RatingSGD Recommender (Non-distributed)",
-            "description": "Predict user preferences using matrix factorization (Non-distributed).",
+        "pio-itemrec-single-mahout-svdsgd": {
+            "name": "Mahout's SVD-RatingSGD Recommender (Single Machine)",
+            "description": "Predict user preferences using matrix factorization (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.svdsgd.SVDSGDJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.svdsgd.SVDSGDJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.svdsgd.SVDSGDJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.svdsgd.SVDSGDJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "paramorder": [
                 "numFeatures",
@@ -3221,9 +4599,7 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
@@ -3634,20 +5010,18 @@
                 }
             ]
         },
-        "mahout-svdplusplus": {
-            "name": "Mahout's SVDPlusPlus Recommender (Non-distributed)",
-            "description": "Predict user preferences using matrix factorization (Non-distributed).",
+        "pio-itemrec-single-mahout-svdplusplus": {
+            "name": "Mahout's SVDPlusPlus Recommender (Single Machine)",
+            "description": "Predict user preferences using matrix factorization (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.svdplusplus.SVDPlusPlusJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.svdplusplus.SVDPlusPlusJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -jar $base$/lib/$mahoutItemrecAlgo$ io.prediction.algorithms.mahout.itemrec.svdplusplus.SVDPlusPlusJob --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
-                "$hadoop$ jar $base$/lib/$pdioItemrecAlgo$ io.prediction.algorithms.scalding.mahout.itemrec.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemrec.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemrec.mahout.mahoutjob io.prediction.algorithms.mahout.itemrec.svdplusplus.SVDPlusPlusJob --input $localTempDir$ratings.csv --output $localTempDir$predicted.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --numRecommendations $numRecommendations$ --numFeatures $numFeatures$ --learningRate $learningRate$ --preventOverfitting $preventOverfitting$ --randomNoise $randomNoise$ --numIterations $numIterations$ --learningRateDecay $learningRateDecay$",
+                "$base$/bin/itemrec.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --unseenOnly $unseenOnly$ --numRecommendations $numRecommendations$"
             ],
             "paramorder": [
                 "numFeatures",
@@ -3663,9 +5037,7 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemrec",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
@@ -4076,7 +5448,7 @@
                 }
             ]
         },
-        "pdio-itemsimrandomrank": {
+        "pio-itemsim-distributed-random": {
             "name": "Random Rank",
             "description": "Predict item similarities randomly.",
             "batchcommands": [
@@ -4096,7 +5468,7 @@
             "params": {},
             "paramsections": []
         },
-        "pdio-itemsimlatestrank": {
+        "pio-itemsim-distributed-latest": {
             "name": "Latest Rank",
             "description": "Consider latest items as most similar.",
             "batchcommands": [
@@ -4116,26 +5488,23 @@
             "params": {},
             "paramsections": []
         },
-        "pdio-itemsimcf": {
-            "name": "Item Similarity Collaborative Filtering",
-            "description": "This algorithm predicts similar items which the user may also like.",
+        "pio-itemsim-single-mahout-itemsimcf": {
+            "name": "Mahout's Item Similarity Collaborative Filtering (Single Machine)",
+            "description": "This algorithm predicts similar items which the user may also like (Single Machine).",
             "batchcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.ItemSimilarity --hdfs --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --measureParam $measureParam$ --priorCountParam $priorCountParam$ --priorCorrelParam $priorCorrelParam$ --minNumRatersParam $minNumRatersParam$ --maxNumRatersParam $maxNumRatersParam$ --minIntersectionParam $minIntersectionParam$ --numSimilarItems $numSimilarItems$",
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --recommendationTime $recommendationTime$"
+                "$base$/bin/itemsim.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemsim.mahout.mahoutjob io.prediction.algorithms.mahout.itemsim.itemsimcf.ItemSimCFJob --input $localTempDir$ratings.csv --output $localTempDir$similarities.tsv --itemsFile $localTempDir$validItemsIndex.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --booleanData $booleanData$ --numSimilarItems $numSimilarItems$ --itemSimilarity $itemSimilarity$ --weighted $weighted$",
+                "$base$/bin/itemsim.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --numSimilarItems $numSimilarItems$"
             ],
             "offlineevalcommands": [
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.ItemSimilarity --hdfs --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --measureParam $measureParam$ --priorCountParam $priorCountParam$ --priorCorrelParam $priorCorrelParam$ --minNumRatersParam $minNumRatersParam$ --maxNumRatersParam $maxNumRatersParam$ --minIntersectionParam $minIntersectionParam$ --numSimilarItems $numSimilarItems$",
-                "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.itemsim.itemsimcf.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet false --recommendationTime $recommendationTime$"
+                "$base$/bin/itemsim.generic.dataprep --outputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$ --recommendationTime $recommendationTime$ --matrixMarket false",
+                "$base$/bin/itemsim.mahout.mahoutjob io.prediction.algorithms.mahout.itemsim.itemsimcf.ItemSimCFJob --input $localTempDir$ratings.csv --output $localTempDir$similarities.tsv --itemsFile $localTempDir$validItemsIndex.tsv --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --booleanData $booleanData$ --numSimilarItems $numSimilarItems$ --itemSimilarity $itemSimilarity$ --weighted $weighted$",
+                "$base$/bin/itemsim.mahout.modelcon --inputDir $localTempDir$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --numSimilarItems $numSimilarItems$"
             ],
             "paramorder": [
-                "measureParam",
-                "priorCountParam",
-                "priorCorrelParam",
-                "minNumRatersParam",
-                "maxNumRatersParam",
-                "minIntersectionParam",
+                "booleanData",
+                "itemSimilarity",
+                "weighted",
                 "viewParam",
                 "likeParam",
                 "dislikeParam",
@@ -4143,16 +5512,35 @@
                 "conflictParam"
             ],
             "engineinfoid": "itemsim",
-            "techreq": [
-                "Hadoop"
-            ],
+            "techreq": [],
             "datareq": [
                 "Users, Items, and U2I Actions such as Like, Conversion and Rate."
             ],
             "params": {
-                "measureParam": {
-                    "name": "Distance Function",
-                    "description": "",
+                "booleanData": {
+                    "name": "Boolean Data",
+                    "description": "Treat input data as having no preference values.",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
+                    },
+                    "defaultvalue": false
+                },
+                "itemSimilarity": {
+                    "name": "Item Similarity",
+                    "description": "Item Similarity Measure.",
                     "constraint": {
                         "paramtype": "string"
                     },
@@ -4160,75 +5548,53 @@
                         "uitype": "selection",
                         "selections": [
                             {
-                                "name": "Pearson Correlation Similarity",
-                                "value": "correl"
+                                "name": "City Block",
+                                "value": "CityBlockSimilarity"
                             },
                             {
-                                "name": "Cosine Similarity",
-                                "value": "cosine"
+                                "name": "Euclidean Distance",
+                                "value": "EuclideanDistanceSimilarity"
                             },
                             {
-                                "name": "Jaccard Similarity",
-                                "value": "jaccard"
+                                "name": "Log-Likelihood",
+                                "value": "LogLikelihoodSimilarity"
+                            },
+                            {
+                                "name": "Pearson Correlation",
+                                "value": "PearsonCorrelationSimilarity"
+                            },
+                            {
+                                "name": "Tanimoto Coefficient",
+                                "value": "TanimotoCoefficientSimilarity"
+                            },
+                            {
+                                "name": "Uncentered Cosine",
+                                "value": "UncenteredCosineSimilarity"
                             }
                         ]
                     },
-                    "defaultvalue": "correl"
+                    "defaultvalue": "LogLikelihoodSimilarity"
                 },
-                "priorCountParam": {
-                    "name": "Virtual Count",
-                    "description": "Suggested range: 0 to 100.",
+                "weighted": {
+                    "name": "Weighted",
+                    "description": "The Similarity score is weighted (only applied to Euclidean Distance, Pearson Correlation, Uncentered Cosine item similarity).",
                     "constraint": {
-                        "paramtype": "integer"
+                        "paramtype": "boolean"
                     },
                     "ui": {
-                        "uitype": "text"
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "True",
+                                "value": "true"
+                            },
+                            {
+                                "name": "False",
+                                "value": "false"
+                            }
+                        ]
                     },
-                    "defaultvalue": 20
-                },
-                "priorCorrelParam": {
-                    "name": "Prior Correlation",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 0
-                },
-                "minNumRatersParam": {
-                    "name": "Minimum Number of Raters",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "maxNumRatersParam": {
-                    "name": "Maximum Number of Raters",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10000
-                },
-                "minIntersectionParam": {
-                    "name": "Minimum Intersection",
-                    "description": "",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
+                    "defaultvalue": false
                 },
                 "viewParam": {
                     "name": "View Score",
@@ -4402,106 +5768,6 @@
                         ]
                     },
                     "defaultvalue": "latest"
-                },
-                "priorCountParamMin": {
-                    "name": "priorCountParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10
-                },
-                "priorCountParamMax": {
-                    "name": "priorCountParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 30
-                },
-                "minNumRatersParamMin": {
-                    "name": "minNumRatersParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minNumRatersParamMax": {
-                    "name": "minNumRatersParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 5
-                },
-                "maxNumRatersParamMin": {
-                    "name": "maxNumRatersParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10000
-                },
-                "maxNumRatersParamMax": {
-                    "name": "maxNumRatersParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 10000
-                },
-                "minIntersectionParamMin": {
-                    "name": "minIntersectionParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 1
-                },
-                "minIntersectionParamMax": {
-                    "name": "minIntersectionParamMax",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 5
-                },
-                "priorCorrelParamMin": {
-                    "name": "priorCorrelParamMin",
-                    "constraint": {
-                        "paramtype": "integer"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 0
-                },
-                "priorCorrelParamMax": {
-                    "name": "priorCorrelParamMax",
-                    "constraint": {
-                        "paramtype": "double"
-                    },
-                    "ui": {
-                        "uitype": "text"
-                    },
-                    "defaultvalue": 0.1
                 }
             },
             "paramsections": [
@@ -4513,32 +5779,15 @@
                             "name": "Item Similarity Measurement",
                             "sectiontype": "normal",
                             "params": [
-                                "measureParam"
+                                "itemSimilarity"
                             ]
                         },
                         {
-                            "name": "Numeric Parameters",
-                            "sectiontype": "tuning",
-                            "subsections": [
-                                {
-                                    "name": "Regularization",
-                                    "sectiontype": "normal",
-                                    "description": "Add virtual item pairs that have zero correlation. This helps avoid noise if some item pairs have very few user actions in common.",
-                                    "params": [
-                                        "priorCountParam",
-                                        "priorCorrelParam"
-                                    ]
-                                },
-                                {
-                                    "name": "Other Parameters",
-                                    "sectiontype": "normal",
-                                    "description": "Filters to speed up computation and reduce noise.",
-                                    "params": [
-                                        "minNumRatersParam",
-                                        "maxNumRatersParam",
-                                        "minIntersectionParam"
-                                    ]
-                                }
+                            "name": "Advanced Parameters",
+                            "sectiontype": "normal",
+                            "params": [
+                                "booleanData",
+                                "weighted"
                             ]
                         }
                     ]
@@ -4570,7 +5819,7 @@
                 }
             ]
         },
-        "mahout-itemsimcf": {
+        "pio-itemsim-distributed-mahout-itemsimcf": {
             "name": "Mahout's Item Similarity Collaborative Filtering",
             "description": "This algorithm predicts similar items which the user may also like.",
             "batchcommands": [
@@ -4578,7 +5827,7 @@
                 "$base$/bin/quiet.sh $hadoop$ fs -rmr $algoDir$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.DataCopy --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.DataPreparator --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJob --input $dataFilePrefix$ratings.csv --output $algoFilePrefix$similarities.tsv --tempDir $mahoutTempDir$ --maxSimilaritiesPerItem $numSimilarItems$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --similarityClassname $similarityClassname$ --threshold $threshold$",
+                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJob --input $dataFilePrefix$ratings.csv --output $algoFilePrefix$similarities.tsv --tempDir $mahoutTempDir$ --maxSimilaritiesPerItem $numSimilarItems$ --booleanData $booleanData$ --maxPrefs $maxPrefs$ --minPrefsPerUser $minPrefsPerUser$ --similarityClassname $similarityClassname$ --threshold $threshold$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.ModelConstructor --hdfs --dbType $modeldataDbType$ --dbName $modeldataDbName$ --dbHost $modeldataDbHost$ --dbPort $modeldataDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --modelSet $modelset$ --numSimilarItems $numSimilarItems$ --recommendationTime $recommendationTime$"
             ],
             "offlineevalcommands": [
@@ -4586,12 +5835,12 @@
                 "$base$/bin/quiet.sh $hadoop$ fs -rmr $algoDir$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.DataCopy --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.DataPreparator --hdfs --dbType $appdataTrainingDbType$ --dbName $appdataTrainingDbName$ --dbHost $appdataTrainingDbHost$ --dbPort $appdataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ $itypes$ --viewParam $viewParam$ --likeParam $likeParam$ --dislikeParam $dislikeParam$ --conversionParam $conversionParam$ --conflictParam $conflictParam$",
-                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJob --input $dataFilePrefix$ratings.csv --output $algoFilePrefix$similarities.tsv --tempDir $mahoutTempDir$ --maxSimilaritiesPerItem $numSimilarItems$ --booleanData $booleanData$ --maxPrefsPerUser $maxPrefsPerUser$ --minPrefsPerUser $minPrefsPerUser$ --similarityClassname $similarityClassname$ --threshold $threshold$",
+                "$hadoop$ jar $mahoutCoreJob$ org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJob --input $dataFilePrefix$ratings.csv --output $algoFilePrefix$similarities.tsv --tempDir $mahoutTempDir$ --maxSimilaritiesPerItem $numSimilarItems$ --booleanData $booleanData$ --maxPrefs $maxPrefs$ --minPrefsPerUser $minPrefsPerUser$ --similarityClassname $similarityClassname$ --threshold $threshold$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimAlgo$ io.prediction.algorithms.scalding.mahout.itemsim.ModelConstructor --hdfs --dbType $modeldataTrainingDbType$ --dbName $modeldataTrainingDbName$ --dbHost $modeldataTrainingDbHost$ --dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --algoid $algoid$ --evalid $evalid$ --modelSet $modelset$ --numSimilarItems $numSimilarItems$ --recommendationTime $recommendationTime$"
             ],
             "paramorder": [
                 "booleanData",
-                "maxPrefsPerUser",
+                "maxPrefs",
                 "minPrefsPerUser",
                 "similarityClassname",
                 "threshold",
@@ -4630,7 +5879,7 @@
                     },
                     "defaultvalue": false
                 },
-                "maxPrefsPerUser": {
+                "maxPrefs": {
                     "name": "Max Num of Preferences per User",
                     "description": "Maximum number of preferences considered per user in final recommendation phase.",
                     "constraint": {
@@ -4897,8 +6146,8 @@
                     },
                     "defaultvalue": 0.15
                 },
-                "maxPrefsPerUserMin": {
-                    "name": "maxPrefsPerUserMin",
+                "maxPrefsMin": {
+                    "name": "maxPrefsMin",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -4907,8 +6156,8 @@
                     },
                     "defaultvalue": 500
                 },
-                "maxPrefsPerUserMax": {
-                    "name": "maxPrefsPerUserMax",
+                "maxPrefsMax": {
+                    "name": "maxPrefsMax",
                     "constraint": {
                         "paramtype": "integer"
                     },
@@ -4962,7 +6211,7 @@
                             "sectiontype": "tuning",
                             "params": [
                                 "threshold",
-                                "maxPrefsPerUser",
+                                "maxPrefs",
                                 "minPrefsPerUser"
                             ]
                         }
@@ -4997,14 +6246,60 @@
         }
     },
     "offlineevalsplitterinfos": {
-        "trainingtestsplit": {
+        "pio-distributed-trainingtestsplit": {
             "name": "Training/Test Data Splitter With Time Order Option",
             "description": "Split data into training, validation and test sets",
             "engineinfoids": [
                 "itemrec", "itemsim"
             ],
             "commands": [
-                "java -jar $base$/lib/$pdioCommonsU2ITrainingTestSplit$ --hadoop $hadoop$ --pdioEvalJar $base$/lib/$pdioCommonsEval$ --sequenceNum $iteration$ --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --training_dbType $appdataTrainingDbType$ --training_dbName $appdataTrainingDbName$ --training_dbHost $appdataTrainingDbHost$ --training_dbPort $appdataTrainingDbPort$ --validation_dbType $appdataValidationDbType$ --validation_dbName $appdataValidationDbName$ --validation_dbHost $appdataValidationDbHost$ --validation_dbPort $appdataValidationDbPort$ --test_dbType $appdataTestDbType$ --test_dbName $appdataTestDbName$ --test_dbHost $appdataTestDbHost$ --test_dbPort $appdataTestDbPort$ --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ $itypes$ --trainingPercent $trainingPercent$ --validationPercent $validationPercent$ --testPercent $testPercent$ --timeorder $timeorder$"
+                "$base$/bin/u2itrainingtestsplit --hadoop $hadoop$ --pdioEvalJar $base$/lib/$pdioCommonsEval$ --sequenceNum $iteration$ --hdfs --dbType $appdataDbType$ --dbName $appdataDbName$ --dbHost $appdataDbHost$ --dbPort $appdataDbPort$ --training_dbType $appdataTrainingDbType$ --training_dbName $appdataTrainingDbName$ --training_dbHost $appdataTrainingDbHost$ --training_dbPort $appdataTrainingDbPort$ --validation_dbType $appdataValidationDbType$ --validation_dbName $appdataValidationDbName$ --validation_dbHost $appdataValidationDbHost$ --validation_dbPort $appdataValidationDbPort$ --test_dbType $appdataTestDbType$ --test_dbName $appdataTestDbName$ --test_dbHost $appdataTestDbHost$ --test_dbPort $appdataTestDbPort$ --hdfsRoot $hdfsRoot$ --localTempRoot $localTempRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ $itypes$ --trainingPercent $trainingPercent$ --validationPercent $validationPercent$ --testPercent $testPercent$ --timeorder $timeorder$"
+            ],
+            "params": {
+                "timeorder": {
+                    "name": "Data Selection",
+                    "description": "Random with Time Order means that data in Test Set is always newer than those in Train Set.",
+                    "constraint": {
+                        "paramtype": "boolean"
+                    },
+                    "defaultvalue": false,
+                    "ui": {
+                        "uitype": "selection",
+                        "selections": [
+                            {
+                                "name": "Random Sampling",
+                                "value": "false"
+                            },
+                            {
+                                "name": "Random with Time Order",
+                                "value": "true"
+                            }
+                        ]
+                    }
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameters",
+                    "sectiontype": "normal",
+                    "description": "",
+                    "params": [
+                        "timeorder"
+                    ]
+                }
+            ],
+            "paramorder": [
+                "timeorder"
+            ]
+        },
+        "pio-single-trainingtestsplit": {
+            "name": "Training and Test Set Data Splitter for User-to-Item Actions (Single Machine)",
+            "description": "Split data into training, validation and test sets",
+            "engineinfoids": [
+                "itemrec", "itemsim"
+            ],
+            "commands": [
+                "$base$/bin/u2isplit --sequenceNum $iteration$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ $itypesCSV$ --trainingpercent $trainingPercent$ --validationpercent $validationPercent$ --testpercent $testPercent$ --timeorder $timeorder$"
             ],
             "params": {
                 "timeorder": {
@@ -5045,7 +6340,7 @@
         }
     },
     "offlineevalmetricinfos": {
-        "map_k": {
+        "pio-itemrec-distributed-map_k": {
             "name": "MAP@k",
             "description": "Mean Average Precision",
             "engineinfoids": [
@@ -5053,7 +6348,7 @@
             ],
             "commands": [
                 "$hadoop$ jar $base$/lib/$pdioItemrecEval$ io.prediction.metrics.scalding.itemrec.map.MAPAtKDataPreparator --hdfs --test_dbType $appdataTestDbType$ --test_dbName $appdataTestDbName$ --test_dbHost $appdataTestDbHost$ --test_dbPort $appdataTestDbPort$ --training_dbType $appdataTrainingDbType$ --training_dbName $appdataTrainingDbName$ --training_dbHost $appdataTrainingDbHost$ --training_dbPort $appdataTrainingDbPort$ --modeldata_dbType $modeldataTrainingDbType$ --modeldata_dbName $modeldataTrainingDbName$ --modeldata_dbHost $modeldataTrainingDbHost$ --modeldata_dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --kParam $kParam$ --goalParam $goalParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -Devalid=$evalid$ -Dalgoid=$algoid$ -Dk=$kParam$ -Dmetricid=$metricid$ -Dhdfsroot=$hdfsRoot$ -jar $base$/lib/$pdioItemrecTopK$",
+                "$base$/bin/topk --enginetype itemrec --evalid $evalid$ --algoid $algoid$ --k $kParam$ --metricid $metricid$ --hdfsroot $hdfsRoot$",
                 "$hadoop$ jar $base$/lib/$pdioItemrecEval$ io.prediction.metrics.scalding.itemrec.map.MAPAtK --hdfs --dbType $settingsDbType$ --dbName $settingsDbName$ --dbHost $settingsDbHost$ --dbPort $settingsDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --iteration $iteration$ --splitset $splitset$ --kParam $kParam$"
             ],
             "params": {
@@ -5083,15 +6378,52 @@
                 "kParam"
             ]
         },
-        "ismap_k": {
+        "pio-itemrec-single-map_k": {
+            "name": "MAP@k (Single Machine)",
+            "description": "Mean Average Precision",
+            "engineinfoids": [
+                "itemrec"
+            ],
+            "commands": [
+                "$base$/bin/topk --enginetype itemrec --evalid $evalid$ --algoid $algoid$ --k $k$ --metricid $metricid$ --hdfsroot $hdfsRoot$ --local",
+                "$base$/bin/mapatk --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --iteration $iteration$ --splitset $splitset$ --k $k$ --goal $goalParam$"
+            ],
+            "params": {
+                "k": {
+                    "name": "k",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "defaultvalue": 20,
+                    "ui": {
+                        "uitype": "text"
+                    }
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameters",
+                    "sectiontype": "normal",
+                    "description": "",
+                    "params": [
+                        "k"
+                    ]
+                }
+            ],
+            "paramorder": [
+                "k"
+            ]
+        },
+        "pio-itemsim-distributed-ismap_k": {
             "name": "ISMAP@k",
-            "description": "Item Similarity Mean Average Precision at k",
+            "description": "Item Similarity Mean Average Precision",
             "engineinfoids": [
                 "itemsim"
             ],
             "commands": [
                 "$hadoop$ jar $base$/lib/$pdioItemsimEval$ io.prediction.metrics.scalding.itemsim.ismap.ISMAPAtKDataPreparator --hdfs --test_dbType $appdataTestDbType$ --test_dbName $appdataTestDbName$ --test_dbHost $appdataTestDbHost$ --test_dbPort $appdataTestDbPort$ --training_dbType $appdataTrainingDbType$ --training_dbName $appdataTrainingDbName$ --training_dbHost $appdataTrainingDbHost$ --training_dbPort $appdataTrainingDbPort$ --modeldata_dbType $modeldataTrainingDbType$ --modeldata_dbName $modeldataTrainingDbName$ --modeldata_dbHost $modeldataTrainingDbHost$ --modeldata_dbPort $modeldataTrainingDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --kParam $kParam$ --goalParam $goalParam$",
-                "java -Dio.prediction.base=$base$ $configFile$ -Devalid=$evalid$ -Dalgoid=$algoid$ -Dk=$kParam$ -Dmetricid=$metricid$ -Dhdfsroot=$hdfsRoot$ -jar $base$/lib/$pdioItemsimTopK$",
+                "$base$/bin/topk --enginetype itemsim --evalid $evalid$ --algoid $algoid$ --k $kParam$ --metricid $metricid$ --hdfsroot $hdfsRoot$",
                 "$hadoop$ jar $base$/lib/$pdioItemsimEval$ io.prediction.metrics.scalding.itemsim.ismap.ISMAPAtK --hdfs --dbType $settingsDbType$ --dbName $settingsDbName$ --dbHost $settingsDbHost$ --dbPort $settingsDbPort$ --hdfsRoot $hdfsRoot$ --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --iteration $iteration$ --splitset $splitset$ --kParam $kParam$"
             ],
             "params": {
@@ -5120,14 +6452,51 @@
             "paramorder": [
                 "kParam"
             ]
+        },
+        "pio-itemsim-single-ismap_k": {
+            "name": "ISMAP@k (Single Machine)",
+            "description": "Item Similarity Mean Average Precision",
+            "engineinfoids": [
+                "itemsim"
+            ],
+            "commands": [
+                "$base$/bin/topk --enginetype itemsim --evalid $evalid$ --algoid $algoid$ --k $k$ --metricid $metricid$ --hdfsroot $hdfsRoot$ --local",
+                "$base$/bin/mapatk --appid $appid$ --engineid $engineid$ --evalid $evalid$ --metricid $metricid$ --algoid $algoid$ --iteration $iteration$ --splitset $splitset$ --k $k$ --goal $goalParam$"
+            ],
+            "params": {
+                "k": {
+                    "name": "k",
+                    "description": "",
+                    "constraint": {
+                        "paramtype": "integer"
+                    },
+                    "defaultvalue": 20,
+                    "ui": {
+                        "uitype": "text"
+                    }
+                }
+            },
+            "paramsections": [
+                {
+                    "name": "Parameters",
+                    "sectiontype": "normal",
+                    "description": "",
+                    "params": [
+                        "k"
+                    ]
+                }
+            ],
+            "paramorder": [
+                "k"
+            ]
         }
     },
     "paramgeninfos": {
-        "random": {
+        "pio-single-random": {
             "name": "Random Search",
             "description": "Random search within specified interval",
             "commands": [
-                "java -Dio.prediction.base=$base$ $configFile$ -Devalids=$evalids$ -Dalgoid=$algoid$ -Dloop=$loop$ -Dparamsets=$paramsets$ -jar $base$/lib/$pdioCommonsParamGen$"
+                "$base$/bin/paramgen --evalids $evalids$ --algoid $algoid$ --loop $loop$ --paramsets $paramsets$"
             ],
             "paramorder": [],
             "paramnames": {},
diff --git a/dist/conf/predictionio.conf b/dist/conf/predictionio.conf
index 9839d59..99be9a6 100644
--- a/dist/conf/predictionio.conf
+++ b/dist/conf/predictionio.conf
@@ -75,10 +75,12 @@
 io.prediction.commons.modeldata.training.db.port=27017
 
 # Mahout core job
-io.prediction.jars.mahoutCoreJob=${io.prediction.base}/vendors/mahout-distribution-0.8/mahout-core-0.8-job.jar
+io.prediction.jars.mahoutCoreJob=${io.prediction.base}/vendors/mahout-distribution-0.9/mahout-core-0.9-job.jar
 
 # PredictionIO Scheduler Threading
 io.prediction.scheduler.steps.fork-join-executor.parallelism-factor = 1.0
 io.prediction.scheduler.steps.fork-join-executor.parallelism-max = 1
 
+# default local temporary space
+io.prediction.commons.settings.local.temp.root=${io.prediction.base}/tmp/
 
diff --git a/process/commons/hadoop/scalding/src/main/scala/io/prediction/commons/scalding/appdata/monogodb/MongoU2iActionsSource.scala b/process/commons/hadoop/scalding/src/main/scala/io/prediction/commons/scalding/appdata/monogodb/MongoU2iActionsSource.scala
index c54a60d..0bee0eb 100644
--- a/process/commons/hadoop/scalding/src/main/scala/io/prediction/commons/scalding/appdata/monogodb/MongoU2iActionsSource.scala
+++ b/process/commons/hadoop/scalding/src/main/scala/io/prediction/commons/scalding/appdata/monogodb/MongoU2iActionsSource.scala
@@ -103,7 +103,8 @@
       fields: (String, String, String, String, Option[String]) =>
         val (action, uid, iid, t, v) = fields
 
-        val vData: String = v.getOrElse(null) // use null if no such field for this record
+        // u2iAction v field type is Int
+        val vData: Any = v.map(_.toInt).getOrElse(null) // use null if no such field for this record
 
         (action, uid, iid, new java.util.Date(t.toLong), vData, appid)
     }.write(this)
diff --git a/process/engines/commons/algorithms/scala/random/build.sbt b/process/engines/commons/algorithms/scala/random/build.sbt
new file mode 100644
index 0000000..2d569eb
--- /dev/null
+++ b/process/engines/commons/algorithms/scala/random/build.sbt
@@ -0,0 +1,21 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-commons-algorithms-scala-random"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("random" -> "io.prediction.algorithms.commons.random.Random")
+
+packJvmOpts := Map("random" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/commons/algorithms/scala/random/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/commons/algorithms/scala/random/src/main/resources/application.conf
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/commons/algorithms/scala/random/src/main/resources/logback.xml
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/commons/algorithms/scala/random/src/main/resources/logback.xml
diff --git a/process/engines/commons/algorithms/scala/random/src/main/scala/Random.scala b/process/engines/commons/algorithms/scala/random/src/main/scala/Random.scala
new file mode 100644
index 0000000..54ee76f
--- /dev/null
+++ b/process/engines/commons/algorithms/scala/random/src/main/scala/Random.scala
@@ -0,0 +1,105 @@
+package io.prediction.algorithms.commons.random
+
+import io.prediction.commons.Config
+import io.prediction.commons.modeldata.{ ItemRecScore, ItemSimScore }
+
+import com.github.nscala_time.time.Imports._
+import grizzled.slf4j.Logger
+
+case class RandomConfig(
+  appid: Int = 0,
+  engineid: Int = 0,
+  algoid: Int = 0,
+  evalid: Option[Int] = None,
+  itypes: Option[Seq[String]] = None,
+  numPredictions: Int = 0,
+  modelSet: Boolean = false,
+  recommendationTime: DateTime = DateTime.now)
+
+object Random {
+  def main(args: Array[String]): Unit = {
+    implicit val dateTimeRead: scopt.Read[DateTime] = scopt.Read.reads(x => new DateTime(x.toLong))
+    val commonsConfig = new Config()
+    val engines = commonsConfig.getSettingsEngines
+    val parser = new scopt.OptionParser[RandomConfig]("random") {
+      head("random")
+      opt[Int]("appid") required () action { (x, c) =>
+        c.copy(appid = x)
+      } text ("the App ID that is the parent of the specified Algo ID")
+      opt[Int]("engineid") required () action { (x, c) =>
+        c.copy(engineid = x)
+      } validate { x =>
+        engines.get(x) map { _ => success } getOrElse failure(s"the Engine ID does not correspond to a valid Engine")
+      } text ("the Engine ID that is the parent of the specified Algo ID")
+      opt[Int]("algoid") required () action { (x, c) =>
+        c.copy(algoid = x)
+      } text ("the Algo ID of this run")
+      opt[Int]("evalid") action { (x, c) =>
+        c.copy(evalid = Some(x))
+      } text ("the OfflineEval ID of this run, if any")
+      opt[String]("itypes") action { (x, c) =>
+        c.copy(itypes = Some(x.split(',')))
+      } text ("restrict use of certain itypes (comma-separated, e.g. --itypes type1,type2)")
+      opt[Int]("numPredictions") required () action { (x, c) =>
+        c.copy(numPredictions = x)
+      } text ("the number of predictions to generate")
+      opt[Boolean]("modelSet") required () action { (x, c) =>
+        c.copy(modelSet = x)
+      } text ("the model set to write to after training is finished")
+      opt[DateTime]("recommendationTime") required () action { (x, c) =>
+        c.copy(recommendationTime = x)
+      } text ("the time instant of this training (UTC UNIX timestamp in milliseconds)")
+    }
+
+    parser.parse(args, RandomConfig()) map { config =>
+      val logger = Logger(Random.getClass)
+      val appid = config.evalid.getOrElse(config.appid)
+      val items = config.evalid map { _ => commonsConfig.getAppdataTrainingItems } getOrElse commonsConfig.getAppdataItems
+      val itypes = config.itypes map { _.toSet } getOrElse Set()
+      val validItems = items.getByAppid(appid).toSeq filter { item =>
+        val typeValidity = config.itypes map { t => (t.toSet & item.itypes.toSet).size > 0 } getOrElse true
+        val timeValidity = (item.starttime, item.endtime) match {
+          case (Some(st), None) => config.recommendationTime >= st
+          case (None, Some(et)) => config.recommendationTime <= et
+          case (Some(st), Some(et)) => st <= config.recommendationTime && config.recommendationTime <= et
+          case _ => true
+        }
+        typeValidity && timeValidity
+      }
+      logger.info(s"# valid items: ${validItems.size}")
+      val randomScores = Seq.range(1, config.numPredictions + 1).reverse map { _.toDouble }
+      engines.get(config.engineid).get.infoid match {
+        case "itemrec" => {
+          val itemRecScores = config.evalid map { _ => commonsConfig.getModeldataTrainingItemRecScores } getOrElse commonsConfig.getModeldataItemRecScores
+          val users = config.evalid map { _ => commonsConfig.getAppdataTrainingUsers } getOrElse commonsConfig.getAppdataUsers
+          users.getByAppid(appid) foreach { user =>
+            val randomItems = scala.util.Random.shuffle(validItems).take(config.numPredictions)
+            itemRecScores.insert(ItemRecScore(
+              uid = user.id,
+              iids = randomItems map { _.id },
+              scores = randomScores,
+              itypes = randomItems map { _.itypes },
+              appid = appid,
+              algoid = config.algoid,
+              modelset = config.modelSet))
+          }
+        }
+        case "itemsim" => {
+          val itemSimScores = config.evalid map { _ => commonsConfig.getModeldataTrainingItemSimScores } getOrElse commonsConfig.getModeldataItemSimScores
+          items.getByAppid(appid) foreach { item =>
+            val randomItems = scala.util.Random.shuffle(validItems).take(config.numPredictions)
+            itemSimScores.insert(ItemSimScore(
+              iid = item.id,
+              simiids = randomItems map { _.id },
+              scores = randomScores,
+              itypes = randomItems map { _.itypes },
+              appid = appid,
+              algoid = config.algoid,
+              modelset = config.modelSet))
+          }
+        }
+      }
+      logger.info("Run finished")
+    }
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/map/build.sbt b/process/engines/commons/evaluations/scala/map/build.sbt
new file mode 100644
index 0000000..45d1746
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/map/build.sbt
@@ -0,0 +1,22 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-commons-metrics-scala-map"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1",
+  "org.specs2" %% "specs2" % "2.3.10" % "test")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("mapatk" -> "io.prediction.metrics.commons.map.MAPAtK")
+
+packJvmOpts := Map("mapatk" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/commons/evaluations/scala/map/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/commons/evaluations/scala/map/src/main/resources/application.conf
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/commons/evaluations/scala/map/src/main/resources/logback.xml
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/commons/evaluations/scala/map/src/main/resources/logback.xml
diff --git a/process/engines/commons/evaluations/scala/map/src/main/scala/MAPAtK.scala b/process/engines/commons/evaluations/scala/map/src/main/scala/MAPAtK.scala
new file mode 100644
index 0000000..8b653d8
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/map/src/main/scala/MAPAtK.scala
@@ -0,0 +1,253 @@
+package io.prediction.metrics.commons.map
+
+import io.prediction.commons.Config
+import io.prediction.commons.settings.OfflineEvalResult
+import io.prediction.commons.filepath.OfflineMetricFile
+
+import scala.io.Source
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import grizzled.slf4j.Logger
+
+case class MAPAtKConfig(
+  appid: Int = 0,
+  engineid: Int = 0,
+  evalid: Int = 0,
+  metricid: Int = 0,
+  algoid: Int = 0,
+  iteration: Int = 0,
+  splitset: String = "",
+  k: Int = 0,
+  goal: String = "",
+  debug: Boolean = false)
+
+/**
+ * Mean Average Precision at K for Single Machine
+ *
+ * TODO: Eliminate use of Config object. Let scheduler handles it all.
+ */
+object MAPAtK {
+  def main(args: Array[String]) {
+    val commonsConfig = new Config()
+    val engines = commonsConfig.getSettingsEngines
+    val parser = new scopt.OptionParser[MAPAtKConfig]("mapatk") {
+      head("mapatk")
+      opt[Int]("appid") required () action { (x, c) =>
+        c.copy(appid = x)
+      } text ("the App ID that this metric will be applied to")
+      opt[Int]("engineid") required () action { (x, c) =>
+        c.copy(engineid = x)
+      } validate { x =>
+        engines.get(x) map { _ => success } getOrElse failure(s"the Engine ID does not correspond to a valid Engine")
+      } text ("the Engine ID that this metric will be applied to")
+      opt[Int]("evalid") required () action { (x, c) =>
+        c.copy(evalid = x)
+      } text ("the OfflineEval ID that this metric will be applied to")
+      opt[Int]("metricid") required () action { (x, c) =>
+        c.copy(metricid = x)
+      } text ("the OfflineEvalMetric ID that this metric will be applied to")
+      opt[Int]("algoid") required () action { (x, c) =>
+        c.copy(algoid = x)
+      } text ("the Algo ID that this metric will be applied to")
+      opt[Int]("iteration") required () action { (x, c) =>
+        c.copy(iteration = x)
+      } text ("the iteration number (starts from 1 for the 1st iteration and then increment for later iterations)")
+      opt[String]("splitset") required () action { (x, c) =>
+        c.copy(splitset = x)
+      } validate { x =>
+        if (x == "validation" || x == "test") success else failure("--splitset must be either 'validation' or 'test'")
+      } text ("the split set (validation or test) that this metric will be run against")
+      opt[Int]("k") required () action { (x, c) =>
+        c.copy(k = x)
+      } text ("the k parameter for MAP@k")
+      opt[String]("goal") required () action { (x, c) =>
+        c.copy(goal = x)
+      } validate { x =>
+        x match {
+          case "view" | "conversion" | "like" | "rate3" | "rate4" | "rate5" => success
+          case _ => failure("invalid goal specified")
+        }
+      } text ("actions to be treated as relevant (valid values: view, conversion, like, rate3, rate4, rate5)")
+      opt[Unit]("debug") hidden () action { (x, c) =>
+        c.copy(debug = true)
+      } text ("debug mode")
+    }
+
+    parser.parse(args, MAPAtKConfig()) map { config =>
+      val logger = Logger(MAPAtK.getClass)
+      val u2iDb = if (config.splitset == "validation") commonsConfig.getAppdataValidationU2IActions else commonsConfig.getAppdataTestU2IActions
+      val resultsDb = commonsConfig.getSettingsOfflineEvalResults
+      val engine = engines.get(config.engineid).get
+
+      // Collect relevant items for all users and items
+      logger.info("Collecting relevance data...")
+      val u2i = u2iDb.getAllByAppid(config.evalid).toSeq filter { u2iAction =>
+        config.goal match {
+          case "view" | "conversion" | "like" => u2iAction.action == config.goal
+          case "rate3" => try { u2iAction.action == "rate" && u2iAction.v.get >= 3 } catch {
+            case e: Exception =>
+              logger.error(s"${u2iAction.uid}-${u2iAction.iid}: ${u2iAction.v} (${e.getMessage()})")
+              false
+          }
+          case "rate4" => try { u2iAction.action == "rate" && u2iAction.v.get >= 4 } catch {
+            case e: Exception =>
+              logger.error(s"${u2iAction.uid}-${u2iAction.iid}: ${u2iAction.v} (${e.getMessage()})")
+              false
+          }
+          case "rate5" => try { u2iAction.action == "rate" && u2iAction.v.get == 5 } catch {
+            case e: Exception =>
+              logger.error(s"${u2iAction.uid}-${u2iAction.iid}: ${u2iAction.v} (${e.getMessage()})")
+              false
+          }
+        }
+      }
+
+      val relevantItems = u2i.groupBy(_.uid).mapValues(_.map(_.iid).toSet)
+      val relevantUsers = if (engine.infoid == "itemsim") u2i.groupBy(_.iid).mapValues(_.map(_.uid).toSet) else Map[String, Set[String]]()
+
+      val relevantItemsPath = OfflineMetricFile(
+        commonsConfig.settingsLocalTempRoot,
+        config.appid,
+        config.engineid,
+        config.evalid,
+        config.metricid,
+        config.algoid,
+        "relevantItems.tsv")
+      val relevantItemsWriter = new BufferedWriter(new FileWriter(new File(relevantItemsPath)))
+      relevantItems.foreach {
+        case (uid, iids) =>
+          val iidsString = iids.mkString(",")
+          relevantItemsWriter.write(s"${uid}\t${iidsString}\n")
+      }
+      relevantItemsWriter.close()
+
+      logger.info(s"# users: ${relevantItems.size}")
+      if (engine.infoid == "itemsim") logger.info(s"# items: ${relevantUsers.size}")
+
+      // Read top-k list for every user
+      val topKFilePath = OfflineMetricFile(
+        commonsConfig.settingsLocalTempRoot,
+        config.appid,
+        config.engineid,
+        config.evalid,
+        config.metricid,
+        config.algoid,
+        "topKItems.tsv")
+      logger.info(s"Reading top-K list from: $topKFilePath")
+      val prefixSize = config.evalid.toString.length + 1
+
+      val topKItems: Map[String, Seq[String]] = engine.infoid match {
+        case "itemrec" => {
+          Source.fromFile(topKFilePath).getLines().toSeq.map(
+            _.split("\t")).groupBy(
+              _.apply(0)) map { t =>
+                (t._1.drop(prefixSize) -> t._2.apply(0).apply(1).split(",").toSeq.map(_.drop(prefixSize)))
+              }
+        }
+        case "itemsim" => {
+          /**
+           * topKItems.tsv for ItemSim
+           *     iid     simiid  score
+           *     i0      i1      3.2
+           *     i0      i4      2.5
+           *     i0      i5      1.4
+           *
+           * 1. Read all lines into a Seq[String].
+           * 2. Split by \t into Seq[Array[String]].
+           * 3. Group by first element (iid) into Map[String, Seq[Array[String]]].
+           * 4. Sort and filter Seq[Array[String]] to become Map[String, Seq[String]].
+           */
+          Source.fromFile(topKFilePath).getLines().toSeq.map(
+            _.split("\t")).groupBy(
+              _.apply(0)) map { t =>
+                (t._1.drop(prefixSize) -> t._2.sortBy(_.apply(2)).reverse.map(_.apply(1).drop(prefixSize)))
+              }
+        }
+      }
+
+      logger.info(s"Running MAP@${config.k} for ${engine.infoid} engine")
+      val mapAtK: Double = engine.infoid match {
+        case "itemrec" => {
+          val apAtK = topKItems map { t =>
+            val score = relevantItems.get(t._1) map { ri =>
+              averagePrecisionAtK(config.k, t._2, ri)
+            } getOrElse 0.0
+            (t._1, score)
+          }
+          val apAtKPath = OfflineMetricFile(
+            commonsConfig.settingsLocalTempRoot,
+            config.appid,
+            config.engineid,
+            config.evalid,
+            config.metricid,
+            config.algoid,
+            "apAtK.tsv")
+          val apAtKWriter = new BufferedWriter(new FileWriter(new File(apAtKPath)))
+          apAtK.foreach {
+            case (uid, score) =>
+              apAtKWriter.write(s"${uid}\t${score}\n")
+          }
+          apAtKWriter.close()
+
+          apAtK.map(_._2).sum / scala.math.min(topKItems.size, relevantItems.size)
+        }
+        case "itemsim" => {
+          val iapAtK = topKItems map { t =>
+            relevantUsers.get(t._1) map { ru =>
+              val apAtK = ru map { uid =>
+                relevantItems.get(uid) map { ri =>
+                  averagePrecisionAtK(config.k, t._2, ri)
+                } getOrElse 0.0
+              }
+              apAtK.sum / scala.math.min(ru.size, relevantItems.size)
+            } getOrElse 0.0
+          }
+          iapAtK.sum / scala.math.min(topKItems.size, relevantUsers.size)
+        }
+        case _ => 0.0
+      }
+
+      logger.info(s"MAP@${config.k} = $mapAtK. Saving results...")
+
+      resultsDb.save(OfflineEvalResult(
+        evalid = config.evalid,
+        metricid = config.metricid,
+        algoid = config.algoid,
+        score = mapAtK,
+        iteration = config.iteration,
+        splitset = config.splitset))
+
+      logger.info(s"MAP@${config.k} has run to completion")
+    }
+  }
+
+  /**
+   * Calculate the mean average precision @ k
+   *
+   * ap@k = sum(P(i)/min(m, k)) where i=1 to k
+   * k is the number of predictions to be retrieved.
+   * P(i) is the precision at position i of the top-K list
+   *    if the item at position i is relevant, then P(i) = (the number of relevant items up to that position in the top-k list / position)
+   *    if the item at position i is not relevant, then P(i)=0
+   * m is the number of relevant items for this user.
+   *
+   * @return the average precision at k
+   */
+  def averagePrecisionAtK(k: Int, predictedItems: Seq[String], relevantItems: Set[String]): Double = {
+    // supposedly the predictedItems.size should match k
+    // NOTE: what if predictedItems has fewer than k items? use the available item count as k.
+    val n = scala.math.min(predictedItems.size, k)
+
+    // find if each element in the predictedItems is one of the relevant items
+    // if so, map to 1. else map to 0
+    // (0, 1, 0, 1, 1, 0, 0)
+    val relevantBinary: Seq[Int] = predictedItems.take(n).map { x => if (relevantItems(x)) 1 else 0 }
+    val pAtKNom = relevantBinary.scanLeft(0)(_ + _).drop(1).zip(relevantBinary).map(t => if (t._2 != 0) t._1 else 0).map(_.toDouble)
+    val pAtKDenom = 1 to relevantBinary.size
+    val pAtK = pAtKNom zip pAtKDenom map { t => t._1 / t._2 }
+    val apAtKDenom = scala.math.min(n, relevantItems.size)
+    if (apAtKDenom == 0) 0 else pAtK.sum / apAtKDenom
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/map/src/test/resources/application.conf b/process/engines/commons/evaluations/scala/map/src/test/resources/application.conf
new file mode 100644
index 0000000..5744f1d
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/map/src/test/resources/application.conf
@@ -0,0 +1,22 @@
+io.prediction.base=..
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.name=predictionio_mapatk_test
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.name=predictionio_appdata_mapatk_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.name=predictionio_training_appdata_mapatk_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_mapatk_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_mapatk_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_mapatk_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_mapatk_test
diff --git a/process/engines/commons/evaluations/scala/map/src/test/scala/MAPAtKSpec.scala b/process/engines/commons/evaluations/scala/map/src/test/scala/MAPAtKSpec.scala
new file mode 100644
index 0000000..e6c2994
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/map/src/test/scala/MAPAtKSpec.scala
@@ -0,0 +1,55 @@
+package io.prediction.metrics.commons.map
+
+import com.mongodb.casbah.Imports._
+import org.specs2._
+import org.specs2.specification.Step
+
+class MAPAtKSpec extends Specification {
+  def is = s2"""
+
+  MAP@k Specification
+
+  Computing AP@k should be correct
+    Test 1 $apatk1
+    Test 2 $apatk2
+    Test 3 $apatk3
+    Test 4 $apatk4
+
+  At the end of test it should
+    clean up test database cleanup
+  """
+
+  def apatk1 =
+    MAPAtK.averagePrecisionAtK(
+      5,
+      Seq("foo", "bar", "abc", "def", "ghi"),
+      Set("bar", "def")) must_== 0.5
+
+  def apatk2 =
+    MAPAtK.averagePrecisionAtK(
+      10,
+      Seq("foo", "bar", "abc", "def", "ghi"),
+      Set("bar", "def")) must_== 0.5
+
+  def apatk3 =
+    MAPAtK.averagePrecisionAtK(
+      10,
+      Seq("a", "b", "c", "d", "e", "f", "g", "h"),
+      Set("a", "e", "c")) must_== 0.7555555555555555
+
+  def apatk4 =
+    MAPAtK.averagePrecisionAtK(
+      4,
+      Seq("a", "b", "c", "d", "e", "f", "g", "h"),
+      Set("a", "e", "c")) must_== 0.5555555555555555
+
+  def cleanup = {
+    Step(MongoConnection()("predictionio_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_appdata_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_modeldata_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_training_appdata_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_validation_appdata_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_test_appdata_mapatk_test").dropDatabase())
+    Step(MongoConnection()("predictionio_training_modeldata_mapatk_test").dropDatabase())
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/paramgen/build.sbt b/process/engines/commons/evaluations/scala/paramgen/build.sbt
index de739e2..bc5f8a1 100644
--- a/process/engines/commons/evaluations/scala/paramgen/build.sbt
+++ b/process/engines/commons/evaluations/scala/paramgen/build.sbt
@@ -1,16 +1,21 @@
-import AssemblyKeys._
-
-assemblySettings
+import xerial.sbt.Pack._
 
 name := "predictionio-process-commons-evaluations-paramgen"
 
 libraryDependencies ++= Seq(
-  "ch.qos.logback" % "logback-classic" % "1.0.9",
-  "ch.qos.logback" % "logback-core" % "1.0.9",
-  "com.typesafe" % "config" % "1.0.0",
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
   "org.clapper" %% "grizzled-slf4j" % "1.0.1")
 
-excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
-  val excludes = Set("minlog-1.2.jar")
-  cp filter { jar => excludes(jar.data.getName)}
-}
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("paramgen" -> "io.prediction.evaluations.commons.paramgen.ParamGen")
+
+packJvmOpts := Map("paramgen" -> Common.packCommonJvmOpts)
diff --git a/process/engines/commons/evaluations/scala/paramgen/src/main/scala/ParamGen.scala b/process/engines/commons/evaluations/scala/paramgen/src/main/scala/ParamGen.scala
new file mode 100644
index 0000000..8f46bcf
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/paramgen/src/main/scala/ParamGen.scala
@@ -0,0 +1,77 @@
+package io.prediction.evaluations.commons.paramgen
+
+import io.prediction.commons._
+
+import grizzled.slf4j.Logger
+
+case class ParamGenConfig(
+  evalids: Seq[Int] = Seq(),
+  algoid: Int = 0,
+  loop: Int = 0,
+  paramsets: Int = 0)
+
+object ParamGen {
+  def main(args: Array[String]) {
+    val commonsConfig = new Config()
+    val algos = commonsConfig.getSettingsAlgos
+    val parser = new scopt.OptionParser[ParamGenConfig]("paramgen") {
+      head("paramgen")
+      opt[String]("evalids") required () action { (x, c) =>
+        c.copy(evalids = x.split(',').map(_.toInt))
+      } text ("comma-separated list of OfflineEval IDs that the parameter generator should consider")
+      opt[Int]("algoid") required () action { (x, c) =>
+        c.copy(algoid = x)
+      } validate { x =>
+        algos.get(x) map { _ => success } getOrElse failure(s"the Algo ID does not correspond to a valid Algo")
+      } text ("the Algo ID of the tune subject")
+      opt[Int]("loop") required () action { (x, c) =>
+        c.copy(loop = x)
+      } text ("the current loop number of this set of parameter")
+      opt[Int]("paramsets") required () action { (x, c) =>
+        c.copy(paramsets = x)
+      } text ("the number of parameter sets of this tuning")
+    }
+
+    parser.parse(args, ParamGenConfig()) map { config =>
+      val logger = Logger(ParamGen.getClass)
+
+      val evalids = config.evalids
+      val algoid = config.algoid
+      val loop = config.loop
+      val paramsets = config.paramsets
+      val algo = algos.get(algoid).get
+
+      /** Figure out what parameters can be tuned */
+      val paramsToTune = algo.params.keySet filter { k =>
+        algo.params.keySet.exists(e => s"${k}Min" == e) &&
+          algo.params.keySet.exists(e => s"${k}Max" == e)
+      }
+
+      for (i <- 1 to paramsets) {
+        /** Pick a random value between intervals */
+        val paramsValues = paramsToTune map { p =>
+          val minValue = algo.params(s"${p}Min")
+          val maxValue = algo.params(s"${p}Max")
+          algo.params(s"${p}Min") match {
+            case n: Int =>
+              val diff = maxValue.asInstanceOf[Int] - minValue.asInstanceOf[Int]
+              if (diff == 0)
+                p -> minValue.asInstanceOf[Int]
+              else
+                p -> (minValue.asInstanceOf[Int] + scala.util.Random.nextInt(diff))
+            case n: Double => p -> (minValue.asInstanceOf[Double] + scala.util.Random.nextDouble() * (maxValue.asInstanceOf[Double] - minValue.asInstanceOf[Double]))
+          }
+        }
+        evalids foreach { evalid =>
+          val algoToTune = algo.copy(
+            offlineevalid = Some(evalid.toInt),
+            loop = Some(loop),
+            params = algo.params ++ paramsValues.toMap,
+            paramset = Some(i),
+            status = "simeval")
+          algos.insert(algoToTune)
+        }
+      }
+    }
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/paramgen/src/main/scala/io/prediction/evaluations/itemrec/paramgen/ParamGen.scala b/process/engines/commons/evaluations/scala/paramgen/src/main/scala/io/prediction/evaluations/itemrec/paramgen/ParamGen.scala
deleted file mode 100644
index b0d48fd..0000000
--- a/process/engines/commons/evaluations/scala/paramgen/src/main/scala/io/prediction/evaluations/itemrec/paramgen/ParamGen.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package io.prediction.evaluations.itemrec.paramgen
-
-import io.prediction.commons._
-
-import com.typesafe.config.ConfigFactory
-import grizzled.slf4j.Logger
-
-import scala.collection.immutable.Map
-
-object ParamGen {
-  def main(args: Array[String]) {
-    val logger = Logger(ParamGen.getClass)
-
-    val config = ConfigFactory.load
-
-    val evalids = config.getString("evalids")
-    val algoid = config.getInt("algoid")
-    val loop = config.getInt("loop")
-    val paramsets = config.getInt("paramsets")
-
-    val commonsConfig = new Config
-
-    val algos = commonsConfig.getSettingsAlgos
-
-    val algo = algos.get(algoid).get
-
-    /** Figure out what parameters can be tuned */
-    val paramsToTune = algo.params.keySet filter { k =>
-      algo.params.keySet.exists(e => s"${k}Min" == e) &&
-        algo.params.keySet.exists(e => s"${k}Max" == e)
-    }
-
-    for (i <- 1 to paramsets) {
-      /** Pick a random value between intervals */
-      val paramsValues = paramsToTune map { p =>
-        val minValue = algo.params(s"${p}Min")
-        val maxValue = algo.params(s"${p}Max")
-        algo.params(s"${p}Min") match {
-          case n: Int =>
-            val diff = maxValue.asInstanceOf[Int] - minValue.asInstanceOf[Int]
-            if (diff == 0)
-              p -> minValue.asInstanceOf[Int]
-            else
-              p -> (minValue.asInstanceOf[Int] + scala.util.Random.nextInt(diff))
-          case n: Double => p -> (minValue.asInstanceOf[Double] + scala.util.Random.nextDouble() * (maxValue.asInstanceOf[Double] - minValue.asInstanceOf[Double]))
-        }
-      }
-      evalids.split(",") foreach { evalid =>
-        val algoToTune = algo.copy(
-          offlineevalid = Some(evalid.toInt),
-          loop = Some(loop),
-          params = algo.params ++ paramsValues.toMap,
-          paramset = Some(i),
-          status = "simeval")
-        algos.insert(algoToTune)
-      }
-    }
-  }
-}
diff --git a/process/engines/commons/evaluations/scala/topkitems/build.sbt b/process/engines/commons/evaluations/scala/topkitems/build.sbt
new file mode 100644
index 0000000..b4690ae
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/topkitems/build.sbt
@@ -0,0 +1,21 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-commons-evaluations-topkitems"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("topk" -> "io.prediction.evaluations.commons.topkitems.TopKItems")
+
+packJvmOpts := Map("topk" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/commons/evaluations/scala/topkitems/src/main/resources/application.conf
similarity index 100%
rename from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
rename to process/engines/commons/evaluations/scala/topkitems/src/main/resources/application.conf
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/commons/evaluations/scala/topkitems/src/main/resources/logback.xml
similarity index 100%
rename from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml
rename to process/engines/commons/evaluations/scala/topkitems/src/main/resources/logback.xml
diff --git a/process/engines/commons/evaluations/scala/topkitems/src/main/scala/TopKItems.scala b/process/engines/commons/evaluations/scala/topkitems/src/main/scala/TopKItems.scala
new file mode 100644
index 0000000..d93a28f
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/topkitems/src/main/scala/TopKItems.scala
@@ -0,0 +1,135 @@
+package io.prediction.evaluations.commons.topkitems
+
+import io.prediction.commons._
+import io.prediction.commons.filepath.OfflineMetricFile
+import io.prediction.output.itemrec.ItemRecAlgoOutput
+import io.prediction.output.itemsim.ItemSimAlgoOutput
+
+import grizzled.slf4j.Logger
+import java.io.{ File, PrintWriter }
+import scala.sys.process._
+
+case class TopKItemsConfig(
+  enginetype: String = "",
+  evalid: Int = 0,
+  metricid: Int = 0,
+  algoid: Int = 0,
+  hdfsroot: String = "",
+  k: Int = 0,
+  local: Boolean = false)
+
+object TopKItems {
+  def main(args: Array[String]) {
+    val parser = new scopt.OptionParser[TopKItemsConfig]("topk") {
+      head("topk")
+      opt[String]("enginetype") required () action { (x, c) =>
+        c.copy(enginetype = x)
+      } validate { x =>
+        x match {
+          case "itemrec" | "itemsim" => success
+          case _ => failure("--enginetype must be either itemrec or itemsim")
+        }
+      } text ("engine type (supported: itemrec, itemsim)")
+      opt[Int]("evalid") required () action { (x, c) =>
+        c.copy(evalid = x)
+      } text ("the OfflineEval ID that this metric will be applied to")
+      opt[Int]("metricid") required () action { (x, c) =>
+        c.copy(metricid = x)
+      } text ("the OfflineEvalMetric ID that this metric will be applied to")
+      opt[Int]("algoid") required () action { (x, c) =>
+        c.copy(algoid = x)
+      } text ("the Algo ID that this metric will be applied to")
+      opt[String]("hdfsroot") required () action { (x, c) =>
+        c.copy(hdfsroot = x)
+      } text ("the HDFS root directory location where temporary files will be stored")
+      opt[Int]("k") required () action { (x, c) =>
+        c.copy(k = x)
+      } text ("the k parameter for MAP@k")
+      opt[Unit]("local") action { (_, c) =>
+        c.copy(local = true)
+      } text ("run in local mode, i.e. do not copy the end result to HDFS")
+    }
+
+    parser.parse(args, TopKItemsConfig()) map { config =>
+      val logger = Logger(TopKItems.getClass)
+      val evalid = config.evalid
+      val algoid = config.algoid
+      val metricid = config.metricid
+      val hdfsRoot = config.hdfsroot
+      val k = config.k
+      val commonsConfig = new Config
+
+      /** Try search path if hadoop home is not set. */
+      val hadoopCommand = commonsConfig.settingsHadoopHome map { h => h + "/bin/hadoop" } getOrElse { "hadoop" }
+
+      val apps = commonsConfig.getSettingsApps
+      val engines = commonsConfig.getSettingsEngines
+      val algos = commonsConfig.getSettingsAlgos
+      val offlineEvals = commonsConfig.getSettingsOfflineEvals
+
+      val algo = algos.get(algoid).get
+      val offlineEval = offlineEvals.get(evalid).get
+      val engine = engines.get(offlineEval.engineid).get
+      val app = apps.get(engine.appid).get.copy(id = evalid)
+
+      val tmpFilePath = OfflineMetricFile(commonsConfig.settingsLocalTempRoot, engine.appid, engine.id, evalid, metricid, algoid, "topKItems.tsv")
+      val tmpFile = new File(tmpFilePath)
+      tmpFile.getParentFile().mkdirs()
+      logger.info(s"Dumping data to temporary file $tmpFilePath...")
+
+      config.enginetype match {
+        case "itemrec" => {
+          val users = commonsConfig.getAppdataTrainingUsers
+          var userCount = 0
+          printToFile(tmpFile) { p =>
+            users.getByAppid(evalid) foreach { u =>
+              val topKItems = ItemRecAlgoOutput.output(u.id, k, None, None, None, None)(app, engine, algo, Some(offlineEval))
+              if (topKItems.length > 0) {
+                userCount += 1
+                val topKString = topKItems.map(iid => s"${evalid}_${iid}").mkString(",")
+                p.println(s"${evalid}_${u.id}\t${topKString}")
+              }
+            }
+            logger.info(s"Found $userCount user(s) with non-zero top-K items")
+          }
+        }
+        case "itemsim" => {
+          val items = commonsConfig.getAppdataTrainingItems
+          val scores = Seq.range(1, k + 1).reverse
+          var itemCount = 0
+          printToFile(tmpFile) { p =>
+            items.getByAppid(evalid) foreach { i =>
+              val topKItems = ItemSimAlgoOutput.output(i.id, k, None, None, None, None)(app, engine, algo, Some(offlineEval))
+              if (topKItems.length > 0) {
+                itemCount += 1
+                topKItems.zip(scores) foreach { tuple =>
+                  val (iid, score) = tuple
+                  p.println(s"${evalid}_${i.id}\t${evalid}_${iid}\t${score}")
+                }
+              }
+            }
+            logger.info(s"Found ${itemCount} item(s) with non-zero top-K items")
+          }
+        }
+      }
+
+      if (!config.local) {
+        tmpFile.deleteOnExit
+        val hdfsFilePath = OfflineMetricFile(hdfsRoot, engine.appid, engine.id, evalid, metricid, algoid, "topKItems.tsv")
+        val rmCommand = s"$hadoopCommand fs -rm $hdfsFilePath"
+        logger.info(s"Executing '${rmCommand}'...")
+        rmCommand.!
+        val copyCommand = s"$hadoopCommand fs -copyFromLocal $tmpFilePath $hdfsFilePath"
+        logger.info(s"Executing '${copyCommand}'...")
+        copyCommand.!
+      }
+
+      logger.info("Finished")
+    }
+  }
+
+  def printToFile(f: java.io.File)(op: java.io.PrintWriter => Unit) {
+    val p = new java.io.PrintWriter(f)
+    try { op(p) } finally { p.close() }
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/u2isplit/build.sbt b/process/engines/commons/evaluations/scala/u2isplit/build.sbt
new file mode 100644
index 0000000..247d641
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/u2isplit/build.sbt
@@ -0,0 +1,23 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-commons-evaluations-scala-u2isplit"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1",
+  "org.json4s" %% "json4s-native" % "3.2.7",
+  "org.json4s" %% "json4s-ext" % "3.2.7")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("u2isplit" -> "io.prediction.evaluations.commons.u2isplit.U2ISplit")
+
+packJvmOpts := Map("u2isplit" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/commons/evaluations/scala/u2isplit/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/commons/evaluations/scala/u2isplit/src/main/resources/application.conf
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/commons/evaluations/scala/u2isplit/src/main/resources/logback.xml
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/commons/evaluations/scala/u2isplit/src/main/resources/logback.xml
diff --git a/process/engines/commons/evaluations/scala/u2isplit/src/main/scala/U2ISplit.scala b/process/engines/commons/evaluations/scala/u2isplit/src/main/scala/U2ISplit.scala
new file mode 100644
index 0000000..5be8004
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/u2isplit/src/main/scala/U2ISplit.scala
@@ -0,0 +1,220 @@
+package io.prediction.evaluations.commons.u2isplit
+
+import io.prediction.commons.Config
+import io.prediction.commons.appdata.{ Item, Items, U2IAction, U2IActions, User, Users }
+import io.prediction.commons.filepath.{ U2ITrainingTestSplitFile }
+
+import java.io.{ BufferedWriter, File, FileWriter }
+import scala.io.Source
+
+import com.github.nscala_time.time.Imports._
+import grizzled.slf4j.Logger
+import org.json4s.native.Serialization
+
+case class U2ISplitConfig(
+  sequenceNum: Int = 0,
+  appid: Int = 0,
+  engineid: Int = 0,
+  evalid: Int = 0,
+  itypes: Option[Seq[String]] = None,
+  trainingpercent: Double = 0,
+  validationpercent: Double = 0,
+  testpercent: Double = 0,
+  timeorder: Boolean = false)
+
+/**
+ * User-to-Item Action Splitter for Single Machine
+ *
+ * TODO: Eliminate use of Config object. Let scheduler handle it all.
+ */
+object U2ISplit {
+  def main(args: Array[String]) {
+    val parser = new scopt.OptionParser[U2ISplitConfig]("u2isplit") {
+      head("u2isplit")
+      opt[Int]("sequenceNum") required () action { (x, c) =>
+        c.copy(sequenceNum = x)
+      } text ("the sequence number (starts from 1 for the 1st iteration and then increment for later iterations)")
+      opt[Int]("appid") required () action { (x, c) =>
+        c.copy(appid = x)
+      } text ("the App ID to split data from")
+      opt[Int]("engineid") required () action { (x, c) =>
+        c.copy(engineid = x)
+      } text ("the Engine ID to split data to")
+      opt[Int]("evalid") required () action { (x, c) =>
+        c.copy(evalid = x)
+      } text ("the OfflineEval ID to split data to")
+      opt[String]("itypes") action { (x, c) =>
+        c.copy(itypes = Some(x.split(',')))
+      } text ("restrict use of certain itypes (comma-separated, e.g. --itypes type1,type2)")
+      opt[Double]("trainingpercent") required () action { (x, c) =>
+        c.copy(trainingpercent = x)
+      } validate { x =>
+        if (x >= 0.01 && x <= 1) success else failure("--trainingpercent must be between 0.01 and 1")
+      } text ("size of training set (0.01 to 1)")
+      opt[Double]("validationpercent") required () action { (x, c) =>
+        c.copy(validationpercent = x)
+      } validate { x =>
+        if (x >= 0 && x <= 1) success else failure("--validationpercent must be between 0 and 1")
+      } text ("size of validation set (0 to 1)")
+      opt[Double]("testpercent") required () action { (x, c) =>
+        c.copy(testpercent = x)
+      } validate { x =>
+        if (x >= 0.01 && x <= 1) success else failure("--testpercent must be between 0.01 and 1")
+      } text ("size of test set (0.01 to 1)")
+      opt[Boolean]("timeorder") action { (x, c) =>
+        c.copy(timeorder = x)
+      } text ("set to true to sort the sampled results in time order before splitting (default to false)")
+      checkConfig { c =>
+        if (c.trainingpercent + c.validationpercent + c.testpercent > 1) failure("sum of training, validation, and test sizes must not exceed 1") else success
+      }
+    }
+
+    parser.parse(args, U2ISplitConfig()) map { config =>
+      val logger = Logger(U2ISplit.getClass)
+      val commonsConfig = new Config()
+      val usersFilePath = U2ITrainingTestSplitFile(
+        rootDir = commonsConfig.settingsLocalTempRoot,
+        appId = config.appid,
+        engineId = config.engineid,
+        evalId = config.evalid,
+        name = "users")
+      val usersFile = new File(usersFilePath)
+      val itemsFilePath = U2ITrainingTestSplitFile(
+        rootDir = commonsConfig.settingsLocalTempRoot,
+        appId = config.appid,
+        engineId = config.engineid,
+        evalId = config.evalid,
+        name = "items")
+      val itemsFile = new File(itemsFilePath)
+      val u2iActionsFilePath = U2ITrainingTestSplitFile(
+        rootDir = commonsConfig.settingsLocalTempRoot,
+        appId = config.appid,
+        engineId = config.engineid,
+        evalId = config.evalid,
+        name = "u2iActions")
+      val u2iActionsFile = new File(u2iActionsFilePath)
+      implicit val formats = org.json4s.DefaultFormats ++ org.json4s.ext.JodaTimeSerializers.all
+
+      // If this is the first iteration (sequence), take a snapshot of appdata
+      if (config.sequenceNum == 1) {
+        logger.info("This is the first iteration. Taking snapshot of app's data...")
+
+        val usersDb = commonsConfig.getAppdataUsers
+        val itemsDb = commonsConfig.getAppdataItems
+        val u2iDb = commonsConfig.getAppdataU2IActions
+
+        // Create the output directory if it does not yet exist
+        val outputDir = new File(U2ITrainingTestSplitFile(
+          rootDir = commonsConfig.settingsLocalTempRoot,
+          appId = config.appid,
+          engineId = config.engineid,
+          evalId = config.evalid,
+          name = ""))
+        outputDir.mkdirs()
+
+        // Dump all users and fix ID prefixes
+        logger.info(s"Writing to: $usersFilePath")
+        val usersWriter = new BufferedWriter(new FileWriter(usersFile))
+        usersDb.getByAppid(config.appid) foreach { user =>
+          usersWriter.write(Serialization.write(user.copy(appid = config.evalid)))
+          usersWriter.newLine()
+        }
+        usersWriter.close()
+
+        // Dump all items and fix ID prefixes
+        // Filtered by itypes
+        logger.info(s"Writing to: $itemsFilePath")
+        val itemsWriter = new BufferedWriter(new FileWriter(itemsFile))
+        val validIids = collection.mutable.Set[String]()
+        config.itypes map { t =>
+          val engineItypes = t.toSet
+          itemsDb.getByAppid(config.appid) foreach { item =>
+            if (item.itypes.toSet.intersect(engineItypes).size > 0) {
+              itemsWriter.write(Serialization.write(item.copy(appid = config.evalid)))
+              itemsWriter.newLine()
+              validIids += item.id
+            }
+          }
+        } getOrElse {
+          itemsDb.getByAppid(config.appid) foreach { item =>
+            itemsWriter.write(Serialization.write(item.copy(appid = config.evalid)))
+            itemsWriter.newLine()
+            validIids += item.id
+          }
+        }
+        itemsWriter.close()
+
+        // Dump all actions and fix ID prefixes
+        // Filtered by itypes
+        logger.info(s"Writing to: $u2iActionsFilePath")
+        var u2iCount = 0
+        val u2iActionsWriter = new BufferedWriter(new FileWriter(u2iActionsFile))
+        u2iDb.getAllByAppid(config.appid) foreach { u2iAction =>
+          if (validIids(u2iAction.iid)) {
+            u2iActionsWriter.write(Serialization.write(u2iAction.copy(appid = config.evalid)))
+            u2iActionsWriter.newLine()
+            u2iCount += 1
+          }
+        }
+        u2iActionsWriter.close()
+
+        // Save the count of U2I actions
+        val u2iActionsCountWriter = new BufferedWriter(new FileWriter(new File(u2iActionsFilePath + "Count")))
+        u2iActionsCountWriter.write(u2iCount.toString)
+        u2iActionsCountWriter.close()
+      }
+
+      // Read snapshots
+      logger.info("Reading snapshots...")
+
+      val trainingUsersDb = commonsConfig.getAppdataTrainingUsers
+      val trainingItemsDb = commonsConfig.getAppdataTrainingItems
+      val trainingU2iDb = commonsConfig.getAppdataTrainingU2IActions
+      val validationU2iDb = commonsConfig.getAppdataValidationU2IActions
+      val testU2iDb = commonsConfig.getAppdataTestU2IActions
+
+      val totalCount = Source.fromFile(new File(u2iActionsFilePath + "Count")).mkString.toInt
+      val evaluationCount = (math.floor((config.trainingpercent + config.validationpercent + config.testpercent) * totalCount)).toInt
+      val trainingCount = (math.floor(config.trainingpercent * totalCount)).toInt
+      val validationCount = (math.floor(config.validationpercent * totalCount)).toInt
+      val trainingValidationCount = trainingCount + validationCount
+      val testCount = evaluationCount - trainingValidationCount
+
+      logger.info(s"Reading from: $usersFilePath")
+      trainingUsersDb.deleteByAppid(config.evalid)
+      Source.fromFile(usersFile).getLines() foreach { userJson =>
+        trainingUsersDb.insert(Serialization.read[User](userJson))
+      }
+
+      logger.info(s"Reading from: $itemsFilePath")
+      trainingItemsDb.deleteByAppid(config.evalid)
+      Source.fromFile(itemsFile).getLines() foreach { itemJson =>
+        trainingItemsDb.insert(Serialization.read[Item](itemJson))
+      }
+
+      /**
+       * Perform itypes filtering at this point because itypes is an
+       * engine-specific parameter, and we want the split percentage to
+       * be relative to the total number of items that is valid for this
+       * particular engine.
+       */
+      logger.info(s"Reading from: $u2iActionsFilePath")
+      trainingU2iDb.deleteByAppid(config.evalid)
+      validationU2iDb.deleteByAppid(config.evalid)
+      testU2iDb.deleteByAppid(config.evalid)
+      val allU2iActions = Source.fromFile(u2iActionsFile).getLines().map(Serialization.read[U2IAction](_))
+      val unsortedEvalU2iActions = util.Random.shuffle(allU2iActions).take(evaluationCount)
+      val evalU2iActions = if (config.timeorder) unsortedEvalU2iActions.toSeq.sortWith(_.t + 0.seconds < _.t + 0.seconds) else unsortedEvalU2iActions.toSeq
+      var count = 0
+      evalU2iActions foreach { u2iAction =>
+        if (count < trainingCount)
+          trainingU2iDb.insert(u2iAction)
+        else if (count >= trainingCount && count < trainingValidationCount)
+          validationU2iDb.insert(u2iAction)
+        else
+          testU2iDb.insert(u2iAction)
+        count += 1
+      }
+    }
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/u2itrainingtestsplit/build.sbt b/process/engines/commons/evaluations/scala/u2itrainingtestsplit/build.sbt
index 1bcbcbf..39dbe29 100644
--- a/process/engines/commons/evaluations/scala/u2itrainingtestsplit/build.sbt
+++ b/process/engines/commons/evaluations/scala/u2itrainingtestsplit/build.sbt
@@ -1,12 +1,21 @@
-import AssemblyKeys._
-
-assemblySettings
+import xerial.sbt.Pack._
 
 name := "predictionio-process-commons-evaluations-scala-u2itrainingtestsplittime"
 
-libraryDependencies += "com.twitter" %% "scalding-args" % "0.8.6"
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "com.github.scopt" %% "scopt" % "3.2.0",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
 
-excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
-  val excludes = Set("minlog-1.2.jar")
-  cp filter { jar => excludes(jar.data.getName)}
-}
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("u2itrainingtestsplit" -> "io.prediction.evaluations.commons.trainingtestsplit.U2ITrainingTestSplitTime")
+
+packJvmOpts := Map("u2itrainingtestsplit" -> Common.packCommonJvmOpts)
diff --git a/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/U2ITrainingTestSplitTime.scala b/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/U2ITrainingTestSplitTime.scala
new file mode 100644
index 0000000..2d5988b
--- /dev/null
+++ b/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/U2ITrainingTestSplitTime.scala
@@ -0,0 +1,151 @@
+package io.prediction.evaluations.commons.trainingtestsplit
+
+import io.prediction.commons.filepath.U2ITrainingTestSplitFile
+
+import java.io.File
+import scala.io.Source
+import scala.sys.process._
+
+import grizzled.slf4j.Logger
+
+case class U2ITrainingTestSplitTimeConfig(
+  hadoop: String = "",
+  pdioEvalJar: String = "",
+  hdfsRoot: String = "",
+  localTempRoot: String = "",
+  appid: Int = 0,
+  engineid: Int = 0,
+  evalid: Int = 0,
+  sequenceNum: Int = 0)
+
+/**
+ * Wrapper for Scalding U2ITrainingTestSplitTime job
+ *
+ * Args:
+ * --hadoop <string> hadoop command
+ * --pdioEvalJar <string> the name of the Scalding U2ITrainingTestSplit job jar
+ * --sequenceNum: <int>. the sequence number (starts from 1 for the 1st iteration and then increment for later iterations)
+ *
+ * --dbType: <string> appdata DB type
+ * --dbName: <string>
+ * --dbHost: <string>. optional. (eg. "127.0.0.1")
+ * --dbPort: <int>. optional. (eg. 27017)
+ *
+ * --training_dbType: <string> training_appdata DB type
+ * --training_dbName: <string>
+ * --training_dbHost: <string>. optional
+ * --training_dbPort: <int>. optional
+ *
+ * --validation_dbType: <string> validation_appdata DB type
+ * --validation_dbName: <string>
+ * --validation_dbHost: <string>. optional
+ * --validation_dbPort: <int>. optional
+ *
+ * --test_dbType: <string> test_appdata DB type
+ * --test_dbName: <string>
+ * --test_dbHost: <string>. optional
+ * --test_dbPort: <int>. optional
+ *
+ * --hdfsRoot: <string>. Root directory of the HDFS
+ *
+ * --appid: <int>
+ * --engineid: <int>
+ * --evalid: <int>
+ *
+ * --itypes: <string separated by white space>. eg "--itypes type1 type2". If no --itypes specified, then ALL itypes will be used.
+ *
+ * --trainingPercent: <double> (0.01 to 1). training set percentage
+ * --validationPercent: <double> (0.01 to 1). validation set percentage
+ * --testPercent: <double> (0.01 to 1). test set percentage
+ * --timeorder: <boolean>. Require total percentage < 1
+ *
+ */
+object U2ITrainingTestSplitTime {
+  def main(args: Array[String]) {
+    val parser = new scopt.OptionParser[U2ITrainingTestSplitTimeConfig]("u2itrainingtestsplit") {
+      head("u2itrainingtestsplit")
+      opt[String]("hadoop") required () action { (x, c) =>
+        c.copy(hadoop = x)
+      } text ("path to the 'hadoop' command")
+      opt[String]("pdioEvalJar") required () action { (x, c) =>
+        c.copy(pdioEvalJar = x)
+      } text ("path to PredictionIO Hadoop job JAR")
+      opt[String]("hdfsRoot") required () action { (x, c) =>
+        c.copy(hdfsRoot = x)
+      } text ("PredictionIO root path in HDFS")
+      opt[String]("localTempRoot") required () action { (x, c) =>
+        c.copy(localTempRoot = x)
+      } text ("local directory for temporary storage")
+      opt[Int]("appid") required () action { (x, c) =>
+        c.copy(appid = x)
+      } text ("the App ID of this offline evaluation")
+      opt[Int]("engineid") required () action { (x, c) =>
+        c.copy(engineid = x)
+      } text ("the Engine ID of this offline evaluation")
+      opt[Int]("evalid") required () action { (x, c) =>
+        c.copy(evalid = x)
+      } text ("the OfflineEval ID of this offline evaluation")
+      opt[Int]("sequenceNum") required () action { (x, c) =>
+        c.copy(sequenceNum = x)
+      } validate { x =>
+        if (x >= 1) success else failure("--sequenceNum must be >= 1")
+      } text ("sequence (iteration) number of the offline evaluation")
+      override def errorOnUnknownArgument = false
+    }
+    val logger = Logger(U2ITrainingTestSplitTime.getClass)
+
+    parser.parse(args, U2ITrainingTestSplitTimeConfig()) map { config =>
+      val hadoop = config.hadoop
+      val pdioEvalJar = config.pdioEvalJar
+      val hdfsRoot = config.hdfsRoot
+      val localTempRoot = config.localTempRoot
+      val appid = config.appid
+      val engineid = config.engineid
+      val evalid = config.evalid
+      val sequenceNum = config.sequenceNum
+      val argsString = args.mkString(" ")
+      val resplit = sequenceNum > 1
+
+      /** command */
+      if (!resplit) {
+        // prep
+        val splitPrepCmd = hadoop + " jar " + pdioEvalJar + " io.prediction.evaluations.scalding.commons.u2itrainingtestsplit.U2ITrainingTestSplitTimePrep " + argsString
+        executeCommandAndCheck(splitPrepCmd)
+      }
+
+      // copy the count to local tmp
+      val hdfsCountPath = U2ITrainingTestSplitFile(hdfsRoot, appid, engineid, evalid, "u2iCount.tsv")
+      val localCountPath = localTempRoot + "eval-" + evalid + "-u2iCount.tsv"
+      val localCountFile = new File(localCountPath)
+
+      // create parent dir
+      localCountFile.getParentFile().mkdirs()
+
+      // delete existing file first
+      if (localCountFile.exists()) localCountFile.delete()
+
+      // get the count from hdfs
+      val getHdfsCountCmd = hadoop + " fs -getmerge " + hdfsCountPath + " " + localCountPath
+      executeCommandAndCheck(getHdfsCountCmd)
+
+      // read the local file and get the count
+      val lines = Source.fromFile(localCountPath).getLines
+      if (lines.isEmpty) throw new RuntimeException(s"Count file $localCountPath is empty")
+
+      val count = lines.next
+
+      // split
+      val splitCmd = hadoop + " jar " + pdioEvalJar + " io.prediction.evaluations.scalding.commons.u2itrainingtestsplit.U2ITrainingTestSplitTime " + argsString + " --totalCount " + count
+      executeCommandAndCheck(splitCmd)
+
+      // delete local tmp file
+      logger.info(s"Deleting temporary file $localCountPath...")
+      localCountFile.delete()
+    }
+
+    def executeCommandAndCheck(cmd: String) = {
+      logger.info(s"Executing $cmd...")
+      if ((cmd.!) != 0) throw new RuntimeException(s"Failed to execute '$cmd'")
+    }
+  }
+}
diff --git a/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/io/predictionio/evaluations/commons/u2itrainingtestsplit/U2ITrainingTestSplitTime.scala b/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/io/predictionio/evaluations/commons/u2itrainingtestsplit/U2ITrainingTestSplitTime.scala
deleted file mode 100644
index 5d845eb..0000000
--- a/process/engines/commons/evaluations/scala/u2itrainingtestsplit/src/main/scala/io/predictionio/evaluations/commons/u2itrainingtestsplit/U2ITrainingTestSplitTime.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-package io.prediction.evaluations.commons.trainingtestsplit
-
-import com.twitter.scalding.Args
-
-import io.prediction.commons.filepath.{U2ITrainingTestSplitFile}
-
-import java.io.File
-import scala.io.Source
-import scala.sys.process._
-
-/**
- * Wrapper for Scalding U2ITrainingTestSplitTime job
- *
- * Args:
- * --hadoop <string> hadoop command
- * --pdioEvalJar <string> the name of the Scalding U2ITrainingTestSplit job jar
- * --sequenceNum. <int>. the sequence number (starts from 1 for the 1st iteration and then increment for later iterations)
- * 
- * --dbType: <string> appdata DB type
- * --dbName: <string>
- * --dbHost: <string>. optional. (eg. "127.0.0.1")
- * --dbPort: <int>. optional. (eg. 27017)
- *
- * --training_dbType: <string> training_appadta DB type
- * --training_dbName: <string>
- * --training_dbHost: <string>. optional
- * --training_dbPort: <int>. optional
- * 
- * --validation_dbType: <string> validation_appdata DB type
- * --validation_dbName: <string>
- * --validation_dbHost: <string>. optional
- * --validation_dbPort: <int>. optional
- *
- * --test_dbType: <string> test_appdata DB type
- * --test_dbName: <string>
- * --test_dbHost: <string>. optional
- * --test_dbPort: <int>. optional
- *
- * --hdfsRoot: <string>. Root directory of the HDFS
- * 
- * --appid: <int>
- * --engineid: <int>
- * --evalid: <int>
- *
- * --itypes: <string separated by white space>. eg "--itypes type1 type2". If no --itypes specified, then ALL itypes will be used.
- *
- * --trainingPercent: <double> (0.01 to 1). training set percentage
- * --validationPercent: <dboule> (0.01 to 1). validation set percentage
- * --testPercent: <double> (0.01 to 1). test set percentage
- * --timeorder: <boolean>. Require total percentage < 1
- *
- */
-object U2ITrainingTestSplitTime {
-
-  def main(mainArgs: Array[String]) {
-
-    /** parse args*/
-    val args = Args(mainArgs)
-
-    val hadoop = args("hadoop")
-    val pdioEvalJar = args("pdioEvalJar")
-    
-    val hdfsRoot = args("hdfsRoot")
-    val localTempRoot = args("localTempRoot")
-
-    val appid = args("appid").toInt
-    val engineid = args("engineid").toInt
-    val evalid = args("evalid").toInt
-
-    val sequenceNum = args("sequenceNum").toInt
-    require((sequenceNum >= 1), "sequenceNum must be >= 1.")
-
-    val argsString = args.toString
-
-    val resplit: Boolean = (sequenceNum > 1)
-
-    /** command */
-    if (!resplit) {
-      // prep
-      val splitPrepCmd = hadoop + " jar " + pdioEvalJar + " io.prediction.evaluations.scalding.commons.u2itrainingtestsplit.U2ITrainingTestSplitTimePrep " + argsString
-      executeCommandAndCheck(splitPrepCmd)
-
-    }
-
-    // copy the count to local tmp
-    val hdfsCountPath = U2ITrainingTestSplitFile(hdfsRoot, appid, engineid, evalid, "u2iCount.tsv")
-    val localCountPath = localTempRoot + "eval-" + evalid + "-u2iCount.tsv"
-    
-    val localCountFile = new File(localCountPath)
-
-    // create parent dir
-    localCountFile.getParentFile().mkdirs()
-
-    // delete existing file first
-    if (localCountFile.exists()) localCountFile.delete() 
-
-    // get the count from hdfs
-    val getHdfsCountCmd = hadoop + " fs -getmerge " + hdfsCountPath + " " + localCountPath
-    executeCommandAndCheck(getHdfsCountCmd)
-
-    // read the local file and get the count
-    val lines = Source.fromFile(localCountPath).getLines
-    if (lines.isEmpty)
-      throw new RuntimeException("Count file %s is empty".format(localCountPath))
-    
-    val count = lines.next
-
-    // split
-    val splitCmd = hadoop +" jar " + pdioEvalJar + " io.prediction.evaluations.scalding.commons.u2itrainingtestsplit.U2ITrainingTestSplitTime " + argsString + " --totalCount " + count
-    executeCommandAndCheck(splitCmd)
-
-    // delete local tmp file
-    println("Deleting temporary file %s...".format(localCountPath))
-    localCountFile.delete()
-
-  }
-
-  def executeCommandAndCheck(cmd: String) = {
-    println("Executing %s...".format(cmd))
-    if ((cmd.!) != 0)
-      throw new RuntimeException("Failed to execute '%s'".format(cmd)) 
-  }
-  
-}
-
-
-
diff --git a/process/engines/itemrec/algorithms/hadoop/cascading/popularrank/build.gradle b/process/engines/itemrec/algorithms/hadoop/cascading/popularrank/build.gradle
index bedc3f6..a2cc5e7 100644
--- a/process/engines/itemrec/algorithms/hadoop/cascading/popularrank/build.gradle
+++ b/process/engines/itemrec/algorithms/hadoop/cascading/popularrank/build.gradle
@@ -1,6 +1,9 @@
 
 apply plugin: 'java'
 
+sourceCompatibility = '1.6'
+targetCompatibility = '1.6'
+ 
 repositories {
   mavenLocal()
   mavenCentral()
@@ -13,6 +16,7 @@
   compile( group: 'cascading', name: 'cascading-core', version: cascadingVersion )
   compile( group: 'cascading', name: 'cascading-hadoop', version: cascadingVersion )
   compile 'commons-cli:commons-cli:1.2'
+  compile( group: 'org.apache.hadoop', name: 'hadoop-core', version: '1.2.1')
 }
 
 jar {
@@ -26,4 +30,4 @@
   manifest {
     attributes( "Main-Class": "io/prediction/algorithms/cascading/itemrec/popularrank/PopularRankAlgo" )
   }
-}
\ No newline at end of file
+}
diff --git a/process/engines/itemrec/algorithms/scala/generic/build.sbt b/process/engines/itemrec/algorithms/scala/generic/build.sbt
new file mode 100644
index 0000000..747de64
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/generic/build.sbt
@@ -0,0 +1,22 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-itemrec-algorithms-scala-generic"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "itemrec.generic.dataprep" -> "io.prediction.algorithms.generic.itemrec.GenericDataPreparator")
+
+packJvmOpts := Map(
+  "itemrec.generic.dataprep" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/itemrec/algorithms/scala/generic/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/itemrec/algorithms/scala/generic/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/generic/src/main/resources/logback.xml
similarity index 99%
rename from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
rename to process/engines/itemrec/algorithms/scala/generic/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/generic/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparator.scala b/process/engines/itemrec/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparator.scala
new file mode 100644
index 0000000..7c00e62
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparator.scala
@@ -0,0 +1,318 @@
+package io.prediction.algorithms.generic.itemrec
+
+import io.prediction.commons.Config
+import io.prediction.commons.appdata.{ Item, U2IAction, User }
+
+import grizzled.slf4j.Logger
+import java.io.File
+import java.io.FileWriter
+import java.io.RandomAccessFile
+import java.io.BufferedWriter
+import scala.io.Source
+
+import com.twitter.scalding.Args
+
+/**
+ * Generic single machine data preparator for ItemRec engine.
+ * Read data from appdata and output the following files:
+ * - usersIndex.tsv (uindex uid)
+ * - itemsIndex.tsv (iindex iid itypes): only contain valid items to be recommended
+ * - ratings.mm (if --matrixMarket true ): matrix market format rating
+ * - ratings.csv (if --matrixMarket false): comma separated rating file
+ */
+object GenericDataPreparator {
+
+  /* constants */
+  final val ACTION_RATE = "rate"
+  final val ACTION_LIKE = "like"
+  final val ACTION_DISLIKE = "dislike"
+  final val ACTION_VIEW = "view"
+  final val ACTION_CONVERSION = "conversion"
+
+  // When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, 
+  // determine which action will be considered as final preference.
+  final val CONFLICT_LATEST: String = "latest" // use latest action
+  final val CONFLICT_HIGHEST: String = "highest" // use the one with highest score
+  final val CONFLICT_LOWEST: String = "lowest" // use the one with lowest score
+
+  /* global */
+  val logger = Logger(GenericDataPreparator.getClass)
+
+  //println(logger.isInfoEnabled)
+
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val outputDir: String,
+    val appid: Int,
+    val evalid: Option[Int],
+    val itypes: Option[List[String]],
+    val viewParam: Option[Int],
+    val likeParam: Option[Int],
+    val dislikeParam: Option[Int],
+    val conversionParam: Option[Int],
+    val conflictParam: String,
+    val recommendationTime: Option[Long],
+    val matrixMarket: Boolean)
+
+  def main(cmdArgs: Array[String]) {
+
+    logger.info("Running generic data preparator ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val outputDirArg = args("outputDir")
+    val appidArg = args("appid").toInt
+    val evalidArg = args.optional("evalid") map (x => x.toInt)
+    val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
+
+    val preItypesArg = args.list("itypes")
+    val itypesArg: Option[List[String]] = if (preItypesArg.mkString(",").length == 0) None else Option(preItypesArg)
+
+    // determine how to map actions to rating values
+    def getActionParam(name: String): Option[Int] = {
+      val actionParam: Option[Int] = args(name) match {
+        case "ignore" => None
+        case x => Some(x.toInt)
+      }
+      actionParam
+    }
+
+    val viewParamArg: Option[Int] = getActionParam("viewParam")
+    val likeParamArg: Option[Int] = getActionParam("likeParam")
+    val dislikeParamArg: Option[Int] = getActionParam("dislikeParam")
+    val conversionParamArg: Option[Int] = getActionParam("conversionParam")
+
+    val conflictParamArg: String = args("conflictParam")
+
+    // check if the conflictParam is valid
+    require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " + conflictParamArg + " is not valid.")
+
+    val recommendationTimeArg = args.optional("recommendationTime").map(_.toLong)
+
+    // write data in matrix market format
+    val matrixMarketArg: Boolean = args.optional("matrixMarket").map(x => x.toBoolean).getOrElse(true)
+
+    val arg = JobArg(
+      outputDir = outputDirArg,
+      appid = appidArg,
+      evalid = evalidArg,
+      itypes = itypesArg,
+      viewParam = viewParamArg,
+      likeParam = likeParamArg,
+      dislikeParam = dislikeParamArg,
+      conversionParam = conversionParamArg,
+      conflictParam = conflictParamArg,
+      recommendationTime = recommendationTimeArg,
+      matrixMarket = matrixMarketArg
+    )
+
+    /* run job */
+    dataPrep(arg)
+    cleanup(arg)
+
+  }
+
+  case class RatingData(
+    uid: Int,
+    iid: Int,
+    rating: Int,
+    t: Long)
+
+  def dataPrep(arg: JobArg) = {
+
+    // NOTE: if OFFLINE_EVAL, read from training set, and use evalid as appid when read Items and U2iActions
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val usersDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataUsers
+    else
+      commonsConfig.getAppdataTrainingUsers
+
+    val itemsDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataItems
+    else
+      commonsConfig.getAppdataTrainingItems
+
+    val u2iDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataU2IActions
+    else
+      commonsConfig.getAppdataTrainingU2IActions
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    // create outputDir if doesn't exist yet.
+    val outputDir = new File(arg.outputDir)
+    outputDir.mkdirs()
+
+    /* write user index */
+    // convert to Map for later lookup
+    // assuming number of users can be fit into memory.
+    val usersMap: Map[String, Int] = usersDb.getByAppid(appid).map(_.id).zipWithIndex
+      .map { case (uid, index) => (uid, index + 1) }.toMap // +1 to make it starting from 1
+
+    val usersIndexWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + "usersIndex.tsv")))
+    usersMap.foreach {
+      case (uid, uindex) =>
+        usersIndexWriter.write(s"${uindex}\t${uid}\n")
+    }
+    usersIndexWriter.close()
+
+    case class ItemData(
+      val iindex: Int,
+      val itypes: Seq[String],
+      val starttime: Option[Long],
+      val endtime: Option[Long])
+
+    val itemsMap: Map[String, ItemData] = arg.itypes.map { itypes =>
+      itemsDb.getByAppidAndItypes(appid, itypes)
+    }.getOrElse {
+      itemsDb.getByAppid(appid)
+    }.zipWithIndex
+      .map {
+        case (item, index) =>
+          val itemData = ItemData(
+            iindex = index + 1, // +1 to make index starting from 1 (required by graphchi)
+            itypes = item.itypes,
+            starttime = item.starttime.map[Long](_.getMillis()),
+            endtime = item.endtime.map[Long](_.getMillis())
+          )
+          (item.id -> itemData)
+      }.toMap
+
+    // 
+    /* write item index (iindex iid itypes) */
+    val itemsIndexWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + "itemsIndex.tsv")))
+    // NOTE: only write valid items (eg. valid starttime and endtime)
+    itemsMap.filter {
+      case (iid, itemData) =>
+        itemTimeFilter(true, itemData.starttime, itemData.endtime, arg.recommendationTime)
+    }.foreach {
+      case (iid, itemData) =>
+        val itypes = itemData.itypes.mkString(",")
+        itemsIndexWriter.write(s"${itemData.iindex}\t${iid}\t${itypes}\n")
+    }
+    itemsIndexWriter.close()
+
+    /* write u2i ratings */
+
+    val u2iRatings = u2iDb.getAllByAppid(appid)
+      .filter { u2i =>
+        val validAction = isValidAction(u2i, arg.likeParam, arg.dislikeParam, arg.viewParam, arg.conversionParam)
+        val validUser = usersMap.contains(u2i.uid)
+        val validItem = itemsMap.contains(u2i.iid)
+        (validAction && validUser && validItem)
+      }.map { u2i =>
+        val rating = convertToRating(u2i, arg.likeParam, arg.dislikeParam, arg.viewParam, arg.conversionParam)
+
+        RatingData(
+          uid = usersMap(u2i.uid),
+          iid = itemsMap(u2i.iid).iindex,
+          rating = rating,
+          t = u2i.t.getMillis
+        )
+      }.toSeq
+
+    if (!u2iRatings.isEmpty) {
+
+      val ratingReduced = u2iRatings.groupBy(x => (x.iid, x.uid))
+        .mapValues { v =>
+          v.reduce { (a, b) =>
+            resolveConflict(a, b, arg.conflictParam)
+          }
+        }.values
+        .toSeq
+        .sortBy { x: RatingData =>
+          (x.iid, x.uid)
+        }
+
+      val fileName = if (arg.matrixMarket) "ratings.mm" else "ratings.csv"
+      val ratingsWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + fileName))) // intermediate file
+
+      if (arg.matrixMarket) {
+        ratingsWriter.write("%%MatrixMarket matrix coordinate real general\n")
+        ratingsWriter.write(s"${usersMap.size} ${itemsMap.size} ${ratingReduced.size}\n")
+      }
+
+      ratingReduced.foreach { r =>
+        if (arg.matrixMarket) {
+          ratingsWriter.write(s"${r.uid} ${r.iid} ${r.rating}\n")
+        } else {
+          ratingsWriter.write(s"${r.uid},${r.iid},${r.rating}\n")
+        }
+      }
+
+      ratingsWriter.close()
+    }
+
+  }
+
+  def itemTimeFilter(enable: Boolean, starttime: Option[Long], endtime: Option[Long], recommendationTime: Option[Long]): Boolean = {
+    if (enable) {
+      recommendationTime.map { recTime =>
+        (starttime, endtime) match {
+          case (Some(start), None) => (recTime >= start)
+          case (Some(start), Some(end)) => ((recTime >= start) && (recTime < end))
+          case (None, Some(end)) => (recTime < end)
+          case (None, None) => true
+        }
+      }.getOrElse(true)
+    } else true
+  }
+
+  def isValidAction(u2i: U2IAction, likeParam: Option[Int], dislikeParam: Option[Int],
+    viewParam: Option[Int], conversionParam: Option[Int]): Boolean = {
+    val keepThis: Boolean = u2i.action match {
+      case ACTION_RATE => true
+      case ACTION_LIKE => (likeParam != None)
+      case ACTION_DISLIKE => (dislikeParam != None)
+      case ACTION_VIEW => (viewParam != None)
+      case ACTION_CONVERSION => (conversionParam != None)
+      case _ => {
+        assert(false, "Action type " + u2i.action + " in u2iActions appdata is not supported!")
+        false // all other unsupported actions
+      }
+    }
+    keepThis
+  }
+
+  def convertToRating(u2i: U2IAction, likeParam: Option[Int], dislikeParam: Option[Int],
+    viewParam: Option[Int], conversionParam: Option[Int]): Int = {
+    val rating: Int = u2i.action match {
+      case ACTION_RATE => u2i.v.get.toInt
+      case ACTION_LIKE => likeParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_DISLIKE => dislikeParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_VIEW => viewParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_CONVERSION => conversionParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+    }
+    rating
+  }
+
+  def resolveConflict(a: RatingData, b: RatingData, conflictParam: String) = {
+    conflictParam match {
+      case CONFLICT_LATEST => if (a.t > b.t) a else b
+      case CONFLICT_HIGHEST => if (a.rating > b.rating) a else b
+      case CONFLICT_LOWEST => if (a.rating < b.rating) a else b
+    }
+  }
+
+  def cleanup(arg: JobArg) = {
+
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/generic/src/test/resources/application.conf b/process/engines/itemrec/algorithms/scala/generic/src/test/resources/application.conf
new file mode 100644
index 0000000..9e296b6
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/generic/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_training_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_generic_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_generic_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_generic_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_generic_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/generic/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemrec/algorithms/scala/generic/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/generic/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparatorSpec.scala b/process/engines/itemrec/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparatorSpec.scala
new file mode 100644
index 0000000..c4b5fa4
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/itemrec/generic/GenericDataPreparatorSpec.scala
@@ -0,0 +1,166 @@
+package io.prediction.algorithms.generic.itemrec
+
+import io.prediction.commons.Config
+import io.prediction.commons.appdata.{ User, Item, U2IAction }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import com.mongodb.casbah.Imports._
+
+class GenericDataPreparatorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_appdata_generic_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val appdataUsers = commonConfig.getAppdataUsers
+  val appdataItems = commonConfig.getAppdataItems
+  val appdataU2IActions = commonConfig.getAppdataU2IActions
+
+  "GenericDataPreparator with basic rate action app data" should {
+    val appid = 23
+    // insert a few users into db
+    val user = User(
+      id = "u0",
+      appid = appid,
+      ct = DateTime.now,
+      latlng = None,
+      inactive = None,
+      attributes = None)
+
+    appdataUsers.insert(user.copy(id = "u0"))
+    appdataUsers.insert(user.copy(id = "u1"))
+    appdataUsers.insert(user.copy(id = "u2"))
+
+    // insert a few items into db
+    val item = Item(
+      id = "i0",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("t1", "t2"),
+      starttime = None,
+      endtime = None,
+      price = None,
+      profit = None,
+      latlng = None,
+      inactive = None,
+      attributes = None)
+
+    appdataItems.insert(item.copy(id = "i0", itypes = List("t1", "t2")))
+    appdataItems.insert(item.copy(id = "i1", itypes = List("t1")))
+    appdataItems.insert(item.copy(id = "i2", itypes = List("t2", "t3")))
+    appdataItems.insert(item.copy(id = "i3", itypes = List("t3")))
+
+    // insert a few u2i into db
+    val u2i = U2IAction(
+      appid = appid,
+      action = "rate",
+      uid = "u0",
+      iid = "i0",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(3),
+      price = None)
+
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i0", action = "rate", v = Some(3)))
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i1", action = "rate", v = Some(4)))
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i2", action = "rate", v = Some(1)))
+
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i0", action = "rate", v = Some(2)))
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i1", action = "rate", v = Some(1)))
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i3", action = "rate", v = Some(3)))
+
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i1", action = "rate", v = Some(5)))
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i2", action = "rate", v = Some(1)))
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i3", action = "rate", v = Some(4)))
+
+    val outputDir = "/tmp/pio_test/"
+    val args = Map(
+      "outputDir" -> outputDir,
+      "appid" -> appid,
+      "viewParam" -> 4,
+      "likeParam" -> 3,
+      "dislikeParam" -> 1,
+      "conversionParam" -> 2,
+      "conflictParam" -> "highest"
+    )
+
+    val argsArray = args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+
+    GenericDataPreparator.main(argsArray)
+
+    "correctly generate usersIndex.tsv" in {
+      val usersIndex = Source.fromFile(s"${outputDir}usersIndex.tsv")
+        .getLines()
+        .toList
+
+      val expected = List(
+        "1\tu0",
+        "2\tu1",
+        "3\tu2")
+
+      usersIndex must containTheSameElementsAs(expected)
+    }
+
+    "correctly generate itemsIndex.tsv" in {
+      val itemsIndex = Source.fromFile(s"${outputDir}itemsIndex.tsv")
+        .getLines()
+        .toList
+
+      val expected = List(
+        "1\ti0\tt1,t2",
+        "2\ti1\tt1",
+        "3\ti2\tt2,t3",
+        "4\ti3\tt3"
+      )
+      itemsIndex must containTheSameElementsAs(expected)
+    }
+
+    "correctly generate ratings.mm" in {
+      val ratingsLines = Source.fromFile(s"${outputDir}ratings.mm")
+        .getLines()
+
+      val headers = ratingsLines.take(2).toList
+
+      val ratings = ratingsLines.toList
+
+      val expectedHeaders = List(
+        "%%MatrixMarket matrix coordinate real general",
+        "3 4 9"
+      )
+
+      val expected = List(
+        "1 1 3",
+        "1 2 4",
+        "1 3 1",
+        "2 1 2",
+        "2 2 1",
+        "2 4 3",
+        "3 2 5",
+        "3 3 1",
+        "3 4 4"
+      )
+      headers must be_==(expectedHeaders) and
+        (ratings must containTheSameElementsAs(expected))
+    }
+  }
+
+  // TODO: test csv format
+
+  // TODO: test mixed and conflict actions
+
+  // TODO: test start and end time
+
+  // TODO: test evalid != None
+
+  // clean up when finish test
+  step(cleanUp())
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/graphchi/build.sbt b/process/engines/itemrec/algorithms/scala/graphchi/build.sbt
new file mode 100644
index 0000000..6c8212e
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/graphchi/build.sbt
@@ -0,0 +1,23 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-itemrec-algorithms-scala-graphchi"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1",
+  "org.scalanlp" %% "breeze" % "0.6.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "itemrec.graphchi.modelcon" -> "io.prediction.algorithms.graphchi.itemrec.GraphChiModelConstructor")
+
+packJvmOpts := Map(
+  "itemrec.graphchi.modelcon" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/itemrec/algorithms/scala/graphchi/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemrec/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/itemrec/algorithms/scala/graphchi/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/graphchi/src/main/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemrec/algorithms/scala/graphchi/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructor.scala b/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructor.scala
new file mode 100644
index 0000000..664e68a
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructor.scala
@@ -0,0 +1,201 @@
+package io.prediction.algorithms.graphchi.itemrec
+
+import grizzled.slf4j.Logger
+import breeze.linalg._
+import com.twitter.scalding.Args
+import scala.io.Source
+import scala.collection.mutable.PriorityQueue
+
+import io.prediction.algorithms.graphchi.itemrec.MatrixMarketReader
+import io.prediction.commons.Config
+import io.prediction.commons.modeldata.{ ItemRecScore }
+
+/**
+ * Input files:
+ * - usersIndex.tsv (uindex uid)
+ * - itemsIndex.tsv (iindex iid itypes) (only recommend items in this list)
+ * - ratings.mm ratings file in Matrix Market format
+ * - ratings.mm_U.mm User x feature matrix generated by GraphChi
+ * - ratings.mm_V.mm Item x feature matrix generated by GraphChi
+ *
+ */
+object GraphChiModelConstructor {
+
+  /* global */
+  val logger = Logger(GraphChiModelConstructor.getClass)
+  //println(logger.isInfoEnabled)
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val inputDir: String,
+    val appid: Int,
+    val algoid: Int,
+    val evalid: Option[Int],
+    val modelSet: Boolean,
+    val unseenOnly: Boolean,
+    val numRecommendations: Int)
+
+  def main(cmdArgs: Array[String]) {
+    logger.info("Running model constructor for GraphChi ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val arg = JobArg(
+      inputDir = args("inputDir"),
+      appid = args("appid").toInt,
+      algoid = args("algoid").toInt,
+      evalid = args.optional("evalid") map (x => x.toInt),
+      modelSet = args("modelSet").toBoolean,
+      unseenOnly = args("unseenOnly").toBoolean,
+      numRecommendations = args("numRecommendations").toInt
+    )
+
+    /* run job */
+    modelCon(arg)
+    cleanUp(arg)
+  }
+
+  def modelCon(arg: JobArg) = {
+
+    // NOTE: if OFFLINE_EVAL, write to training modeldata and use evalid as appid
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val modeldataDb = if (!OFFLINE_EVAL)
+      commonsConfig.getModeldataItemRecScores
+    else
+      commonsConfig.getModeldataTrainingItemRecScores
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    // user index file
+    // uindex -> uid
+    val usersMap: Map[Int, String] = Source.fromFile(s"${arg.inputDir}usersIndex.tsv").getLines()
+      .map[(Int, String)] { line =>
+        val (uindex, uid) = try {
+          val data = line.split("\t")
+          (data(0).toInt, data(1))
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get user index and uid in line: ${line}. ${e}")
+          }
+        }
+        (uindex, uid)
+      }.toMap
+
+    case class ItemData(
+      val iid: String,
+      val itypes: Seq[String])
+
+    // item index file (iindex iid itypes)
+    // iindex -> ItemData
+    val itemsMap: Map[Int, ItemData] = Source.fromFile(s"${arg.inputDir}itemsIndex.tsv")
+      .getLines()
+      .map[(Int, ItemData)] { line =>
+        val (iindex, item) = try {
+          val fields = line.split("\t")
+          val itemData = ItemData(
+            iid = fields(1),
+            itypes = fields(2).split(",").toList
+          )
+          (fields(0).toInt, itemData)
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        (iindex, item)
+      }.toMap
+
+    // ratings file (for unseen filtering) 
+    val seenSet: Map[Int, Set[Int]] = if (arg.unseenOnly) {
+      Source.fromFile(s"${arg.inputDir}ratings.mm")
+        .getLines()
+        // discard all empty line and comments
+        .filter(line => (line.length != 0) && (!line.startsWith("%")))
+        .drop(1) // 1st line is matrix size
+        .map { line =>
+          val (u, i) = try {
+            val fields = line.split("""\s+""")
+            // u, i, rating
+            (fields(0).toInt, fields(1).toInt)
+          } catch {
+            case e: Exception => throw new RuntimeException(s"Cannot get user and item index from this line: ${line}. ${e}")
+          }
+          (u, i)
+        }.toSeq.groupBy(_._1)
+        .mapValues(_.map(_._2).toSet)
+    } else {
+      Map() // empty map
+    }
+
+    // feature x user matrix
+    val userMatrix = MatrixMarketReader.readDense(s"${arg.inputDir}ratings.mm_U.mm")
+
+    // feature x item matrix
+    val itemMatrix = MatrixMarketReader.readDense(s"${arg.inputDir}ratings.mm_V.mm")
+
+    val allUindex = for (uindex <- 1 to userMatrix.cols if usersMap.contains(uindex)) yield (uindex, userMatrix(::, uindex - 1), seenSet.getOrElse(uindex, Set()))
+
+    val validIindex = for (iindex <- 1 to itemMatrix.cols if validItemFilter(true, iindex, itemsMap)) yield (iindex)
+
+    val allScores = allUindex.par
+      .foreach {
+        case (uindex, userVector, seenItemSet) =>
+          val scores = validIindex.filter(iindex => unseenItemFilter(arg.unseenOnly, iindex, seenItemSet))
+            .map { iindex =>
+              // NOTE: DenseMatrix index starts from 0, so minus 1 (but graphchi user and item index starts from 1)
+              val score = userVector dot itemMatrix(::, iindex - 1)
+              (iindex, score)
+            }
+
+          val topScores = getTopN(scores, arg.numRecommendations)(ScoreOrdering.reverse)
+          //(uindex, topScores)
+          modeldataDb.insert(ItemRecScore(
+            uid = usersMap(uindex),
+            iids = topScores.map(x => itemsMap(x._1).iid),
+            scores = topScores.map(_._2),
+            itypes = topScores.map(x => itemsMap(x._1).itypes),
+            appid = appid,
+            algoid = arg.algoid,
+            modelset = arg.modelSet))
+      }
+  }
+
+  def unseenItemFilter(enable: Boolean, iindex: Int, seenSet: Set[Int]): Boolean = {
+    if (enable) (!seenSet(iindex)) else true
+  }
+
+  def validItemFilter(enable: Boolean, iindex: Int, validMap: Map[Int, Any]): Boolean = {
+    if (enable) validMap.contains(iindex) else true
+  }
+
+  def cleanUp(arg: JobArg) = {
+
+  }
+
+  object ScoreOrdering extends Ordering[(Int, Double)] {
+    override def compare(a: (Int, Double), b: (Int, Double)) = a._2 compare b._2
+  }
+
+  def getTopN[T](s: Seq[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+    val q = PriorityQueue()
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+}
diff --git a/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/MatrixMarketReader.scala b/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/MatrixMarketReader.scala
new file mode 100644
index 0000000..851b48c
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemrec/MatrixMarketReader.scala
@@ -0,0 +1,80 @@
+package io.prediction.algorithms.graphchi.itemrec
+
+import breeze.linalg._
+import grizzled.slf4j.Logger
+import scala.io.Source
+
+object MatrixMarketReader {
+
+  val logger = Logger(MatrixMarketReader.getClass)
+
+  /* read dense matrix market from file and return DenseMatrix object */
+  def readDense(path: String): DenseMatrix[Double] = {
+    val matrixFile = Source.fromFile(path)
+    // skip line starts with %
+    // skip empty line
+    val lines = matrixFile.getLines()
+      .filter(line => (line.length != 0) && (!line.startsWith("%")))
+
+    // first line is matrix size
+    if (lines.hasNext) {
+      val line = lines.next()
+      val size = line.split("""\s+""")
+
+      // matrix market dense format is column oriented
+      /* eg:
+       * 2 3
+       * 1
+       * 2
+       * 3
+       * 4
+       * 5
+       * 6
+       * becomes
+       * 1 4
+       * 2 5
+       * 3 6
+       */
+      val (colNum, rowNum): (Int, Int) = try {
+        (size(0).toInt, size(1).toInt)
+      } catch {
+        case e: Exception =>
+          throw new RuntimeException(s"Cannot extract matrix size from the line: ${line}. ${e}")
+      }
+
+      logger.debug(s"${rowNum}, ${colNum}")
+      val matrix = DenseMatrix.zeros[Double](rowNum, colNum)
+
+      var r = 0
+      var c = 0
+      lines.foreach { line =>
+        if (c >= colNum) {
+          throw new RuntimeException(s"Number of elements greater than the defined size: ${rowNum} ${colNum}")
+        } else {
+
+          logger.debug(s"${r}, ${c} = ${line}")
+          try {
+            matrix(r, c) = line.toDouble
+          } catch {
+            case e: Exception =>
+              throw new RuntimeException(s"Cannot convert line: ${line} to double. ${e}")
+          }
+          r += 1
+          if (r == rowNum) {
+            r = 0
+            c += 1
+          }
+        }
+      }
+      // c must == colNum when finish
+      if (c < colNum) {
+        throw new RuntimeException(s"Number of elements smaller than the defined size: ${rowNum} ${colNum}")
+      }
+      logger.debug(matrix)
+      matrix
+    } else {
+      DenseMatrix.zeros[Double](0, 0)
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/application.conf b/process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/application.conf
new file mode 100644
index 0000000..45b3078
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_training_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_graphchi_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_graphchi_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_graphchi_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_graphchi_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructorSpec.scala b/process/engines/itemrec/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructorSpec.scala
new file mode 100644
index 0000000..e50c4ce
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemrec/GraphChiModelConstructorSpec.scala
@@ -0,0 +1,454 @@
+package io.prediction.algorithms.graphchi.itemrec
+
+import io.prediction.commons.Config
+import io.prediction.commons.settings.{ App, Algo }
+import io.prediction.commons.modeldata.{ ItemRecScore }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import com.mongodb.casbah.Imports._
+
+class GraphChiModelConstructorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_modeldata_graphchi_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val modeldataItemRecScores = commonConfig.getModeldataItemRecScores
+
+  // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
+  // (eg. 3.5 vs 3.499999999999).
+  // (eg. 0.6666666666 vs 0.666666667)
+  def roundUpScores(irec: ItemRecScore): ItemRecScore = {
+    irec.copy(
+      scores = irec.scores.map { x =>
+        BigDecimal(x).setScale(9, BigDecimal.RoundingMode.HALF_UP).toDouble
+      }
+    )
+  }
+
+  def argMapToArray(args: Map[String, Any]): Array[String] = {
+    args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+  }
+
+  "GraphChiModelConstructor" should {
+
+    val inputDir = "/tmp/pio_test/"
+
+    val inputDirFile = new File(inputDir)
+    inputDirFile.mkdirs()
+
+    val usersIndex = List(
+      "1\tu1",
+      "2\tu2",
+      "3\tu3")
+
+    val itemsIndex = List(
+      "1\ti1\tt1,t2",
+      "2\ti2\tt1",
+      "3\ti3\tt2,t3",
+      "4\ti4\tt3"
+    )
+
+    val ratingsMM = List(
+      "%%MatrixMarket matrix coordinate real general",
+      "3 4 6",
+      "1 1 3",
+      "1 2 4",
+      "1 3 1",
+      "2 1 2",
+      "2 4 3",
+      "3 2 5"
+    )
+
+    /*
+     * 1.2 2.4 1.1
+     * 4.3 1.1 2.4
+     */
+    val ratingsUMM = List(
+      "%%MatrixMarket matrix array real general",
+      "%This file contains ALS output matrix U. In each row D factors of a single user node.",
+      "3 2",
+      "1.2",
+      "4.3",
+      "2.4",
+      "1.1",
+      "1.1",
+      "2.4"
+    )
+
+    /*
+     * 2.1 3.1 2.6 1.9
+     * 1.5 1.2 1.3 2.0
+     */
+    val ratingsVMM = List(
+      "%%MatrixMarket matrix array real general",
+      "%This file contains ALS output matrix V. In each row D factors of a single item node.",
+      "4 2",
+      "2.1",
+      "1.5",
+      "3.1",
+      "1.2",
+      "2.6",
+      "1.3",
+      "1.9",
+      "2.0"
+    )
+
+    /* Ut V:
+     * 8.97 8.88  8.71  10.88
+     * 6.69 8.76  7.67  6.76
+     * 5.91 6.29  5.98  6.89
+     */
+
+    def writeToFile(lines: List[String], filePath: String) = {
+      val writer = new BufferedWriter(new FileWriter(new File(filePath)))
+      lines.foreach { line =>
+        writer.write(s"${line}\n")
+      }
+      writer.close()
+    }
+
+    writeToFile(usersIndex, s"${inputDir}usersIndex.tsv")
+    writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+    writeToFile(ratingsMM, s"${inputDir}ratings.mm")
+    writeToFile(ratingsUMM, s"${inputDir}ratings.mm_U.mm")
+    writeToFile(ratingsVMM, s"${inputDir}ratings.mm_V.mm")
+
+    val appid = 24
+
+    implicit val app = App(
+      id = appid,
+      userid = 0,
+      appkey = "1234",
+      display = "12345",
+      url = None,
+      cat = None,
+      desc = None,
+      timezone = "UTC"
+    )
+
+    "correctly writes ItemRecScores with larger numRecommendations" in {
+
+      val algoid = 25
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 5
+      )
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i4", "i1", "i2", "i3"),
+        scores = Seq(10.88, 8.97, 8.88, 8.71),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2"), Seq("t1"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i3", "i4", "i1"),
+        scores = Seq(8.76, 7.67, 6.76, 6.69),
+        itypes = Seq(Seq("t1"), Seq("t2", "t3"), Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u3Expected = ItemRecScore(
+        uid = "u3",
+        iids = Seq("i4", "i2", "i3", "i1"),
+        scores = Seq(6.89, 6.29, 5.98, 5.91),
+        itypes = Seq(Seq("t3"), Seq("t1"), Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+      val u3ItemRec = modeldataItemRecScores.getByUid("u3")
+
+      // don't check id
+      u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected)) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected))) and
+        (u3ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u3Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with smaller numRecommendations" in {
+
+      val algoid = 26
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 2
+      )
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i4", "i1"),
+        scores = Seq(10.88, 8.97),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i3"),
+        scores = Seq(8.76, 7.67),
+        itypes = Seq(Seq("t1"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u3Expected = ItemRecScore(
+        uid = "u3",
+        iids = Seq("i4", "i2"),
+        scores = Seq(6.89, 6.29),
+        itypes = Seq(Seq("t3"), Seq("t1")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+      val u3ItemRec = modeldataItemRecScores.getByUid("u3")
+
+      // don't check id
+      u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected)) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected))) and
+        (u3ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u3Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with subset itemsIndex.tsv" in {
+
+      val inputDir = "/tmp/pio_test/subset/"
+
+      val inputDirFile = new File(inputDir)
+      inputDirFile.mkdirs()
+
+      val itemsIndex = List(
+        "1\ti1\tt1,t2",
+        "3\ti3\tt2,t3",
+        "4\ti4\tt3"
+      )
+
+      writeToFile(usersIndex, s"${inputDir}usersIndex.tsv")
+      writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+      writeToFile(ratingsMM, s"${inputDir}ratings.mm")
+      writeToFile(ratingsUMM, s"${inputDir}ratings.mm_U.mm")
+      writeToFile(ratingsVMM, s"${inputDir}ratings.mm_V.mm")
+
+      val algoid = 27
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 5
+      )
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i4", "i1", "i3"),
+        scores = Seq(10.88, 8.97, 8.71),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i3", "i4", "i1"),
+        scores = Seq(7.67, 6.76, 6.69),
+        itypes = Seq(Seq("t2", "t3"), Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u3Expected = ItemRecScore(
+        uid = "u3",
+        iids = Seq("i4", "i3", "i1"),
+        scores = Seq(6.89, 5.98, 5.91),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+      val u3ItemRec = modeldataItemRecScores.getByUid("u3")
+
+      // don't check id
+      u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected)) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected))) and
+        (u3ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u3Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with unseenOnly=true" in {
+
+      val algoid = 28
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> true,
+        "numRecommendations" -> 4
+      )
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i4"),
+        scores = Seq(10.88),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i3"),
+        scores = Seq(8.76, 7.67),
+        itypes = Seq(Seq("t1"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u3Expected = ItemRecScore(
+        uid = "u3",
+        iids = Seq("i4", "i3", "i1"),
+        scores = Seq(6.89, 5.98, 5.91),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+      val u3ItemRec = modeldataItemRecScores.getByUid("u3")
+
+      // don't check id
+      u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected)) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected))) and
+        (u3ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u3Expected)))
+
+    }
+
+  }
+
+  // TODO: test evalid != None
+
+  // clean up when finish test
+  step(cleanUp())
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/mahout/build.sbt b/process/engines/itemrec/algorithms/scala/mahout/build.sbt
index 8d0c61e..52dc73c 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/build.sbt
+++ b/process/engines/itemrec/algorithms/scala/mahout/build.sbt
@@ -1,35 +1,27 @@
-import AssemblyKeys._
+import xerial.sbt.Pack._
 
 name := "predictionio-process-itemrec-algorithms-scala-mahout"
 
-packageOptions += Package.ManifestAttributes(java.util.jar.Attributes.Name.MAIN_CLASS -> "io.prediction.commons.mahout.itemrec.MahoutJob")
+libraryDependencies ++= Seq(
+  "org.apache.mahout" % "mahout-core" % "0.9",
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
 
 parallelExecution in Test := false
 
-resolvers ++= Seq(
-  "Concurrent Maven Repo" at "http://conjars.org/repo",
-  "Clojars Repository" at "http://clojars.org/repo")
+packSettings
 
-assemblySettings
+packJarNameConvention := "full"
 
-test in assembly := {}
+packExpandedClasspath := true
 
-excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
-  val excludes = Set(
-    "jsp-api-2.1-6.1.14.jar",
-    "jsp-2.1-6.1.14.jar",
-    "jasper-compiler-5.5.12.jar",
-    "janino-2.5.16.jar",
-    "minlog-1.2.jar",
-    "mockito-all-1.8.5.jar",
-    "hadoop-core-1.0.4.jar")
-  cp filter { jar => excludes(jar.data.getName)}
-}
+packGenerateWindowsBatFile := false
 
-mergeStrategy in assembly <<= (mergeStrategy in assembly) { (old) =>
-  {
-    case ("org/xmlpull/v1/XmlPullParser.class") => MergeStrategy.rename
-    case ("org/xmlpull/v1/XmlPullParserException.class") => MergeStrategy.rename
-    case x => old(x)
-  }
-}
+packMain := Map(
+  "itemrec.mahout.mahoutjob" -> "io.prediction.algorithms.mahout.itemrec.MahoutJob",
+  "itemrec.mahout.modelcon" -> "io.prediction.algorithms.mahout.itemrec.MahoutModelConstructor")
+
+packJvmOpts := Map(
+  "itemrec.mahout.mahoutjob" -> Common.packCommonJvmOpts,
+  "itemrec.mahout.modelcon" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemrec/algorithms/scala/mahout/commons/build.sbt b/process/engines/itemrec/algorithms/scala/mahout/commons/build.sbt
deleted file mode 100644
index 5a3de06..0000000
--- a/process/engines/itemrec/algorithms/scala/mahout/commons/build.sbt
+++ /dev/null
@@ -1,3 +0,0 @@
-name := "PredictionIO-Process-ItemRec-Algorithms-Scala-Mahout-Commons"
-
-libraryDependencies ++= Seq("org.apache.mahout" % "mahout-core" % "0.8")
diff --git a/process/engines/itemrec/algorithms/scala/mahout/commons/src/main/scala/io/prediction/commons/mahout/itemrec/MahoutJob.scala b/process/engines/itemrec/algorithms/scala/mahout/commons/src/main/scala/io/prediction/commons/mahout/itemrec/MahoutJob.scala
deleted file mode 100644
index a8a1c1d..0000000
--- a/process/engines/itemrec/algorithms/scala/mahout/commons/src/main/scala/io/prediction/commons/mahout/itemrec/MahoutJob.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-package io.prediction.commons.mahout.itemrec
-
-import java.io.File
-import java.io.FileWriter
-
-import scala.collection.JavaConversions._
-
-import scala.sys.process._
-
-import io.prediction.commons.filepath.{DataFile, AlgoFile}
-import io.prediction.commons.Config
-
-import org.apache.mahout.cf.taste.recommender.Recommender
-import org.apache.mahout.cf.taste.model.DataModel
-import org.apache.mahout.cf.taste.impl.model.file.FileDataModel
-
-/** main function to run non-distributed Mahout Job */
-object MahoutJob {
-
-  def main(args: Array[String]) {
-    if (args.size < 1) {
-      println("Please specify Mahout job class name")
-      println("Example. <job class name> --param1 1 --param2 2")
-      System.exit(1)
-    }
-    
-    val jobName = args(0)
-
-    println("Running Job %s...".format(jobName))
-
-    //println(args.mkString(" "))
-    val (argMap, lastkey) = args.drop(1).foldLeft((Map[String, String](), "")) { (res, data) => 
-      val (argMap, lastkey) = res
-      val key: Option[String] = if (data.startsWith("--")) Some(data.stripPrefix("--")) else None
-
-      key map { k =>
-        (argMap ++ Map(k -> ""), k)
-      } getOrElse {
-        val orgData = argMap(lastkey)
-        val newData = orgData match {
-          case "" => data
-          case _ => orgData + " " + data
-        } 
-        (argMap ++ Map(lastkey -> newData), lastkey)
-      }
-    }
-    //println(argMap)
-
-    val job = Class.forName(jobName).
-      getConstructor().
-      newInstance().
-      asInstanceOf[MahoutJob]
-  
-    val runArgs = job.prepare(argMap)
-    
-    val finishArgs = job.run(runArgs)
-
-    val cleanupArgs = job.finish(finishArgs)
-
-    job.cleanup(cleanupArgs)
-
-  }
-
-}
-
-/** Wrapper job class for Mahout algo */
-abstract class MahoutJob {
-
-  val commonsConfig = new Config
-  /** Try search path if hadoop home is not set. */
-  val hadoopCommand = commonsConfig.settingsHadoopHome map { h => h+"/bin/hadoop" } getOrElse { "hadoop" }
-
-  /** Get required arg */
-  def getArg(args: Map[String, String], key: String): String = {
-    if (!args.contains(key)) sys.error("Please specify value for parameter --" + key)
-
-    args(key)
-  }
-
-  /** Get optional arg */
-  def getArgOpt(args: Map[String, String], key: String, default: String): String = {
-    if (args.contains(key)) args(key) else default
-  }
-
-  def getArgOpt(args: Map[String, String], key: String): Option[String] = {
-    if (args.contains(key)) Some(args(key)) else None
-  }
-
-  /** Prepare stage for algo */
-  def prepare(args: Map[String, String]): Map[String, String] = {
-    
-    val hdfsRoot = getArg(args, "hdfsRoot") // required
-    val localTempRoot = getArg(args, "localTempRoot") // required
-    val appid = getArg(args, "appid").toInt // required
-    val engineid = getArg(args, "engineid").toInt // required
-    val algoid = getArg(args, "algoid").toInt // required
-    val evalid: Option[Int] = getArgOpt(args, "evalid") map { _.toInt }
-
-    // input file
-    val hdfsRatingsPath = DataFile(hdfsRoot, appid, engineid, algoid, evalid, "ratings.csv")
-
-    val localRatingsPath = localTempRoot + "algo-" + algoid + "-ratings.csv"
-    val localRatingsFile = new File(localRatingsPath)
-    localRatingsFile.getParentFile().mkdirs() // create parent dir
-    if (localRatingsFile.exists()) localRatingsFile.delete() // delete existing file first
-
-    val copyFromHdfsRatingsCmd = s"$hadoopCommand fs -getmerge $hdfsRatingsPath $localRatingsPath"
-    //logger.info("Executing '%s'...".format(copyFromHdfsRatingsCmd))
-    println("Executing '%s'...".format(copyFromHdfsRatingsCmd))
-    if ((copyFromHdfsRatingsCmd.!) != 0)
-      throw new RuntimeException("Failed to execute '%s'".format(copyFromHdfsRatingsCmd))
-
-    // output file
-    val localPredictedPath = localTempRoot + "algo-"+ algoid + "-predicted.tsv"
-    val localPredictedFile = new File(localPredictedPath)
-
-    localPredictedFile.getParentFile().mkdirs() // create parent dir
-    if (localPredictedFile.exists()) localPredictedFile.delete() // delete existing file first
-
-    val hdfsPredictedPath = AlgoFile(hdfsRoot, appid, engineid, algoid, evalid, "predicted.tsv")
-    val hdfsPredictedDir = AlgoFile(hdfsRoot, appid, engineid, algoid, evalid, "")
-
-    // create hdfs directory for hdfsPredictedPath
-    val createHdfsDirCmd = s"$hadoopCommand fs -mkdir -p $hdfsPredictedDir"
-    println("Executing '%s'...".format(createHdfsDirCmd))
-    createHdfsDirCmd.!
-    
-    args ++ Map("input" -> localRatingsPath, "output" -> localPredictedPath, "hdfsOutput" -> hdfsPredictedPath)
-  }
-
-  /** create and return Mahout's Recommender object. */
-  def buildRecommender(dataModel: DataModel, args: Map[String, String]): Recommender
-
-  /** Run algo job.
-    In default implementation, the prepare() function copies the ratings.csv from HDFS to local temporary directory.
-    The run() function should read and process this local file (defined by --input arg) file and generate the prediction 
-    output file (defined by --output arg) for each user.
-    Then finish() function copies the local prediction output file to HDFS predicted.tsv
-  */
-  def run(args: Map[String, String]): Map[String, String] = {
-
-    val input = args("input")
-    val output = args("output")
-    val numRecommendations: Int = getArgOpt(args, "numRecommendations", "10").toInt
-    
-    val dataModel: DataModel = new FileDataModel(new File(input))
-    val recommender: Recommender = buildRecommender(dataModel, args)
-
-    // generate prediction output file
-    val outputWriter = new FileWriter(new File(output))
-
-    val userIds = dataModel.getUserIDs
-
-    while (userIds.hasNext) {
-      val uid = userIds.next
-      val rec = recommender.recommend(uid, numRecommendations)
-      if (rec.size != 0) {
-        val prediction = uid+"\t"+"[" + (rec map {x => x.getItemID +":"+x.getValue }).mkString(",") + "]"
-        outputWriter.write(prediction+"\n")
-      }
-    }
-
-    outputWriter.close()
-
-    args
-  }
-
-  /** finish stage for algo */
-  def finish(args: Map[String, String]): Map[String, String] = {
-
-    val localPredictedPath = args("output") // required
-    val hdfsPredictedPath = args("hdfsOutput")
-
-    // delete the hdfs file if it exists, otherwise copyFromLocal will fail.
-    val deleteHdfsPredictedCmd = s"$hadoopCommand fs -rmr $hdfsPredictedPath"
-    val copyToHdfsPredictedCmd = s"$hadoopCommand fs -copyFromLocal $localPredictedPath $hdfsPredictedPath"
-
-    //logger.info("Executing '%s'...".format(deleteHdfsPredictedCmd))
-    println("Executing '%s'...".format(deleteHdfsPredictedCmd))
-    deleteHdfsPredictedCmd.!
-    
-    //logger.info("Executing '%s'...".format(copyToHdfsPredictedCmd))
-    println("Executing '%s'...".format(copyToHdfsPredictedCmd))
-    if ((copyToHdfsPredictedCmd.!) != 0)
-      throw new RuntimeException("Failed to execute '%s'".format(copyToHdfsPredictedCmd))
-
-    args
-  }
-
-  /** Cleanup stage for algo */
-  def cleanup(args: Map[String, String]) = {
-    val localRatingsPath = args("input") // required
-    val localPredictedPath = args("output") // required
-    
-    val localRatingsFile = new File(localRatingsPath)
-    val localPredictedFile = new File(localPredictedPath)
-
-    //logger.info("Deleting temporary file " + localRatingsFile.getPath)
-    println("Deleting temporary file %s...".format(localRatingsFile.getPath))
-    localRatingsFile.delete()
-    //logger.info("Deleting temporary file " + localPredictedFile.getPath)
-    println("Deleting temporary file %s...".format(localPredictedFile.getPath))
-    localPredictedFile.delete()
-
-    args
-  }
-
-}
diff --git a/process/engines/itemrec/algorithms/scala/mahout/slopeone/src/main/scala/io/prediction/algorithms/mahout/itemrec/slopeone/SlopeOneJob.scala b/process/engines/itemrec/algorithms/scala/mahout/slopeone/src/main/scala/io/prediction/algorithms/mahout/itemrec/slopeone/SlopeOneJob.scala
deleted file mode 100644
index be68c52..0000000
--- a/process/engines/itemrec/algorithms/scala/mahout/slopeone/src/main/scala/io/prediction/algorithms/mahout/itemrec/slopeone/SlopeOneJob.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package io.prediction.algorithms.mahout.itemrec.slopeone
-
-import scala.collection.JavaConversions._
-
-import io.prediction.commons.mahout.itemrec.MahoutJob
-
-import org.apache.mahout.cf.taste.model.DataModel
-import org.apache.mahout.cf.taste.recommender.Recommender
-import org.apache.mahout.cf.taste.common.Weighting
-import org.apache.mahout.cf.taste.impl.recommender.slopeone.SlopeOneRecommender
-import org.apache.mahout.cf.taste.impl.recommender.slopeone.MemoryDiffStorage
-
-class SlopeOneJob extends MahoutJob {
-
-  override def buildRecommender(dataModel: DataModel, args: Map[String, String]): Recommender = {
-
-    // Weighting param:
-    // - No_Weighting:
-    // - Count: 
-    // - Standard_Deviation: Weights preference difference with lower standard deviation more highly.
-    val weightingArg: String = getArgOpt(args, "weighting", "Standard_Deviation") // weighted slope one recommender
-
-    val (weighting, stdDevWeighting): (Weighting, Weighting) = weightingArg match {
-      case "No_Weighting" => (Weighting.UNWEIGHTED, Weighting.UNWEIGHTED)
-      case "Count" => (Weighting.WEIGHTED, Weighting.UNWEIGHTED)
-      case "Standard_Deviation" => (Weighting.WEIGHTED, Weighting.WEIGHTED)
-      case _ => throw new RuntimeException("Invalid weighting parameter: " + weightingArg)
-    }
-
-    val recommender: Recommender = new SlopeOneRecommender(dataModel,
-         weighting, // weighting
-         stdDevWeighting, // stdDevWeighting
-         new MemoryDiffStorage(dataModel, stdDevWeighting, Long.MaxValue)); //maximum number of item-item average preference differences to track internally
-
-    recommender
-  }
-
-}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/mahout/commons/src/main/resources/application.conf b/process/engines/itemrec/algorithms/scala/mahout/src/main/resources/application.conf
similarity index 100%
rename from process/engines/itemrec/algorithms/scala/mahout/commons/src/main/resources/application.conf
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/mahout/src/main/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemrec/algorithms/scala/mahout/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutJob.scala
new file mode 100644
index 0000000..0cd5379
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutJob.scala
@@ -0,0 +1,147 @@
+package io.prediction.algorithms.mahout.itemrec
+
+import grizzled.slf4j.Logger
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import scala.collection.JavaConversions._
+
+import scala.sys.process._
+
+import org.apache.mahout.cf.taste.recommender.Recommender
+import org.apache.mahout.cf.taste.model.DataModel
+import org.apache.mahout.cf.taste.impl.model.file.FileDataModel
+
+/** main function to run non-distributed Mahout Job */
+object MahoutJob {
+
+  val logger = Logger(MahoutJob.getClass)
+
+  def main(args: Array[String]) {
+    if (args.size < 1) {
+      logger.error("Please specify Mahout job class name")
+      logger.error("Example. <job class name> --param1 1 --param2 2")
+      System.exit(1)
+    }
+
+    val jobName = args(0)
+
+    logger.info("Running Job %s...".format(jobName))
+
+    logger.info(args.mkString(" "))
+    val (argMap, lastkey) = args.drop(1).foldLeft((Map[String, String](), "")) { (res, data) =>
+      val (argMap, lastkey) = res
+      val key: Option[String] = if (data.startsWith("--")) Some(data.stripPrefix("--")) else None
+
+      key map { k =>
+        (argMap ++ Map(k -> ""), k)
+      } getOrElse {
+        val orgData = argMap(lastkey)
+        val newData = orgData match {
+          case "" => data
+          case _ => orgData + " " + data
+        }
+        (argMap ++ Map(lastkey -> newData), lastkey)
+      }
+    }
+    //println(argMap)
+
+    val job = Class.forName(jobName).
+      getConstructor().
+      newInstance().
+      asInstanceOf[MahoutJob]
+
+    val runArgs = job.prepare(argMap)
+
+    val finishArgs = job.run(runArgs)
+
+    val cleanupArgs = job.finish(finishArgs)
+
+    job.cleanup(cleanupArgs)
+
+  }
+
+}
+
+/** Wrapper job class for Mahout algo */
+abstract class MahoutJob {
+  /** Get required arg */
+  def getArg(args: Map[String, String], key: String): String = {
+    if (!args.contains(key)) sys.error("Please specify value for parameter --" + key)
+
+    args(key)
+  }
+
+  /** Get optional arg */
+  def getArgOpt(args: Map[String, String], key: String, default: String): String = {
+    if (args.contains(key)) args(key) else default
+  }
+
+  def getArgOpt(args: Map[String, String], key: String): Option[String] = {
+    if (args.contains(key)) Some(args(key)) else None
+  }
+
+  /** Prepare stage for algo */
+  def prepare(args: Map[String, String]): Map[String, String] = {
+    // simply pass the args to next stage
+    args
+  }
+
+  /** create and return Mahout's Recommender object. */
+  def buildRecommender(dataModel: DataModel, args: Map[String, String]): Recommender
+
+  /**
+   * Run algo job.
+   * In the default implementation, the prepare() function does nothing.
+   * The run() function reads and processes the local input file (defined by the --input arg)
+   * and generates the prediction output file (defined by the --output arg) for each user.
+   * Then finish() does nothing.
+   */
+  def run(args: Map[String, String]): Map[String, String] = {
+
+    val input = args("input")
+    val output = args("output")
+    val numRecommendations: Int = getArgOpt(args, "numRecommendations", "10").toInt
+
+    val dataModel: DataModel = new FileDataModel(new File(input))
+    val recommender: Recommender = buildRecommender(dataModel, args)
+
+    val outputFile = new File(output)
+    // create dir if it doesn't exist yet.
+    outputFile.getParentFile().mkdirs()
+
+    // generate prediction output file
+    val userRec = dataModel.getUserIDs.toSeq.par
+      .map { uid =>
+        val rec = recommender.recommend(uid, numRecommendations)
+        if (rec.size != 0) {
+          val prediction = uid + "\t" + "[" + (rec map { x => x.getItemID + ":" + x.getValue }).mkString(",") + "]"
+          Some(prediction)
+        } else {
+          None
+        }
+      }
+
+    val outputWriter = new BufferedWriter(new FileWriter(outputFile))
+    userRec.seq.foreach { line =>
+      line.map(v => outputWriter.write(v + "\n"))
+    }
+    outputWriter.close()
+
+    args
+  }
+
+  /** finish stage for algo */
+  def finish(args: Map[String, String]): Map[String, String] = {
+    // simply pass the args to next stage
+    args
+  }
+
+  /** Cleanup stage for algo */
+  def cleanup(args: Map[String, String]) = {
+    // simply pass the args to next stage
+    args
+  }
+
+}
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructor.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructor.scala
new file mode 100644
index 0000000..1249be2
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructor.scala
@@ -0,0 +1,242 @@
+package io.prediction.algorithms.mahout.itemrec
+
+import grizzled.slf4j.Logger
+import com.twitter.scalding.Args
+import scala.io.Source
+
+import io.prediction.commons.Config
+import io.prediction.commons.modeldata.{ ItemRecScore }
+
+/**
+ * Description:
+ * Model constructor for non-distributed (single machine) Mahout ItemRec algo
+ *
+ * Input files:
+ * - predicted.tsv (uindex prediction-string) prediction output generated by MahoutJob
+ * - ratings.csv (uindex iindex rating)
+ * - itemsIndex.tsv (iindex iid itypes starttime endtime)
+ * - usersIndex.tsv (uindex uid)
+ *
+ * Required args:
+ * --appid: <int>
+ * --algoid: <int>
+ * --modelSet: <boolean> (true/false). flag to indicate which set
+ *
+ * --unseenOnly: <boolean> (true/false). only recommend unseen items if this is true.
+ * --numRecommendations: <int>. number of recommendations to be generated
+ *
+ * Optional args:
+ * --evalid: <int>. Offline Evaluation if evalid is specified
+ * --debug: <String>. "test" - for testing purpose
+ *
+ * --booleanData: <boolean>. Mahout item rec algo flag for implicit action data
+ * --implicitFeedback: <boolean>. Mahout item rec algo flag for implicit action data
+ *
+ */
+object MahoutModelConstructor {
+  /* global */
+  val logger = Logger(MahoutModelConstructor.getClass)
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val inputDir: String,
+    val appid: Int,
+    val algoid: Int,
+    val evalid: Option[Int],
+    val modelSet: Boolean,
+    val unseenOnly: Boolean,
+    val numRecommendations: Int,
+    val booleanData: Boolean,
+    val implicitFeedback: Boolean)
+
+  def main(cmdArgs: Array[String]) {
+    logger.info("Running model constructor for Mahout ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val arg = JobArg(
+      inputDir = args("inputDir"),
+      appid = args("appid").toInt,
+      algoid = args("algoid").toInt,
+      evalid = args.optional("evalid") map (x => x.toInt),
+      modelSet = args("modelSet").toBoolean,
+      unseenOnly = args("unseenOnly").toBoolean,
+      numRecommendations = args("numRecommendations").toInt,
+      booleanData = args.optional("booleanData").map(x => x.toBoolean).getOrElse(false),
+      implicitFeedback = args.optional("implicitFeedback").map(x => x.toBoolean).getOrElse(false)
+    )
+
+    /* run job */
+    modelCon(arg)
+    cleanUp(arg)
+  }
+
+  def modelCon(arg: JobArg) = {
+    // implicit preference flag.
+    val IMPLICIT_PREFERENCE = arg.booleanData || arg.implicitFeedback
+
+    // NOTE: if OFFLINE_EVAL, write to training modeldata and use evalid as appid
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val modeldataDb = if (!OFFLINE_EVAL)
+      commonsConfig.getModeldataItemRecScores
+    else
+      commonsConfig.getModeldataTrainingItemRecScores
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    // user index file
+    // uindex -> uid
+    val usersMap: Map[Int, String] = Source.fromFile(s"${arg.inputDir}usersIndex.tsv").getLines()
+      .map[(Int, String)] { line =>
+        val (uindex, uid) = try {
+          val data = line.split("\t")
+          (data(0).toInt, data(1))
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get user index and uid in line: ${line}. ${e}")
+          }
+        }
+        (uindex, uid)
+      }.toMap
+
+    case class ItemData(
+      val iid: String,
+      val itypes: Seq[String])
+
+    // item index file (iindex iid itypes)
+    // iindex -> ItemData
+    val itemsMap: Map[Int, ItemData] = Source.fromFile(s"${arg.inputDir}itemsIndex.tsv")
+      .getLines()
+      .map[(Int, ItemData)] { line =>
+        val (iindex, item) = try {
+          val fields = line.split("\t")
+          val itemData = ItemData(
+            iid = fields(1),
+            itypes = fields(2).split(",").toList
+          )
+          (fields(0).toInt, itemData)
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        (iindex, item)
+      }.toMap
+
+    // ratings file (for unseen filtering) 
+    val seenMap: Map[(Int, Int), Double] = if (arg.unseenOnly) {
+      Source.fromFile(s"${arg.inputDir}ratings.csv")
+        .getLines()
+        .map { line =>
+          val (u, i, r) = try {
+            val fields = line.split(",")
+            // u, i, rating
+            (fields(0).toInt, fields(1).toInt, fields(2).toDouble)
+          } catch {
+            case e: Exception => throw new RuntimeException(s"Cannot get user and item index from this line: ${line}. ${e}")
+          }
+          ((u, i) -> r)
+        }.toMap
+    } else {
+      Map()
+    }
+
+    /* TODO: handling merging seen rating
+    // uindx -> Seq[(iindex, rating)]
+    val ratingsMap: Map[Int, Seq[(Int, Double)]] = seenMap.groupBy { case ((u, i), s) => u }
+      .mapValues { v =>
+        // v is Map[()]
+        v.toSeq.map { case ((u, i), s) => (i, s) }
+      }
+    */
+
+    // prediction
+    Source.fromFile(s"${arg.inputDir}predicted.tsv")
+      .getLines()
+      .foreach { line =>
+        val fields = line.split("\t")
+
+        val (uindex, predictedData) = try {
+          (fields(0).toInt, fields(1))
+        } catch {
+          case e: Exception => throw new RuntimeException(s"Cannot extract uindex and prediction output from this line: ${line}. ${e}")
+        }
+
+        val predicted: Seq[(Int, Double)] = parsePredictedData(predictedData)
+          .map { case (iindex, rating) => (iindex.toInt, rating) }
+
+        // TODO: handling merging seen rating
+        val combined = predicted
+        // if unseenOnly (or implicit preference), no merge with known rating
+        /*if (arg.unseenOnly || IMPLICIT_PREFERENCE) predicted
+        else (predicted ++ ratingsMap.getOrElse(uindex, Seq()))*/
+
+        val topScores = combined
+          .filter {
+            case (iindex, rating) =>
+              unseenItemFilter(arg.unseenOnly, uindex, iindex, seenMap) &&
+                validItemFilter(true, iindex, itemsMap)
+          }.sortBy(_._2)(Ordering[Double].reverse)
+          .take(arg.numRecommendations)
+
+        logger.debug(s"$topScores")
+
+        val uid = try {
+          usersMap(uindex)
+        } catch {
+          case e: Exception => throw new RuntimeException(s"Cannot get uid for this uindex: ${line}. ${e}")
+        }
+        modeldataDb.insert(ItemRecScore(
+          uid = usersMap(uindex),
+          iids = topScores.map(x => itemsMap(x._1).iid),
+          scores = topScores.map(_._2),
+          itypes = topScores.map(x => itemsMap(x._1).itypes),
+          appid = appid,
+          algoid = arg.algoid,
+          modelset = arg.modelSet))
+
+      }
+  }
+
+  def cleanUp(arg: JobArg) = {
+
+  }
+
+  /* TODO refactor this
+  Mahout ItemRec output format
+  [24:3.2] => (24, 3.2)
+  [8:2.5,0:2.5]  => (8, 2.5), (0, 2.5)
+  [0:2.0]
+  [16:3.0]
+  */
+  def parsePredictedData(data: String): List[(String, Double)] = {
+    val dataLen = data.length
+    data.take(dataLen - 1).tail.split(",").toList.map { ratingData =>
+      val ratingDataArray = ratingData.split(":")
+      val item = ratingDataArray(0)
+      val rating: Double = try {
+        ratingDataArray(1).toDouble
+      } catch {
+        case e: Exception =>
+          {
+            assert(false, s"Cannot convert rating value of item ${item} to double: " + ratingDataArray + ". Exception: " + e)
+          }
+          0.0
+      }
+      (item, rating)
+    }
+  }
+
+  def unseenItemFilter(enable: Boolean, uindex: Int, iindex: Int, seenMap: Map[(Int, Int), Any]): Boolean = {
+    if (enable) (!seenMap.contains((uindex, iindex))) else true
+  }
+
+  def validItemFilter(enable: Boolean, iindex: Int, validMap: Map[Int, Any]): Boolean = {
+    if (enable) validMap.contains(iindex) else true
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/mahout/alswr/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala
similarity index 94%
rename from process/engines/itemrec/algorithms/scala/mahout/alswr/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala
index b4ed9fc..db1aa8e 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/alswr/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/alswr/ALSWRJob.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.JavaConversions._
 
-import io.prediction.commons.mahout.itemrec.MahoutJob
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
 
 import org.apache.mahout.cf.taste.model.DataModel
 import org.apache.mahout.cf.taste.recommender.Recommender
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedJob.scala
new file mode 100644
index 0000000..77a38a8
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedJob.scala
@@ -0,0 +1,98 @@
+package io.prediction.algorithms.mahout.itemrec.knnitembased
+
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
+
+import org.apache.mahout.cf.taste.model.DataModel
+import org.apache.mahout.cf.taste.common.Weighting
+import org.apache.mahout.cf.taste.recommender.Recommender
+import org.apache.mahout.cf.taste.similarity.ItemSimilarity
+import org.apache.mahout.cf.taste.impl.recommender.{ GenericItemBasedRecommender, GenericBooleanPrefItemBasedRecommender }
+import org.apache.mahout.cf.taste.impl.similarity.{
+  CityBlockSimilarity,
+  EuclideanDistanceSimilarity,
+  LogLikelihoodSimilarity,
+  PearsonCorrelationSimilarity,
+  TanimotoCoefficientSimilarity,
+  UncenteredCosineSimilarity
+}
+import org.apache.mahout.cf.taste.impl.similarity.file.FileItemSimilarity
+import org.apache.mahout.cf.taste.similarity.precompute.BatchItemSimilarities
+import org.apache.mahout.cf.taste.impl.similarity.precompute.MultithreadedBatchItemSimilarities
+import org.apache.mahout.cf.taste.impl.similarity.precompute.FileSimilarItemsWriter
+import org.apache.mahout.cf.taste.similarity.precompute.SimilarItems
+
+import scala.collection.JavaConversions._
+
+import java.io.File
+
+class KNNItemBasedJob extends MahoutJob {
+
+  val defaultItemSimilarity = "LogLikelihoodSimilarity"
+
+  override def buildRecommender(dataModel: DataModel, args: Map[String, String]): Recommender = {
+
+    val booleanData: Boolean = getArgOpt(args, "booleanData", "false").toBoolean
+    val itemSimilarity: String = getArgOpt(args, "itemSimilarity", defaultItemSimilarity)
+    val weighted: Boolean = getArgOpt(args, "weighted", "false").toBoolean
+    val threshold: Double = getArgOpt(args, "threshold").map(_.toDouble).getOrElse(Double.MinPositiveValue)
+    val nearestN: Int = getArgOpt(args, "nearestN", "10").toInt
+    val outputSim: String = getArg(args, "outputSim")
+
+    val preComputeItemSim: Boolean = getArgOpt(args, "preComputeItemSim", "true").toBoolean
+    val similarItemsPerItem: Int = getArgOpt(args, "similarItemsPerItem", "100").toInt // number of similar items per item in pre-computation
+    // MultithreadedBatchItemSimilarities parameter
+    /*
+    val batchSize: Int = getArgOpt(args, "batchSize", "500").toInt
+    val degreeOfParallelism: Int = getArgOpt(args, "parallelism", "8").toInt
+    val maxDurationInHours: Int = getArgOpt(args, "maxHours", "6").toInt
+    */
+
+    val weightedParam: Weighting = if (weighted) Weighting.WEIGHTED else Weighting.UNWEIGHTED
+
+    val similarity: ItemSimilarity = itemSimilarity match {
+      case "CityBlockSimilarity" => new CityBlockSimilarity(dataModel)
+      case "EuclideanDistanceSimilarity" => new EuclideanDistanceSimilarity(dataModel, weightedParam)
+      case "LogLikelihoodSimilarity" => new LogLikelihoodSimilarity(dataModel)
+      case "PearsonCorrelationSimilarity" => new PearsonCorrelationSimilarity(dataModel, weightedParam)
+      case "TanimotoCoefficientSimilarity" => new TanimotoCoefficientSimilarity(dataModel)
+      case "UncenteredCosineSimilarity" => new UncenteredCosineSimilarity(dataModel, weightedParam)
+      case _ => throw new RuntimeException("Invalid ItemSimilarity: " + itemSimilarity)
+    }
+
+    if (preComputeItemSim) {
+      val outputSimFile = new File(outputSim)
+      outputSimFile.getParentFile().mkdirs()
+      // delete old file
+      if (outputSimFile.exists()) outputSimFile.delete()
+
+      /*
+      val genericRecommeder = new GenericItemBasedRecommender(dataModel, similarity)
+      val batch: BatchItemSimilarities = new MultithreadedBatchItemSimilarities(genericRecommeder, similarItemsPerItem, batchSize)
+      batch.computeItemSimilarities(degreeOfParallelism, maxDurationInHours, new FileSimilarItemsWriter(outputSimFile))
+      */
+
+      val genericRecommeder = new GenericItemBasedRecommender(dataModel, similarity)
+      val itemIds = dataModel.getItemIDs.toSeq
+
+      val simPar = itemIds.par.map { itemId =>
+        new SimilarItems(itemId, genericRecommeder.mostSimilarItems(itemId, similarItemsPerItem))
+      }
+
+      val writer = new FileSimilarItemsWriter(outputSimFile)
+      writer.open()
+      simPar.seq.foreach { s: SimilarItems =>
+        writer.add(s)
+      }
+      writer.close()
+    }
+
+    val recSimilarity = if (preComputeItemSim) {
+      new FileItemSimilarity(new File(outputSim))
+    } else similarity
+
+    val recommender: Recommender = new KNNItemBasedRecommender(dataModel, recSimilarity, booleanData, nearestN, threshold)
+
+    recommender
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedRecommender.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedRecommender.scala
new file mode 100644
index 0000000..f087dff
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnitembased/KNNItemBasedRecommender.scala
@@ -0,0 +1,100 @@
+package io.prediction.algorithms.mahout.itemrec.knnitembased
+
+import org.apache.mahout.cf.taste.common.TasteException
+import org.apache.mahout.cf.taste.impl.recommender.GenericItemBasedRecommender
+import org.apache.mahout.cf.taste.impl.recommender.AbstractRecommender
+import org.apache.mahout.cf.taste.model.DataModel
+import org.apache.mahout.cf.taste.model.PreferenceArray;
+import org.apache.mahout.cf.taste.similarity.ItemSimilarity
+import org.apache.mahout.cf.taste.recommender.CandidateItemsStrategy
+import org.apache.mahout.cf.taste.recommender.MostSimilarItemsCandidateItemsStrategy
+import org.apache.mahout.cf.taste.impl.recommender.EstimatedPreferenceCapper
+
+import scala.collection.mutable.PriorityQueue
+import scala.collection.JavaConversions._
+
+/* Extension to Mahout's GenericItemBasedRecommender
+ * with the additional settings: booleanData, neighbourSize, threshold.
+ */
+class KNNItemBasedRecommender(dataModel: DataModel,
+  similarity: ItemSimilarity,
+  candidateItemsStrategy: CandidateItemsStrategy,
+  mostSimilarItemsCandidateItemsStrategy: MostSimilarItemsCandidateItemsStrategy,
+  booleanData: Boolean,
+  neighbourSize: Int,
+  threshold: Double) extends GenericItemBasedRecommender(dataModel, similarity, candidateItemsStrategy,
+  mostSimilarItemsCandidateItemsStrategy) {
+
+  val capper: Option[EstimatedPreferenceCapper] = if (getDataModel().getMinPreference().isNaN ||
+    getDataModel().getMaxPreference().isNaN)
+    None
+  else
+    Some(new EstimatedPreferenceCapper(getDataModel()))
+
+  def this(dataModel: DataModel, similarity: ItemSimilarity, booleanData: Boolean, neighbourSize: Int, threshold: Double) =
+    this(dataModel, similarity, AbstractRecommender.getDefaultCandidateItemsStrategy(),
+      GenericItemBasedRecommender.getDefaultMostSimilarItemsCandidateItemsStrategy(), booleanData, neighbourSize, threshold)
+
+  @throws(classOf[TasteException])
+  override def doEstimatePreference(userID: Long, preferencesFromUser: PreferenceArray, itemID: Long): Float = {
+    val ratedIds = preferencesFromUser.getIDs()
+      .zipWithIndex // need index for accessing preferencesFromUser later
+      .map { case (id, index) => (id, similarity.itemSimilarity(itemID, id), index) } // (id, similarity, index)
+      .filter { case (id, sim, index) => (!sim.isNaN()) && (sim >= threshold) }
+
+    val neighbourRatedIds = getTopN(ratedIds, neighbourSize)(RatedIdOdering.reverse)
+
+    val estimatedPreference: Float = if (booleanData) {
+      val totalSimilarity = neighbourRatedIds.foldLeft[Double](0) { (acc, x) =>
+        val (id, sim, index) = x
+        acc + sim
+      }
+      totalSimilarity.toFloat
+    } else {
+      val (totalPreference, totalSimilarity) = neighbourRatedIds.foldLeft[(Double, Double)]((0, 0)) { (acc, x) =>
+        val (accPreference, accSimilarity) = acc
+        val (id, sim, index) = x
+
+        val totalPreference = accPreference + (sim * preferencesFromUser.getValue(index))
+        val totalSimilarity = accSimilarity + sim
+        (totalPreference, totalSimilarity)
+      }
+      // if there is only 1 similar item, the estimated preference will be the same as the preference of that item
+      // regardless of similarity. so don't count it and return NaN instead.
+      if (neighbourRatedIds.size <= 1) {
+        Float.NaN
+      } else {
+        val estimate = (totalPreference / totalSimilarity).toFloat
+        val cappedEstimate = capper.map(c => c.capEstimate(estimate)).getOrElse(estimate)
+        cappedEstimate
+      }
+    }
+    estimatedPreference
+  }
+
+  object RatedIdOdering extends Ordering[(Long, Double, Int)] {
+    override def compare(a: (Long, Double, Int), b: (Long, Double, Int)) = a._2 compare b._2
+  }
+
+  def getTopN[T](s: Seq[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+    val q = PriorityQueue()
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+  override def toString() = {
+    "KNNItemBasedRecommender"
+  }
+}
diff --git a/process/engines/itemrec/algorithms/scala/mahout/knnuserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala
similarity index 83%
rename from process/engines/itemrec/algorithms/scala/mahout/knnuserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala
index d2b1692..ce707f7 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/knnuserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/knnuserbased/KNNUserBasedJob.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.JavaConversions._
 
-import io.prediction.commons.mahout.itemrec.MahoutJob
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
 
 import org.apache.mahout.cf.taste.model.DataModel
 import org.apache.mahout.cf.taste.recommender.Recommender
@@ -10,15 +10,22 @@
 import org.apache.mahout.cf.taste.neighborhood.UserNeighborhood
 import org.apache.mahout.cf.taste.similarity.UserSimilarity
 import org.apache.mahout.cf.taste.common.Weighting
-import org.apache.mahout.cf.taste.impl.recommender.{GenericUserBasedRecommender, GenericBooleanPrefUserBasedRecommender}
+import org.apache.mahout.cf.taste.impl.recommender.{ GenericUserBasedRecommender, GenericBooleanPrefUserBasedRecommender }
 import org.apache.mahout.cf.taste.impl.neighborhood.NearestNUserNeighborhood
-import org.apache.mahout.cf.taste.impl.similarity.{CityBlockSimilarity, EuclideanDistanceSimilarity, LogLikelihoodSimilarity, 
-  PearsonCorrelationSimilarity, SpearmanCorrelationSimilarity, TanimotoCoefficientSimilarity, UncenteredCosineSimilarity}
+import org.apache.mahout.cf.taste.impl.similarity.{
+  CityBlockSimilarity,
+  EuclideanDistanceSimilarity,
+  LogLikelihoodSimilarity,
+  PearsonCorrelationSimilarity,
+  SpearmanCorrelationSimilarity,
+  TanimotoCoefficientSimilarity,
+  UncenteredCosineSimilarity
+}
 
 class KNNUserBasedJob extends MahoutJob {
-  
+
   val userSimilarityValues = Seq(
-    "CityBlockSimilarity", 
+    "CityBlockSimilarity",
     "EuclideanDistanceSimilarity",
     "LogLikelihoodSimilarity",
     "PearsonCorrelationSimilarity",
@@ -29,14 +36,14 @@
   val defaultUserSimilarity = "PearsonCorrelationSimilarity"
 
   override def buildRecommender(dataModel: DataModel, args: Map[String, String]): Recommender = {
-    
+
     val booleanData: Boolean = getArgOpt(args, "booleanData", "false").toBoolean
     val nearestN: Int = getArgOpt(args, "nearestN", "10").toInt
     val userSimilarity: String = getArgOpt(args, "userSimilarity", defaultUserSimilarity)
     val weighted: Boolean = getArgOpt(args, "weighted", "false").toBoolean
-    val minSimilarity: Double = getArgOpt(args, "minSimilarity").map( _.toDouble).getOrElse(Double.NegativeInfinity)
+    val minSimilarity: Double = getArgOpt(args, "minSimilarity").map(_.toDouble).getOrElse(Double.NegativeInfinity)
     val samplingRate: Double = getArgOpt(args, "samplingRate", "1.0").toDouble
-    
+
     val weightedParam: Weighting = if (weighted) Weighting.WEIGHTED else Weighting.UNWEIGHTED
 
     val similarity: UserSimilarity = userSimilarity match {
diff --git a/process/engines/itemrec/algorithms/scala/mahout/svdplusplus/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala
similarity index 95%
rename from process/engines/itemrec/algorithms/scala/mahout/svdplusplus/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala
index d43b156..5b2a85e 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/svdplusplus/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdplusplus/SVDPlusPlus.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.JavaConversions._
 
-import io.prediction.commons.mahout.itemrec.MahoutJob
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
 
 import org.apache.mahout.cf.taste.model.DataModel
 import org.apache.mahout.cf.taste.recommender.Recommender
@@ -23,7 +23,7 @@
 
     val factorizer: Factorizer = new SVDPlusPlusFactorizer(dataModel, numFeatures, learningRate, preventOverfitting,
       randomNoise, numIterations, learningRateDecay)
-    
+
     val recommender: Recommender = new SVDRecommender(dataModel, factorizer)
 
     recommender
diff --git a/process/engines/itemrec/algorithms/scala/mahout/svdsgd/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala
similarity index 95%
rename from process/engines/itemrec/algorithms/scala/mahout/svdsgd/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala
index 792d6b4..aa9c3b0 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/svdsgd/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/svdsgd/SVDSGDJob.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.JavaConversions._
 
-import io.prediction.commons.mahout.itemrec.MahoutJob
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
 
 import org.apache.mahout.cf.taste.model.DataModel
 import org.apache.mahout.cf.taste.recommender.Recommender
@@ -23,7 +23,7 @@
 
     val factorizer: Factorizer = new RatingSGDFactorizer(dataModel, numFeatures, learningRate, preventOverfitting,
       randomNoise, numIterations, learningRateDecay)
-    
+
     val recommender: Recommender = new SVDRecommender(dataModel, factorizer)
 
     recommender
diff --git a/process/engines/itemrec/algorithms/scala/mahout/thresholduserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala
similarity index 81%
rename from process/engines/itemrec/algorithms/scala/mahout/thresholduserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala
rename to process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala
index f3e38c9..28913f9 100644
--- a/process/engines/itemrec/algorithms/scala/mahout/thresholduserbased/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemrec/thresholduserbased/ThresholdUserBasedJob.scala
@@ -2,7 +2,7 @@
 
 import scala.collection.JavaConversions._
 
-import io.prediction.commons.mahout.itemrec.MahoutJob
+import io.prediction.algorithms.mahout.itemrec.MahoutJob
 
 import org.apache.mahout.cf.taste.model.DataModel
 import org.apache.mahout.cf.taste.recommender.Recommender
@@ -10,15 +10,22 @@
 import org.apache.mahout.cf.taste.neighborhood.UserNeighborhood
 import org.apache.mahout.cf.taste.similarity.UserSimilarity
 import org.apache.mahout.cf.taste.common.Weighting
-import org.apache.mahout.cf.taste.impl.recommender.{GenericUserBasedRecommender, GenericBooleanPrefUserBasedRecommender}
+import org.apache.mahout.cf.taste.impl.recommender.{ GenericUserBasedRecommender, GenericBooleanPrefUserBasedRecommender }
 import org.apache.mahout.cf.taste.impl.neighborhood.ThresholdUserNeighborhood
-import org.apache.mahout.cf.taste.impl.similarity.{CityBlockSimilarity, EuclideanDistanceSimilarity, LogLikelihoodSimilarity, 
-  PearsonCorrelationSimilarity, SpearmanCorrelationSimilarity, TanimotoCoefficientSimilarity, UncenteredCosineSimilarity}
+import org.apache.mahout.cf.taste.impl.similarity.{
+  CityBlockSimilarity,
+  EuclideanDistanceSimilarity,
+  LogLikelihoodSimilarity,
+  PearsonCorrelationSimilarity,
+  SpearmanCorrelationSimilarity,
+  TanimotoCoefficientSimilarity,
+  UncenteredCosineSimilarity
+}
 
 class ThresholdUserBasedJob extends MahoutJob {
-  
+
   val userSimilarityValues = Seq(
-    "CityBlockSimilarity", 
+    "CityBlockSimilarity",
     "EuclideanDistanceSimilarity",
     "LogLikelihoodSimilarity",
     "PearsonCorrelationSimilarity",
@@ -33,9 +40,9 @@
     val booleanData: Boolean = getArgOpt(args, "booleanData", "false").toBoolean
     val userSimilarity: String = getArgOpt(args, "userSimilarity", defaultUserSimilarity)
     val weighted: Boolean = getArgOpt(args, "weighted", "false").toBoolean
-    val threshold: Double = getArgOpt(args, "threshold").map( _.toDouble).getOrElse(Double.MinPositiveValue)
+    val threshold: Double = getArgOpt(args, "threshold").map(_.toDouble).getOrElse(Double.MinPositiveValue)
     val samplingRate: Double = getArgOpt(args, "samplingRate", "1.0").toDouble
-      
+
     val weightedParam: Weighting = if (weighted) Weighting.WEIGHTED else Weighting.UNWEIGHTED
 
     val similarity: UserSimilarity = userSimilarity match {
@@ -59,5 +66,5 @@
 
     recommender
   }
-  
+
 }
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/test/resources/application.conf b/process/engines/itemrec/algorithms/scala/mahout/src/test/resources/application.conf
new file mode 100644
index 0000000..28ac0ee
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_trainig_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_mahout_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_mahout_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_mahout_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_mahout_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemrec/algorithms/scala/mahout/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemrec/algorithms/scala/mahout/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemrec/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructorSpec.scala b/process/engines/itemrec/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructorSpec.scala
new file mode 100644
index 0000000..d3dc6ee
--- /dev/null
+++ b/process/engines/itemrec/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemrec/MahoutModelConstructorSpec.scala
@@ -0,0 +1,426 @@
+package io.prediction.algorithms.mahout.itemrec
+
+import io.prediction.commons.Config
+import io.prediction.commons.settings.{ App, Algo }
+import io.prediction.commons.modeldata.{ ItemRecScore }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import com.mongodb.casbah.Imports._
+
+class MahoutModelConstructorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_modeldata_mahout_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val modeldataItemRecScores = commonConfig.getModeldataItemRecScores
+
+  // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
+  // (eg. 3.5 vs 3.499999999999).
+  // (eg. 0.6666666666 vs 0.666666667)
+  def roundUpScores(irec: ItemRecScore): ItemRecScore = {
+    irec.copy(
+      scores = irec.scores.map { x =>
+        BigDecimal(x).setScale(9, BigDecimal.RoundingMode.HALF_UP).toDouble
+      }
+    )
+  }
+
+  def argMapToArray(args: Map[String, Any]): Array[String] = {
+    args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+  }
+
+  "MahoutModelConstructor" should {
+
+    val inputDir = "/tmp/pio_test/"
+
+    val inputDirFile = new File(inputDir)
+    inputDirFile.mkdirs()
+
+    val usersIndex = List(
+      "1\tu0",
+      "2\tu1",
+      "3\tu2")
+
+    val itemsIndex = List(
+      "1\ti0\tt1,t2",
+      "2\ti1\tt1",
+      "3\ti2\tt2,t3",
+      "4\ti3\tt3",
+      "5\ti4\tt2,t3",
+      "6\ti5\tt1,t2"
+    )
+
+    val ratingsCSV = List(
+      "1,1,3",
+      "1,2,4",
+      "1,3,1",
+      "2,1,2",
+      "2,2,1",
+      "3,2,5",
+      "3,4,4"
+    )
+
+    val predicted = List(
+      "1\t[4:0.6123,5:31.432,6:11.3,1:2.3]",
+      "2\t[3:1.2,4:11.4,5:3.0,6:2.55]",
+      "3\t[1:4.5,3:22.5,2:3.3,5:2.2]")
+
+    def writeToFile(lines: List[String], filePath: String) = {
+      val writer = new BufferedWriter(new FileWriter(new File(filePath)))
+      lines.foreach { line =>
+        writer.write(s"${line}\n")
+      }
+      writer.close()
+    }
+
+    writeToFile(usersIndex, s"${inputDir}usersIndex.tsv")
+    writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+    writeToFile(ratingsCSV, s"${inputDir}ratings.csv")
+    writeToFile(predicted, s"${inputDir}predicted.tsv")
+
+    val appid = 24
+
+    implicit val app = App(
+      id = appid,
+      userid = 0,
+      appkey = "1234",
+      display = "12345",
+      url = None,
+      cat = None,
+      desc = None,
+      timezone = "UTC"
+    )
+
+    "correctly writes ItemRecScores with larger numRecommendations" in {
+
+      val algoid = 25
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 5,
+        "booleanData" -> false,
+        "implicitFeedback" -> false
+      )
+
+      val u0Expected = ItemRecScore(
+        uid = "u0",
+        iids = Seq("i4", "i5", "i0", "i3"),
+        scores = Seq(31.432, 11.3, 2.3, 0.6123),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2"), Seq("t1", "t2"), Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i3", "i4", "i5", "i2"),
+        scores = Seq(11.4, 3.0, 2.55, 1.2),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i0", "i1", "i4"),
+        scores = Seq(22.5, 4.5, 3.3, 2.2),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2"), Seq("t1"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val u0ItemRec = modeldataItemRecScores.getByUid("u0")
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+
+      // don't check id
+      u0ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u0Expected)) and
+        (u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected))) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with smaller numRecommendations" in {
+
+      val algoid = 26
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 2,
+        "booleanData" -> false,
+        "implicitFeedback" -> false
+      )
+
+      val u0Expected = ItemRecScore(
+        uid = "u0",
+        iids = Seq("i4", "i5"),
+        scores = Seq(31.432, 11.3),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i3", "i4"),
+        scores = Seq(11.4, 3.0),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i0"),
+        scores = Seq(22.5, 4.5),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val u0ItemRec = modeldataItemRecScores.getByUid("u0")
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+
+      // don't check id
+      u0ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u0Expected)) and
+        (u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected))) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with subset itemsIndex.tsv" in {
+
+      val inputDir = "/tmp/pio_test/subset/"
+
+      val inputDirFile = new File(inputDir)
+      inputDirFile.mkdirs()
+
+      val itemsIndex = List(
+        "1\ti0\tt1,t2",
+        "3\ti2\tt2,t3",
+        "4\ti3\tt3"
+      )
+
+      writeToFile(usersIndex, s"${inputDir}usersIndex.tsv")
+      writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+      writeToFile(ratingsCSV, s"${inputDir}ratings.csv")
+      writeToFile(predicted, s"${inputDir}predicted.tsv")
+
+      val algoid = 27
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> false,
+        "numRecommendations" -> 5,
+        "booleanData" -> false,
+        "implicitFeedback" -> false
+      )
+
+      val u0Expected = ItemRecScore(
+        uid = "u0",
+        iids = Seq("i0", "i3"),
+        scores = Seq(2.3, 0.6123),
+        itypes = Seq(Seq("t1", "t2"), Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i3", "i2"),
+        scores = Seq(11.4, 1.2),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i0"),
+        scores = Seq(22.5, 4.5),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val u0ItemRec = modeldataItemRecScores.getByUid("u0")
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+
+      // don't check id
+      u0ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u0Expected)) and
+        (u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected))) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected)))
+
+    }
+
+    "correctly writes ItemRecScores with unseenOnly=true" in {
+
+      val algoid = 28
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "unseenOnly" -> true,
+        "numRecommendations" -> 4,
+        "booleanData" -> false,
+        "implicitFeedback" -> false
+      )
+
+      val u0Expected = ItemRecScore(
+        uid = "u0",
+        iids = Seq("i4", "i5", "i3"),
+        scores = Seq(31.432, 11.3, 0.6123),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2"), Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u1Expected = ItemRecScore(
+        uid = "u1",
+        iids = Seq("i3", "i4", "i5", "i2"),
+        scores = Seq(11.4, 3.0, 2.55, 1.2),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val u2Expected = ItemRecScore(
+        uid = "u2",
+        iids = Seq("i2", "i0", "i4"),
+        scores = Seq(22.5, 4.5, 2.2),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val u0ItemRec = modeldataItemRecScores.getByUid("u0")
+      val u1ItemRec = modeldataItemRecScores.getByUid("u1")
+      val u2ItemRec = modeldataItemRecScores.getByUid("u2")
+
+      // don't check id
+      u0ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u0Expected)) and
+        (u1ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u1Expected))) and
+        (u2ItemRec.map(roundUpScores(_).copy(id = None)) must beSome(roundUpScores(u2Expected)))
+
+    }
+
+  }
+
+  // TODO: test evalid != None
+
+  // clean up when finish test
+  step(cleanUp())
+}
diff --git a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtK.scala b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtK.scala
index 8c6e105..2c1300a 100644
--- a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtK.scala
+++ b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtK.scala
@@ -10,23 +10,23 @@
 
 /**
  * Source:
- *   relevantItems.tsv eg  u0   i0,i1,i2 
+ *   relevantItems.tsv eg  u0   i0,i1,i2
  *   topKItems.tsv  eg.  u0  i1,i4,i5
- *   
+ *
  * Sink:
  *   offlineEvalResults DB
  *   averagePrecision.tsv  eg.  u0  0.03
- *   
- *   
+ *
+ *
  * Description:
  *   Calculate Mean Average Precision @ k score
- *   
+ *
  * Required args:
  * --dbType: <string> The OfflineEvalResults DB Type (eg. mongodb) (see --dbHost, --dbPort)
  * --dbName: <string>
- * 
+ *
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --evalid: <int>
@@ -36,18 +36,18 @@
  * --splitset: <string>
  *
  * --kParam: <int>
- * 
+ *
  * Optional args:
  * --dbHost: <string> (eg. "127.0.0.1")
  * --dbPort: <int> (eg. 27017)
- * 
+ *
  * --debug: <String>. "test" - for testing purpose
- * 
+ *
  * Example:
  * scald.rb --hdfs-local io.prediction.metrics.scalding.itemrec.map.MAPAtK --dbType mongodb --dbName predictionio --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --evalid 15 --metricid 10 --algoid 9 --kParam 30
  */
-class MAPAtK(args: Args) extends Job(args) { 
-  
+class MAPAtK(args: Args) extends Job(args) {
+
   /**
    * parse args
    */
@@ -55,9 +55,9 @@
   val dbNameArg = args("dbName")
   val dbHostArg = args.optional("dbHost")
   val dbPortArg = args.optional("dbPort") map (x => x.toInt)
-  
+
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val evalidArg = args("evalid").toInt
@@ -68,10 +68,9 @@
   val splitsetArg = args.getOrElse("splitset", "")
 
   val kParamArg = args("kParam").toInt
-  
+
   val debugArg = args.list("debug")
   val DEBUG_TEST = debugArg.contains("test") // test mode
-  
 
   /**
    * get Sources
@@ -79,10 +78,10 @@
   val relevantItems = Tsv(OfflineMetricFile(hdfsRootArg, appidArg, engineidArg, evalidArg, metricidArg, algoidArg, "relevantItems.tsv")).read
     .mapTo((0, 1) -> ('uidTest, 'relevantList)) { fields: (String, String) =>
       val (uidTest, relevantList) = fields
-      
-      (uidTest, relevantList.split(",").toList)  
+
+      (uidTest, relevantList.split(",").toList)
     }
-  
+
   val topKItems = Tsv(OfflineMetricFile(hdfsRootArg, appidArg, engineidArg, evalidArg, metricidArg, algoidArg, "topKItems.tsv")).read
     .mapTo((0, 1) -> ('uid, 'topList)) { fields: (String, String) =>
       val (uid, topList) = fields
@@ -92,25 +91,24 @@
   /**
    * sink
    */
-  
+
   val averagePrecisionSink = Tsv(OfflineMetricFile(hdfsRootArg, appidArg, engineidArg, evalidArg, metricidArg, algoidArg, "averagePrecision.tsv"))
-  
-  val offlineEvalResultsSink = OfflineEvalResults(dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg)
-  
-  
+
+  val offlineEvalResultsSink = OfflineEvalResults(dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg)
+
   /**
    * computation
    */
-        
+
   // for each user, calculate the AP based on relvantList and topList
 
   // use RightJoin, so that if there is no topList but there is relevantList, set AP to 0.
   // if the user has no relevantList in relevantItems, ignore this user.
   val averagePrecision = topKItems.joinWithSmaller('uid -> 'uidTest, relevantItems, joiner = new RightJoin)
-    .rename (('uid, 'topList) -> ('uidX, 'topListX))
-    .map(('uidX, 'topListX, 'uidTest) -> ('uid, 'topList)) { fields: (String, List[String], String) => 
+    .rename(('uid, 'topList) -> ('uidX, 'topListX))
+    .map(('uidX, 'topListX, 'uidTest) -> ('uid, 'topList)) { fields: (String, List[String], String) =>
       val (uidX, topListX, uidTest) = fields
-      
+
       if (uidX == null)
         (uidTest, List(""))
       else
@@ -118,42 +116,42 @@
     }
     .map(('topList, 'relevantList) -> ('avgPreAtK, 'key, 'zeroAP, 'numOfHits)) { fields: (List[String], List[String]) =>
       val (topList, relevantList) = fields
-             
+
       val (ap, numOfHits) = averagePrecisionAtK(kParamArg, topList, relevantList)
       val zeroAP = if (ap == 0) 1 else 0
-         
+
       (ap, 1, zeroAP, numOfHits)
     }
-  
+
   averagePrecision
     .mapTo(('uid, 'topList, 'relevantList, 'avgPreAtK, 'numOfHits) -> ('uid, 'topList, 'relevantList, 'avgPreAtK, 'numOfHits)) {
       fields: (String, List[String], List[String], Double, Int) =>
         val (uid, topList, relevantList, avgPreAtK, numOfHits) = fields
-        
+
         (uid, topList.mkString(","), relevantList.mkString(","), avgPreAtK, numOfHits)
     }
-  
+
   averagePrecision.project('uid, 'avgPreAtK, 'numOfHits)
     .write(averagePrecisionSink)
-    
-  val results = averagePrecision.groupBy('key) { 
-     _
+
+  val results = averagePrecision.groupBy('key) {
+    _
       .size('num) // how many users in total
       .sum('avgPreAtK -> 'avgPreAtKSum)
       .sum('zeroAP -> 'numOfZeroAP)
-    }
-    .mapTo(('num, 'avgPreAtKSum, 'numOfZeroAP) -> ('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset, 'num, 'numOfZeroAP)) { fields: (Int, Double, Int) => 
-        
+  }
+    .mapTo(('num, 'avgPreAtKSum, 'numOfZeroAP) -> ('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset, 'num, 'numOfZeroAP)) { fields: (Int, Double, Int) =>
+
       val (num, avgPreAtKSum, numOfZeroAP) = fields
-      
-      val meanAveragePrecision = avgPreAtKSum/num
-      
-      (evalidArg, metricidArg, algoidArg, meanAveragePrecision, iterationArg, splitsetArg, num, numOfZeroAP) 
+
+      val meanAveragePrecision = avgPreAtKSum / num
+
+      (evalidArg, metricidArg, algoidArg, meanAveragePrecision, iterationArg, splitsetArg, num, numOfZeroAP)
     }
-  
-  results.then( offlineEvalResultsSink.writeData('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset) _ )
-  
-  /** 
+
+  results.then(offlineEvalResultsSink.writeData('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset) _)
+
+  /**
    * Calculate the average precision @ k
    *
    * ap@k = sum(P(i)/min(m, k)) wher i=1 to k
@@ -165,45 +163,45 @@
    *
    * return:
    *   averagePrecision (Double)
-   *   numOfHits (Int) 
+   *   numOfHits (Int)
    */
-  def averagePrecisionAtK(k: Int, predictedItems: List[String], relevantItems: List[String]) : (Double, Int) = {
-    
+  def averagePrecisionAtK(k: Int, predictedItems: List[String], relevantItems: List[String]): (Double, Int) = {
+
     // supposedly the predictedItems.size should match k
     // NOTE: what if predictedItems is less than k? use the avaiable items as k.
     require((predictedItems.size <= k), "The size of predicted Items list should be <= k.")
     val n = scala.math.min(predictedItems.size, k)
-     
+
     // find if each element in the predictedItems is one of the relevant items
     // if so, map to 1. else map to 0
     // (0, 1, 0, 1, 1, 0, 0)
-    val relevantBinary : List[Int] = predictedItems.map { x => if (relevantItems.contains(x)) 1 else 0 }
+    val relevantBinary: List[Int] = predictedItems.map { x => if (relevantItems.contains(x)) 1 else 0 }
     val numOfHits: Int = relevantBinary.sum
 
     // prepare the data for AP calculation.
     // take the relevantBinary List and map each element to tuple (num, index)
     // where num is number of 1s up to that position if the element is 1 and 0 if the elemtn is 0.
     // index is the position index of the element starting from 1.
-    def prepareAPData(org: List[Int], accum: Int, index: Int) : List[(Int, Int)]= {
+    def prepareAPData(org: List[Int], accum: Int, index: Int): List[(Int, Int)] = {
       if (org.isEmpty)
         List()
       else {
         val newAccum = accum + org.head
         val num = if (org.head == 1) newAccum else 0
         val element = (num -> index)
-        List(element) ++ prepareAPData(org.tail, newAccum, index + 1) 
+        List(element) ++ prepareAPData(org.tail, newAccum, index + 1)
       }
     }
-    
+
     val averagePrecisionData = prepareAPData(relevantBinary, 0, 1)
-    
+
     val apDenom = scala.math.min(n, relevantItems.size)
-    
+
     // NOTE: if relevantItems.size is 0, the averagePrecision is 0
     val averagePrecision = if (apDenom == 0) 0 else
-        ((averagePrecisionData.map { x => if (x._1 != 0) (x._1.toDouble / x._2) else 0 }.sum) / apDenom)
-    
+      ((averagePrecisionData.map { x => if (x._1 != 0) (x._1.toDouble / x._2) else 0 }.sum) / apDenom)
+
     (averagePrecision, numOfHits)
   }
-  
+
 }
\ No newline at end of file
diff --git a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparator.scala b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparator.scala
index 5bb1412..d839969 100644
--- a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparator.scala
+++ b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/main/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparator.scala
@@ -111,8 +111,8 @@
   /*val trainingU2i = U2iActions(appId=evalidArg,
       dbType=training_dbTypeArg, dbName=training_dbNameArg, dbHost=training_dbHostArg, dbPort=training_dbPortArg).readData('actionTrain, 'uidTrain, 'iidTrain, 'tTrain, 'vTrain)*/
 
-  val testU2i = U2iActions(appId=evalidArg,
-      dbType=test_dbTypeArg, dbName=test_dbNameArg, dbHost=test_dbHostArg, dbPort=test_dbPortArg).readData('actionTest, 'uidTest, 'iidTest, 'tTest, 'vTest)
+  val testU2i = U2iActions(appId = evalidArg,
+    dbType = test_dbTypeArg, dbName = test_dbNameArg, dbHost = test_dbHostArg, dbPort = test_dbPortArg).readData('actionTest, 'uidTest, 'iidTest, 'tTest, 'vTest)
 
   /**
    * computation
@@ -141,7 +141,7 @@
           case e: Exception => {
             assert(false, s"Failed to convert v field ${v} to int. Exception:" + e)
             false
-          }   
+          }
         }
         case GOAL_RATE5 => try {
           (action == ACTION_RATE) && (v.get.toInt >= 5)
diff --git a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparatorTest.scala b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparatorTest.scala
index d69da3c..405c662 100644
--- a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparatorTest.scala
+++ b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKDataPreparatorTest.scala
@@ -8,36 +8,36 @@
 import io.prediction.commons.scalding.appdata.U2iActions
 
 class MAPAtKDataPreparatorTest extends Specification with TupleConversions {
-  
+
   val Rate = "rate"
   val Like = "like"
   val Dislike = "dislike"
   val View = "view"
   //val ViewDetails = "viewDetails"
   val Conversion = "conversion"
-  
-  def test(params: Map[String, String], 
-      testU2i: List[(String, String, String, String, String)],
-      relevantItems: List[(String, String)] // List(("u0", "i0,i1,i2"), ("u1", "i0,i1,i2"))
+
+  def test(params: Map[String, String],
+    testU2i: List[(String, String, String, String, String)],
+    relevantItems: List[(String, String)] // List(("u0", "i0,i1,i2"), ("u1", "i0,i1,i2"))
     ) = {
-    
+
     val test_dbType = "file"
     val test_dbName = "testsetpath/"
     val test_dbHost = None
     val test_dbPort = None
-    
+
     val training_dbType = "file"
     val training_dbName = "trainingsetpath/"
     val training_dbHost = None
     val training_dbPort = None
-    
+
     val modeldata_dbType = "file"
     val modeldata_dbName = "modeldatapath/"
     val modeldata_dbHost = None
     val modeldata_dbPort = None
-    
+
     val hdfsRoot = "testroot/"
-    
+
     JobTest("io.prediction.metrics.scalding.itemrec.map.MAPAtKDataPreparator")
       .arg("test_dbType", test_dbType)
       .arg("test_dbName", test_dbName)
@@ -53,104 +53,102 @@
       .arg("algoid", "8")
       .arg("goalParam", params("goalParam"))
       .arg("kParam", params("kParam"))
-      .source(U2iActions(appId=5, dbType=test_dbType, dbName=test_dbName, dbHost=test_dbHost, dbPort=test_dbPort).getSource, testU2i)
+      .source(U2iActions(appId = 5, dbType = test_dbType, dbName = test_dbName, dbHost = test_dbHost, dbPort = test_dbPort).getSource, testU2i)
       .sink[(String, String)](Tsv(OfflineMetricFile(hdfsRoot, 2, 4, 5, 6, 8, "relevantItems.tsv"))) { outputBuffer =>
-        
+
         def sortItems(t: List[(String, String)]): List[(String, List[String])] = {
           t map (x => (x._1, x._2.split(",").toList.sorted))
         }
-        
+
         "correctly generates relevantItems for each user" in {
           // since DataPrepator may generate relevantItems list in any order
           // and the order is not important,
           // sort the list first so we can compare it with expected result.
           val output = sortItems(outputBuffer.toList)
           val expected = sortItems(relevantItems)
-          
+
           println(outputBuffer.toList)
           //println(output)
           println(expected)
-          
+
           output must containTheSameElementsAs(expected)
-          
+
         }
       }
       .run
       .finish
   }
-  
-    
-    
-    val testU2i = List(
-      // u0
-      (Rate, "u0", "i0", "123450", "4"),
-      (View, "u0", "i1", "123457", "PIO_NONE"),
-      (Dislike, "u0", "i2", "123458", "PIO_NONE"),
-      (View, "u0", "i3", "123459", "PIO_NONE"),
-      (View, "u0", "i7", "123460", "PIO_NONE"),
-      (Rate, "u0", "i8", "123450", "5"), 
-      
-      // u1
-      (View, "u1", "i0", "123457", "PIO_NONE"),
-      (Conversion, "u1", "i1", "123458", "PIO_NONE"),
-      (Conversion, "u1", "i4", "123457", "PIO_NONE"),
-      (Conversion, "u1", "i5", "123456", "PIO_NONE"),
-      (Rate, "u1", "i7", "123456", "3"),
-      (Rate, "u1", "i8", "123454", "3"),
-      (Rate, "u1", "i9", "123453", "4"),
-      
-      // u2
-      (View, "u2", "i3", "123458", "PIO_NONE"),
-      (Conversion, "u2", "i4", "123451", "PIO_NONE"),
-      (Conversion, "u2", "i5", "123452", "PIO_NONE"),
-      (Rate, "u2", "i6", "123452", "5"))
 
-    "itemrec.map MAPAtKDataPreparator with goal = view" should {
-      val params = Map("goalParam" -> "view", "kParam" -> "4")
-      val relevantItems = List(
-        ("u0", "i1,i3,i7"),
-        ("u1", "i0"),
-        ("u2", "i3"))
+  val testU2i = List(
+    // u0
+    (Rate, "u0", "i0", "123450", "4"),
+    (View, "u0", "i1", "123457", "PIO_NONE"),
+    (Dislike, "u0", "i2", "123458", "PIO_NONE"),
+    (View, "u0", "i3", "123459", "PIO_NONE"),
+    (View, "u0", "i7", "123460", "PIO_NONE"),
+    (Rate, "u0", "i8", "123450", "5"),
 
-      test(params, testU2i, relevantItems)
-    }
-    
-    "itemrec.map MAPAtKDataPreparator with goal = conversion" should {
-      val params = Map("goalParam" -> "conversion", "kParam" -> "8")
-      val relevantItems = List(
-        ("u1", "i1,i4,i5"),
-        ("u2", "i4,i5"))
-            
-      test(params, testU2i, relevantItems)
-    }
+    // u1
+    (View, "u1", "i0", "123457", "PIO_NONE"),
+    (Conversion, "u1", "i1", "123458", "PIO_NONE"),
+    (Conversion, "u1", "i4", "123457", "PIO_NONE"),
+    (Conversion, "u1", "i5", "123456", "PIO_NONE"),
+    (Rate, "u1", "i7", "123456", "3"),
+    (Rate, "u1", "i8", "123454", "3"),
+    (Rate, "u1", "i9", "123453", "4"),
 
-    "itemrec.map MAPAtKDataPreparator with goal = rate >= 3" should {
-      val params = Map("goalParam" -> "rate3", "kParam" -> "8")
-      val relevantItems = List(
-        ("u0", "i0,i8"),
-        ("u1", "i7,i8,i9"),
-        ("u2", "i6"))
-        
-      test(params, testU2i, relevantItems)
-    }
+    // u2
+    (View, "u2", "i3", "123458", "PIO_NONE"),
+    (Conversion, "u2", "i4", "123451", "PIO_NONE"),
+    (Conversion, "u2", "i5", "123452", "PIO_NONE"),
+    (Rate, "u2", "i6", "123452", "5"))
 
-    "itemrec.map MAPAtKDataPreparator with goal = rate >= 4" should {
-      val params = Map("goalParam" -> "rate4", "kParam" -> "8")
-      val relevantItems = List(
-        ("u0", "i0,i8"),
-        ("u1", "i9"),
-        ("u2", "i6"))
-        
-      test(params, testU2i, relevantItems)
-    }
+  "itemrec.map MAPAtKDataPreparator with goal = view" should {
+    val params = Map("goalParam" -> "view", "kParam" -> "4")
+    val relevantItems = List(
+      ("u0", "i1,i3,i7"),
+      ("u1", "i0"),
+      ("u2", "i3"))
 
-    "itemrec.map MAPAtKDataPreparator with goal = rate >= 5" should {
-      val params = Map("goalParam" -> "rate5", "kParam" -> "8")
-      val relevantItems = List(
-        ("u0", "i8"),
-        ("u2", "i6"))
-        
-      test(params, testU2i, relevantItems)
-    }
-  
+    test(params, testU2i, relevantItems)
+  }
+
+  "itemrec.map MAPAtKDataPreparator with goal = conversion" should {
+    val params = Map("goalParam" -> "conversion", "kParam" -> "8")
+    val relevantItems = List(
+      ("u1", "i1,i4,i5"),
+      ("u2", "i4,i5"))
+
+    test(params, testU2i, relevantItems)
+  }
+
+  "itemrec.map MAPAtKDataPreparator with goal = rate >= 3" should {
+    val params = Map("goalParam" -> "rate3", "kParam" -> "8")
+    val relevantItems = List(
+      ("u0", "i0,i8"),
+      ("u1", "i7,i8,i9"),
+      ("u2", "i6"))
+
+    test(params, testU2i, relevantItems)
+  }
+
+  "itemrec.map MAPAtKDataPreparator with goal = rate >= 4" should {
+    val params = Map("goalParam" -> "rate4", "kParam" -> "8")
+    val relevantItems = List(
+      ("u0", "i0,i8"),
+      ("u1", "i9"),
+      ("u2", "i6"))
+
+    test(params, testU2i, relevantItems)
+  }
+
+  "itemrec.map MAPAtKDataPreparator with goal = rate >= 5" should {
+    val params = Map("goalParam" -> "rate5", "kParam" -> "8")
+    val relevantItems = List(
+      ("u0", "i8"),
+      ("u2", "i6"))
+
+    test(params, testU2i, relevantItems)
+  }
+
 }
\ No newline at end of file
diff --git a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKTest.scala b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKTest.scala
index 208b3d4..8c69689 100644
--- a/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKTest.scala
+++ b/process/engines/itemrec/evaluations/hadoop/scalding/metrics/map/src/test/scala/io/prediction/metrics/scalding/itemrec/map/MAPAtKTest.scala
@@ -4,31 +4,30 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{OfflineMetricFile}
+import io.prediction.commons.filepath.{ OfflineMetricFile }
 import io.prediction.commons.scalding.settings.OfflineEvalResults
 
 class MAPAtKTest extends Specification with TupleConversions {
 
   def test(
-      evalid: Int, metricid: Int, algoid: Int, iteration: Int, splitset: String,
-      params: Map[String, String],
-      relevantItems: List[(String, String)],
-      topKItems: List[(String, String)],
-      averagePrecision: List[(String, Double)],
-      meanAveragePrecision: Double
-      ) = {
-    
+    evalid: Int, metricid: Int, algoid: Int, iteration: Int, splitset: String,
+    params: Map[String, String],
+    relevantItems: List[(String, String)],
+    topKItems: List[(String, String)],
+    averagePrecision: List[(String, Double)],
+    meanAveragePrecision: Double) = {
+
     val dbType = "file"
     val dbName = "testpath/"
     val dbHost = None
     val dbPort = None
     val hdfsRoot = "testroot/"
-    
+
     val appid = 20
     val engineid = 1
 
     val offlineEvalResults = List((evalid, metricid, algoid, meanAveragePrecision, iteration, splitset))
-    
+
     JobTest("io.prediction.metrics.scalding.itemrec.map.MAPAtK")
       .arg("dbType", dbType)
       .arg("dbName", dbName)
@@ -44,21 +43,21 @@
       .source(Tsv(OfflineMetricFile(hdfsRoot, appid, engineid, evalid, metricid, algoid, "relevantItems.tsv")), relevantItems)
       .source(Tsv(OfflineMetricFile(hdfsRoot, appid, engineid, evalid, metricid, algoid, "topKItems.tsv")), topKItems)
       .sink[(String, Double)](Tsv(OfflineMetricFile(hdfsRoot, appid, engineid, evalid, metricid, algoid, "averagePrecision.tsv"))) { outputBuffer =>
-         // only compare double up to 6 decimal places
-         def roundingData(orgList: List[(String, Double)]) = {
-           orgList map { x =>
-             val (t1, t2) = x
-             // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
-             // (eg. 3.5 vs 3.499999999999, 0.6666666666 vs 0.666666667)
+        // only compare double up to 6 decimal places
+        def roundingData(orgList: List[(String, Double)]) = {
+          orgList map { x =>
+            val (t1, t2) = x
+            // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
+            // (eg. 3.5 vs 3.499999999999, 0.6666666666 vs 0.666666667)
             (t1, BigDecimal(t2).setScale(6, BigDecimal.RoundingMode.HALF_UP).toDouble)
           }
         }
-  
+
         "correctly calculate Average Precision for each user" in {
           roundingData(outputBuffer.toList) must containTheSameElementsAs(roundingData(averagePrecision))
         }
       }
-      .sink[(Int, Int, Int, Double, Int, String)](OfflineEvalResults(dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource) { outputBuffer =>
+      .sink[(Int, Int, Int, Double, Int, String)](OfflineEvalResults(dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource) { outputBuffer =>
         def roundingData(orgList: List[(Int, Int, Int, Double, Int, String)]) = {
           orgList map { x =>
             val (t1, t2, t3, t4, t5, t6) = x
@@ -70,19 +69,19 @@
         "correctly write MAP@k score into a file" in {
           roundingData(outputBuffer.toList) must containTheSameElementsAs(roundingData(offlineEvalResults))
         }
-      } 
+      }
       .run
       .finish
   }
-   
+
   "itemrec.map" should {
     val relevantItems = List(("u0", "i3,i4,i5"), ("u1", "i0,i1"), ("u3", "i0"))
     val topKItems = List(("u0", "i6,i4,i3,i5,i0"), ("u1", "i1,i4,i5,i0"))
     val averagePrecision = List(("u0", 0.638888888), ("u1", 0.75), ("u3", 0.0))
     val meanAveragePrecision = 0.4629629333333
-    
+
     val params = Map("kParam" -> "5")
-    
+
     test(2, 3, 4, 11, "",
       params, relevantItems, topKItems, averagePrecision, meanAveragePrecision)
   }
@@ -92,11 +91,11 @@
     val topKItems = List(("u0", "i6,i4,i3,i5,i0"), ("u1", "i1,i4,i5,i0"))
     val averagePrecision = List(("u0", 0.638888888), ("u1", 0.75), ("u3", 0.0))
     val meanAveragePrecision = 0.4629629333333
-    
+
     val params = Map("kParam" -> "5")
-    
+
     test(12, 2, 54, 9, "validation",
       params, relevantItems, topKItems, averagePrecision, meanAveragePrecision)
   }
-  
+
 }
\ No newline at end of file
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/build.sbt b/process/engines/itemrec/evaluations/scala/topkitems/build.sbt
deleted file mode 100644
index 9feaf43..0000000
--- a/process/engines/itemrec/evaluations/scala/topkitems/build.sbt
+++ /dev/null
@@ -1,18 +0,0 @@
-import AssemblyKeys._
-
-assemblySettings
-
-name := "predictionio-process-itemrec-evaluations-topkitems"
-
-libraryDependencies ++= Seq(
-  "ch.qos.logback" % "logback-classic" % "1.0.9",
-  "ch.qos.logback" % "logback-core" % "1.0.9",
-  "com.github.scala-incubator.io" %% "scala-io-core" % "0.4.2",
-  "com.github.scala-incubator.io" %% "scala-io-file" % "0.4.2",
-  "com.typesafe" % "config" % "1.0.0",
-  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
-
-excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
-  val excludes = Set("minlog-1.2.jar")
-  cp filter { jar => excludes(jar.data.getName)}
-}
diff --git a/process/engines/itemrec/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemrec/topkitems/TopKItems.scala b/process/engines/itemrec/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemrec/topkitems/TopKItems.scala
deleted file mode 100644
index 3823971..0000000
--- a/process/engines/itemrec/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemrec/topkitems/TopKItems.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package io.prediction.evaluations.itemrec.topkitems
-
-import io.prediction.commons._
-import io.prediction.commons.filepath.OfflineMetricFile
-import io.prediction.output.itemrec.ItemRecAlgoOutput
-
-import com.typesafe.config.ConfigFactory
-import grizzled.slf4j.Logger
-import java.io.File
-import scala.sys.process._
-import scalax.io._
-
-object TopKItems {
-  def main(args: Array[String]) {
-    val logger = Logger(TopKItems.getClass)
-
-    val config = ConfigFactory.load
-
-    val evalid = config.getInt("evalid")
-    val algoid = config.getInt("algoid")
-    val metricid = config.getInt("metricid")
-    val hdfsRoot = config.getString("hdfsroot")
-    val k = config.getInt("k")
-
-    val commonsConfig = new Config
-
-    /** Try search path if hadoop home is not set. */
-    val hadoopCommand = commonsConfig.settingsHadoopHome map { h => h+"/bin/hadoop" } getOrElse { "hadoop" }
-
-    val apps = commonsConfig.getSettingsApps
-    val engines = commonsConfig.getSettingsEngines
-    val algos = commonsConfig.getSettingsAlgos
-    val offlineEvals = commonsConfig.getSettingsOfflineEvals
-    val users = commonsConfig.getAppdataTrainingUsers
-
-    val algo = algos.get(algoid).get
-    val offlineEval = offlineEvals.get(evalid).get
-    val engine = engines.get(offlineEval.engineid).get
-    val app = apps.get(engine.appid).get.copy(id = evalid)
-
-    val tmpFile = File.createTempFile("pdio-", ".topk", new File(commonsConfig.settingsLocalTempRoot))
-    tmpFile.deleteOnExit
-    val output: Output = Resource.fromFile(tmpFile)
-    logger.info("Dumping data to temporary file %s...".format(tmpFile))
-
-    var userCount = 0
-    users.getByAppid(evalid) foreach { u =>
-      val topKItems = ItemRecAlgoOutput.output(u.id, k, None, None, None, None)(app, engine, algo, Some(offlineEval))
-      if (topKItems.length > 0) {
-        userCount += 1
-        output.write("%d_%s\t%s\n".format(evalid, u.id, topKItems.map(iid => "%d_%s".format(evalid, iid)).mkString(",")))
-      }
-    }
-    logger.info("Found %d user(s) with non-zero top-K items".format(userCount))
-
-    val hdfsFile = OfflineMetricFile(hdfsRoot, engine.appid, engine.id, evalid, metricid, algoid, "topKItems.tsv")
-
-    val rmCommand = s"$hadoopCommand fs -rm $hdfsFile"
-    logger.info("Executing '%s'...".format(rmCommand))
-    rmCommand.!
-
-    val copyCommand = s"$hadoopCommand fs -copyFromLocal $tmpFile $hdfsFile"
-    logger.info("Executing '%s'...".format(copyCommand))
-    copyCommand.!
-
-    logger.info("Finished")
-  }
-}
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparator.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparator.scala
index 417370b..5dd55dc 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparator.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparator.scala
@@ -2,23 +2,23 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Items, U2iActions}
+import io.prediction.commons.scalding.appdata.{ Items, U2iActions }
 import io.prediction.commons.filepath.DataFile
 
 /**
  * Source: appdata DB (items, u2iActions)
  * Sink: selectedItems.tsv, ratings.tsv
  * Descripton:
- *   Prepare data for itemsim.itemsimcf algo. Read from appdata DB and store selected items 
+ *   Prepare data for itemsim.itemsimcf algo. Read from appdata DB and store selected items
  *   and ratings into a file.
  *   (appdata store -> DataPreparator -> HDFS)
- * 
+ *
  * Required args:
  * --dbType: <string> (eg. mongodb) (see --dbHost, --dbPort)
  * --dbName: <string> appdata database name. (eg predictionio_appdata, or predictionio_training_appdata)
- * 
+ *
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --algoid: <int>
@@ -28,22 +28,22 @@
  * --dislikeParam: <string>
  * --conversionParam: <string>
  * --conflictParam: <string>. (latest/highest/lowest)
- * 
+ *
  * Optional args:
  * --dbHost: <string> (eg. "127.0.0.1")
  * --dbPort: <int> (eg. 27017)
- * 
+ *
  * --itypes: <string separated by white space>. eg "--itypes type1 type2". If no --itypes specified, then ALL itypes will be used.
  * --evalid: <int>. Offline Evaluation if evalid is specified
  * --debug: <String>. "test" - for testing purpose
- * 
+ *
  * Example:
  * Batch:
  * scald.rb --hdfs-local io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator --dbType mongodb --dbName appdata --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --algoid 9 --itypes t2 --viewParam 2 --likeParam 5 --dislikeParam 1 --conversionParam 4 --conflictParam latest
- * 
+ *
  * Offline Eval:
  * scald.rb --hdfs-local io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator --dbType mongodb --dbName training_appdata --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --algoid 9 --itypes t2 --viewParam 2 --likeParam 5 --dislikeParam 1 --conversionParam 4 --conflictParam latest --evalid 15
- * 
+ *
  */
 class DataPreparator(args: Args) extends Job(args) {
 
@@ -54,18 +54,18 @@
   val dbNameArg = args("dbName")
   val dbHostArg = args.optional("dbHost")
   val dbPortArg = args.optional("dbPort") map (x => x.toInt) // becomes Option[Int]
-  
+
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val algoidArg = args("algoid").toInt
   val evalidArg = args.optional("evalid") map (x => x.toInt)
   val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
-  
+
   val preItypesArg = args.list("itypes")
   val itypesArg: Option[List[String]] = if (preItypesArg.mkString(",").length == 0) None else Option(preItypesArg)
-  
+
   // determine how to map actions to rating values
   def getActionParam(name: String): Option[Int] = {
     val actionParam: Option[Int] = args(name) match {
@@ -79,21 +79,21 @@
   val likeParamArg: Option[Int] = getActionParam("likeParam")
   val dislikeParamArg: Option[Int] = getActionParam("dislikeParam")
   val conversionParamArg: Option[Int] = getActionParam("conversionParam")
-  
+
   // When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, 
   // determine which action will be considered as final preference.
   final val CONFLICT_LATEST: String = "latest" // use latest action
   final val CONFLICT_HIGHEST: String = "highest" // use the one with highest score
   final val CONFLICT_LOWEST: String = "lowest" // use the one with lowest score
-  
+
   val conflictParamArg: String = args("conflictParam")
 
   // check if the conflictParam is valid
-  require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " +conflictParamArg +" is not valid.")
+  require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " + conflictParamArg + " is not valid.")
 
   val debugArg = args.list("debug")
   val DEBUG_TEST = debugArg.contains("test") // test mode
-  
+
   /**
    * constants
    */
@@ -110,14 +110,14 @@
    */
   // get appdata
   // NOTE: if OFFLINE_EVAL, read from training set, and use evalid as appid when read Items and U2iActions
-  val trainingAppid = if (OFFLINE_EVAL) evalidArg.get else appidArg 
-  
+  val trainingAppid = if (OFFLINE_EVAL) evalidArg.get else appidArg
+
   // get items data
-  val items = Items(appId=trainingAppid, itypes=itypesArg, 
-      dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
-  
-  val u2i = U2iActions(appId=trainingAppid, 
-      dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg).readData('action, 'uid, 'iid, 't, 'v)
+  val items = Items(appId = trainingAppid, itypes = itypesArg,
+    dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
+
+  val u2i = U2iActions(appId = trainingAppid,
+    dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg).readData('action, 'uid, 'iid, 't, 'v)
 
   /**
    * sink
@@ -150,7 +150,7 @@
     }
     .map(('action, 'v, 't) -> ('rating, 'tLong)) { fields: (String, Option[String], String) =>
       val (action, v, t) = fields
-      
+
       // convert actions into rating value based on "action" and "v" fields
       val rating: Int = action match {
         case ACTION_RATE => try {
@@ -161,19 +161,19 @@
             1
           }
         }
-        case ACTION_LIKE => likeParamArg.getOrElse{
+        case ACTION_LIKE => likeParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_DISLIKE => dislikeParamArg.getOrElse{
+        case ACTION_DISLIKE => dislikeParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_VIEW => viewParamArg.getOrElse{
+        case ACTION_VIEW => viewParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_CONVERSION => conversionParamArg.getOrElse{
+        case ACTION_CONVERSION => conversionParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
@@ -182,36 +182,35 @@
           1
         }
       }
-      
+
       (rating, t.toLong)
-    } 
-    .then( resolveConflict('uid, 'iid, 'tLong, 'rating, conflictParamArg) _ )
+    }
+    .then(resolveConflict('uid, 'iid, 'tLong, 'rating, conflictParamArg) _)
     .project('uid, 'iid, 'rating)
     .write(ratingsSink)
- 
+
   // Also store the selected items into DataFile for later model construction usage.
   items.mapTo(('iidx, 'itypes, 'starttime, 'endtime) -> ('iidx, 'itypes, 'starttime, 'endtime)) { fields: (String, List[String], Long, Option[Long]) =>
     val (iidx, itypes, starttime, endtime) = fields
-    
-      // NOTE: convert List[String] into comma-separated String
-      // NOTE: endtime is optional
-      (iidx, itypes.mkString(","), starttime, endtime.map(_.toString).getOrElse("PIO_NONE"))
-    }.write(selectedItemsSink)
-  
+
+    // NOTE: convert List[String] into comma-separated String
+    // NOTE: endtime is optional
+    (iidx, itypes.mkString(","), starttime, endtime.map(_.toString).getOrElse("PIO_NONE"))
+  }.write(selectedItemsSink)
+
   /**
    * function to resolve conflicting actions of same uid-iid pair.
    */
   def resolveConflict(uidField: Symbol, iidField: Symbol, tfield: Symbol, ratingField: Symbol, conflictSolution: String)(p: RichPipe): RichPipe = {
- 
+
     // NOTE: sortBy() sort from smallest to largest. use reverse to pick the largest one.
     val dataPipe = conflictSolution match {
       case CONFLICT_LATEST => p.groupBy(uidField, iidField) { _.sortBy(tfield).reverse.take(1) } // take latest one (largest t)
       case CONFLICT_HIGHEST => p.groupBy(uidField, iidField) { _.sortBy(ratingField).reverse.take(1) } // take highest rating
       case CONFLICT_LOWEST => p.groupBy(uidField, iidField) { _.sortBy(ratingField).take(1) } // take lowest rating
     }
-   
+
     dataPipe
   }
-    
-  
+
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarity.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarity.scala
index 2651fa3..cf549c0 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarity.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarity.scala
@@ -4,17 +4,17 @@
 
 import cascading.pipe.Pipe
 
-import io.prediction.commons.filepath.{DataFile, AlgoFile}
+import io.prediction.commons.filepath.{ DataFile, AlgoFile }
 
 /**
  * Source: ratings.tsv
  * Sink: itemSimScores.tsv
  * Descripton:
  *   Compute item similarity score.
- * 
+ *
  * Required args:
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --algoid: <int>
@@ -24,29 +24,29 @@
  * --priorCorrelParam: <double>. for regularization. correlation of these virtual pairs
  * --minNumRatersParam: <int>. min number of raters of the item
  * --maxNumRatersParam: <int> max number of raters of the item
- * --minIntersectionParam: <int>. min number of co-rater users between 2 simliar items 
+ * --minIntersectionParam: <int>. min number of co-rater users between 2 simliar items
  * --numSimilarItems: <int>. number of similar items to be generated
  *
  * Optional args:
  * --evalid: <int>. Offline Evaluation if evalid is specified
- * 
+ *
  * Example:
  * scald.rb --hdfs-local io.prediction.algorithms.scalding.itemsim.itemsimcf.ItemSimilarity --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 2 --algoid 8 --measureParam correl --priorCountParam 20 --priorCorrelParam 0.05
  */
 class ItemSimilarity(args: Args) extends VectorSimilarities(args) {
-  
+
   // args
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val algoidArg = args("algoid").toInt
   val evalidArg = args.optional("evalid") map (x => x.toInt)
-  
+
   val measureParamArg = args("measureParam")
   val priorCountParamArg = args("priorCountParam").toInt
   val priorCorrelParamArg = args("priorCorrelParam").toDouble
-  
+
   val minNumRatersParamArg = args("minNumRatersParam").toInt
   val maxNumRatersParamArg = args("maxNumRatersParam").toInt
   val minIntersectionParamArg = args("minIntersectionParam").toInt
@@ -54,26 +54,26 @@
 
   // override VectorSimilarities param
   override val MEASURE: String = measureParamArg
-  
+
   override val PRIOR_COUNT: Int = priorCountParamArg
-  
+
   override val PRIOR_CORRELATION: Double = priorCorrelParamArg
-  
+
   override val MIN_NUM_RATERS: Int = minNumRatersParamArg
-  
+
   override val MAX_NUM_RATERS: Int = maxNumRatersParamArg
-  
+
   override val MIN_INTERSECTION: Int = minIntersectionParamArg
-  
-  override def input(userField : Symbol, itemField : Symbol, ratingField : Symbol): Pipe = {
+
+  override def input(userField: Symbol, itemField: Symbol, ratingField: Symbol): Pipe = {
     Tsv(DataFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "ratings.tsv")).read
-      .mapTo((0, 1, 2) -> (userField, itemField, ratingField)) {fields: (String, String, Double) => fields}
-    
+      .mapTo((0, 1, 2) -> (userField, itemField, ratingField)) { fields: (String, String, Double) => fields }
+
   }
-  
+
   // start computation
   vectorSimilaritiesAlgo('iid, 'simiid, 'score)
     .groupBy('iid) { _.sortBy('score).reverse.take(numSimilarItemsArg) }
     .write(Tsv(AlgoFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "itemSimScores.tsv")))
-  
+
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructor.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructor.scala
index f792d41..c988630 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructor.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructor.scala
@@ -2,25 +2,25 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{DataFile, AlgoFile}
+import io.prediction.commons.filepath.{ DataFile, AlgoFile }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
 
 /**
- * Source: 
+ * Source:
  *   selectedItems.tsv
  *   itemSimScores.tsv
- * Sink: 
+ * Sink:
  *   itemSimScores DB
  * Description:
  *   Read the itemSimScores.tsv and get additional attributes from selectedItems.tsv for each similiar items.
  *   Then write the result to model DB.
- *   
+ *
  * Required args:
  * --dbType: <string> modeldata DB type (eg. mongodb) (see --dbHost, --dbPort)
  * --dbName: <string> (eg. predictionio_modeldata)
- * 
+ *
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --algoid: <int>
@@ -30,15 +30,15 @@
  * Optionsl args:
  * --dbHost: <string> (eg. "127.0.0.1")
  * --dbPort: <int> (eg. 27017)
- * 
+ *
  * --evalid: <int>. Offline Evaluation if evalid is specified
  * --debug: <String>. "test" - for testing purpose
- * 
+ *
  * Example:
  * scald.rb --hdfs-local io.prediction.algorithms.scalding.itemsim.itemsimcf.ModelConstructor --dbType mongodb --dbName modeldata --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 2 --algoid 8 --modelSet false
  */
 class ModelConstructor(args: Args) extends Job(args) {
-  
+
   /**
    * parse args
    */
@@ -46,18 +46,18 @@
   val dbNameArg = args("dbName")
   val dbHostArg = args.optional("dbHost")
   val dbPortArg = args.optional("dbPort") map (x => x.toInt)
-  
+
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val algoidArg = args("algoid").toInt
   val evalidArg = args.optional("evalid") map (x => x.toInt)
   val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
-  
+
   val debugArg = args.list("debug")
   val DEBUG_TEST = debugArg.contains("test") // test mode
-  
+
   val modelSetArg = args("modelSet").toBoolean
   val recommendationTimeArg = args("recommendationTime").toLong
 
@@ -66,7 +66,7 @@
    */
   val score = Tsv(AlgoFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "itemSimScores.tsv")).read
     .mapTo((0, 1, 2) -> ('iid, 'simiid, 'score)) { fields: (String, String, Double) => fields }
-  
+
   val items = Tsv(DataFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "selectedItems.tsv")).read
     .mapTo((0, 1, 2, 3) -> ('iidx, 'itypes, 'starttime, 'endtime)) { fields: (String, String, Long, String) =>
       val (iidx, itypes, starttime, endtime) = fields // itypes are comma-separated String
@@ -87,7 +87,7 @@
 
       (iidx, itypes.split(",").toList, starttime, endtimeOpt)
     }
-  
+
   /**
    * process & output
    */
@@ -107,9 +107,9 @@
     }
     .project('iid, 'simiid, 'score, 'itypes)
     .groupBy('iid) { _.sortBy('score).reverse.toList[(String, Double, List[String])](('simiid, 'score, 'itypes) -> 'simiidsList) }
-    
-  val src = ItemSimScores(dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg, algoid=algoidArg, modelset=modelSetArg)
-  
-  p.then( src.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _ )
-  
+
+  val src = ItemSimScores(dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg, algoid = algoidArg, modelset = modelSetArg)
+
+  p.then(src.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _)
+
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/VectorSimilarities.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/VectorSimilarities.scala
index b4ea9fa..cf19bef 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/VectorSimilarities.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/main/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/VectorSimilarities.scala
@@ -1,7 +1,7 @@
 package io.prediction.algorithms.scalding.itemsim.itemsimcf
 
 /**
- * CREDIT: this file is based on Edwin Chen's VectorSimliarities.scala 
 + * CREDIT: this file is based on Edwin Chen's VectorSimilarities.scala
  * with minor modifications. Thanks Edwin Chen for posting this awesome code!
  * https://github.com/echen/scaldingale
  */
@@ -11,7 +11,7 @@
 import cascading.pipe.Pipe
 
 /**
- * Given a dataset of ratings, how can we compute the similarity 
+ * Given a dataset of ratings, how can we compute the similarity
  * between pairs of items?
  *
  * This class defines an abstract ratings input format. Subclasses
@@ -25,25 +25,25 @@
  * and Jaccard similarity) are then applied to these vectors.
  *
  * @author Edwin Chen
- * 
+ *
  * modified by Tappingstone
  */
-abstract class VectorSimilarities(args : Args) extends Job(args) {
+abstract class VectorSimilarities(args: Args) extends Job(args) {
 
   // parameter to configure simliarity measurement functions
   final val CORREL_MEASURE: String = "correl" // this is Pearson
   final val COSINE_MEASURE: String = "cosine"
   final val JACCARD_MEASURE: String = "jaccard"
-  
+
   // subclass should override this to change measurement function
   val MEASURE: String = CORREL_MEASURE //default measure
-  
+
   /**
    * Parameters to regularize correlation.
    */
   val PRIOR_COUNT: Int = 10 // default
   val PRIOR_CORRELATION: Double = 0
-  
+
   /**
    * Filters to speed up computation and reduce noise.
    * Subclasses should probably override these, based on the actual data.
@@ -51,23 +51,23 @@
   val MIN_NUM_RATERS: Int = 3
   val MAX_NUM_RATERS: Int = 10000
   val MIN_INTERSECTION: Int = 1
-  
+
   /**
    * Subclasses should override this to define their own input.
    * This method should return a Pipe using the Symbols in parameters
    */
-  def input(userField : Symbol, itemField : Symbol, ratingField : Symbol) : Pipe
-  
-// *************************
-// * STEPS OF THE COMPUTATION
-// *************************  
+  def input(userField: Symbol, itemField: Symbol, ratingField: Symbol): Pipe
+
+  // *************************
+  // * STEPS OF THE COMPUTATION
+  // *************************  
 
   def vectorSimilaritiesAlgo(itemField: Symbol, simItemField: Symbol, scoreField: Symbol): Pipe = {
     /**
      * Read in the input and give each field a type and name.
      */
-    val ratings = input('user, 'item, 'rating)  
-  
+    val ratings = input('user, 'item, 'rating)
+
     /**
      * Also keep track of the total number of people who rated an item.
      */
@@ -78,26 +78,26 @@
         // Rename, since Scalding currently requires both sides of a join to have distinctly named fields.
         .rename('item -> 'itemX)
         .joinWithLarger('itemX -> 'item, ratings).discard('itemX)
-        .filter('numRaters) { numRaters : Long => numRaters >= MIN_NUM_RATERS && numRaters <= MAX_NUM_RATERS }
-  
+        .filter('numRaters) { numRaters: Long => numRaters >= MIN_NUM_RATERS && numRaters <= MAX_NUM_RATERS }
+
     /**
      * Make a dummy copy of the ratings, so we can do a self-join.
      */
-    val ratings2 = 
+    val ratings2 =
       ratingsWithSize
         .rename(('user, 'item, 'rating, 'numRaters) -> ('user2, 'item2, 'rating2, 'numRaters2))
-  
+
     /**
-     * Join the two rating streams on their user fields, 
-     * in order to find all pairs of items that a user has rated.  
+     * Join the two rating streams on their user fields,
+     * in order to find all pairs of items that a user has rated.
      */
     val ratingPairs =
       ratingsWithSize
         .joinWithSmaller('user -> 'user2, ratings2)
         // De-dupe so that we don't calculate similarity of both (A, B) and (B, A).
-        .filter('item, 'item2) { items : (String, String) => items._1 < items._2 }
+        .filter('item, 'item2) { items: (String, String) => items._1 < items._2 }
         .project('item, 'rating, 'numRaters, 'item2, 'rating2, 'numRaters2)
-  
+
     /**
      * Compute dot products, norms, sums, and sizes of the rating vectors.
      */
@@ -105,109 +105,109 @@
       ratingPairs
         // Compute (x*y, x^2, y^2), which we need for dot products and norms.
         .map(('rating, 'rating2) -> ('ratingProd, 'ratingSq, 'rating2Sq)) {
-          ratings : (Double, Double) =>
-          (ratings._1 * ratings._2, scala.math.pow(ratings._1, 2), scala.math.pow(ratings._2, 2))
+          ratings: (Double, Double) =>
+            (ratings._1 * ratings._2, scala.math.pow(ratings._1, 2), scala.math.pow(ratings._2, 2))
         }
-        .groupBy('item, 'item2) { 
-    			_
-    			  .size
-    				.sum('ratingProd -> 'dotProduct)
-    				.sum('rating -> 'ratingSum)
-    				.sum('rating2 -> 'rating2Sum)
-    				.sum('ratingSq -> 'ratingNormSq)
-    				.sum('rating2Sq -> 'rating2NormSq)
-    				.max('numRaters) // Simply an easy way to make sure the numRaters field stays.
-    				.max('numRaters2)
-    		}
-    		.filter('size) { size : Long => size >= MIN_INTERSECTION }
-  
+        .groupBy('item, 'item2) {
+          _
+            .size
+            .sum('ratingProd -> 'dotProduct)
+            .sum('rating -> 'ratingSum)
+            .sum('rating2 -> 'rating2Sum)
+            .sum('ratingSq -> 'ratingNormSq)
+            .sum('rating2Sq -> 'rating2NormSq)
+            .max('numRaters) // Simply an easy way to make sure the numRaters field stays.
+            .max('numRaters2)
+        }
+        .filter('size) { size: Long => size >= MIN_INTERSECTION }
+
     /**
      * Calculate similarity between rating vectors using similarity measures
      * like correlation, cosine similarity, and Jaccard similarity.
      */
     val similaritiesScore =
       vectorCalcs
-        .map(('size, 'dotProduct, 'ratingSum, 'rating2Sum, 'ratingNormSq, 'rating2NormSq, 'numRaters, 'numRaters2) -> 
+        .map(('size, 'dotProduct, 'ratingSum, 'rating2Sum, 'ratingNormSq, 'rating2NormSq, 'numRaters, 'numRaters2) ->
           'score) {
-            
-          fields : (Double, Double, Double, Double, Double, Double, Double, Double) =>
-                  
-          val (size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq, numRaters, numRaters2) = fields
-          
-          val score = MEASURE match {
-            case CORREL_MEASURE => correlation(size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq)
-            case COSINE_MEASURE => cosineSimilarity(dotProduct, scala.math.sqrt(ratingNormSq), scala.math.sqrt(rating2NormSq))
-            case JACCARD_MEASURE => jaccardSimilarity(size, numRaters, numRaters2)
-            case _ => 0.0 // all other invalid cases
-            
-          }
-          
-          // regularization
-          // TODO: different measurement may have different way to do regularization
-          val regScore = regularized(size, score, PRIOR_COUNT, PRIOR_CORRELATION)
-          
-          regScore
+
+          fields: (Double, Double, Double, Double, Double, Double, Double, Double) =>
+
+            val (size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq, numRaters, numRaters2) = fields
+
+            val score = MEASURE match {
+              case CORREL_MEASURE => correlation(size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq)
+              case COSINE_MEASURE => cosineSimilarity(dotProduct, scala.math.sqrt(ratingNormSq), scala.math.sqrt(rating2NormSq))
+              case JACCARD_MEASURE => jaccardSimilarity(size, numRaters, numRaters2)
+              case _ => 0.0 // all other invalid cases
+
+            }
+
+            // regularization
+            // TODO: different measurement may have different way to do regularization
+            val regScore = regularized(size, score, PRIOR_COUNT, PRIOR_CORRELATION)
+
+            regScore
         }
-  
+
     // return score for each pair.
     val simScore1 = similaritiesScore
-        .mapTo(('item, 'item2, 'score) -> (itemField, simItemField, scoreField)) { fields: (String, String, Double) => fields }
-  
+      .mapTo(('item, 'item2, 'score) -> (itemField, simItemField, scoreField)) { fields: (String, String, Double) => fields }
+
     val simScore2 = similaritiesScore
-        .mapTo(('item2, 'item, 'score) -> (itemField, simItemField, scoreField)) { fields: (String, String, Double) => fields }
-  
+      .mapTo(('item2, 'item, 'score) -> (itemField, simItemField, scoreField)) { fields: (String, String, Double) => fields }
+
     // concatenate 2 simScore
     val simScoreCat = simScore1 ++ simScore2
-    
+
     simScoreCat
-  
-  }  
-// *************************
-// * SIMILARITY MEASURES
-// *************************
-  
+
+  }
+  // *************************
+  // * SIMILARITY MEASURES
+  // *************************
+
   /**
    * The correlation between two vectors A, B is
    *   cov(A, B) / (stdDev(A) * stdDev(B))
    *
    * This is equivalent to
-   *   [n * dotProduct(A, B) - sum(A) * sum(B)] / 
+   *   [n * dotProduct(A, B) - sum(A) * sum(B)] /
    *     sqrt{ [n * norm(A)^2 - sum(A)^2] [n * norm(B)^2 - sum(B)^2] }
    */
-  def correlation(size : Double, dotProduct : Double, ratingSum : Double, 
-    rating2Sum : Double, ratingNormSq : Double, rating2NormSq : Double) = {
-      
+  def correlation(size: Double, dotProduct: Double, ratingSum: Double,
+    rating2Sum: Double, ratingNormSq: Double, rating2NormSq: Double) = {
+
     val numerator = size * dotProduct - ratingSum * rating2Sum
     val denominator = scala.math.sqrt(size * ratingNormSq - ratingSum * ratingSum) * scala.math.sqrt(size * rating2NormSq - rating2Sum * rating2Sum)
-    
+
     // NOTE: check if denominator == 0
-    if (denominator == 0) 
+    if (denominator == 0)
       0.0
-    else 
+    else
       numerator / denominator
   }
-  
+
   /**
    * Regularize correlation by adding virtual pseudocounts over a prior:
    *   RegularizedCorrelation = w * ActualCorrelation + (1 - w) * PriorCorrelation
    * where w = # actualPairs / (# actualPairs + # virtualPairs).
    */
-  def regularized(size: Double, score: Double, virtualCount : Double, priorCorrelation : Double): Double = {
+  def regularized(size: Double, score: Double, virtualCount: Double, priorCorrelation: Double): Double = {
     if (virtualCount != 0) {
-       val w = size / (size + virtualCount)
-          
-       w * score + (1 - w) * priorCorrelation
+      val w = size / (size + virtualCount)
+
+      w * score + (1 - w) * priorCorrelation
     } else {
-          
+
       score
     }
   }
-  
+
   /**
    * The cosine similarity between two vectors A, B is
    *   dotProduct(A, B) / (norm(A) * norm(B))
    */
-  def cosineSimilarity(dotProduct : Double, ratingNorm : Double, rating2Norm : Double) = {
+  def cosineSimilarity(dotProduct: Double, ratingNorm: Double, rating2Norm: Double) = {
     dotProduct / (ratingNorm * rating2Norm)
   }
 
@@ -215,8 +215,8 @@
    * The Jaccard Similarity between two sets A, B is
    *   |Intersection(A, B)| / |Union(A, B)|
    */
-  def jaccardSimilarity(usersInCommon : Double, totalUsers1 : Double, totalUsers2 : Double) = {
+  def jaccardSimilarity(usersInCommon: Double, totalUsers1: Double, totalUsers2: Double) = {
     val union = totalUsers1 + totalUsers2 - usersInCommon
     usersInCommon / union
-  }  
+  }
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparatorTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparatorTest.scala
index 5d93031..320ea31 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparatorTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/DataPreparatorTest.scala
@@ -4,11 +4,11 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Items, U2iActions}
+import io.prediction.commons.scalding.appdata.{ Items, U2iActions }
 import io.prediction.commons.filepath.DataFile
 
 class DataPreparatorTest extends Specification with TupleConversions {
-  
+
   val Rate = "rate"
   val Like = "like"
   val Dislike = "dislike"
@@ -19,18 +19,18 @@
   val appid = 2
 
   def test(itypes: List[String], params: Map[String, String],
-      items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
-      u2iActions: List[(String, String, String, String, String)],
-      ratings: List[(String, String, Int)], 
-      selectedItems: List[(String, String, String, String)] // id, itypes, starttime, endtime
-      ) = {
-    
+    items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
+    u2iActions: List[(String, String, String, String, String)],
+    ratings: List[(String, String, Int)],
+    selectedItems: List[(String, String, String, String)] // id, itypes, starttime, endtime
+    ) = {
+
     val dbType = "file"
     val dbName = "testpath/"
     val dbHost = None //Option("testhost")
     val dbPort = None //Option(27017)
     val hdfsRoot = "testroot/"
-    
+
     JobTest("io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator")
       .arg("dbType", dbType)
       .arg("dbName", dbName)
@@ -47,8 +47,8 @@
       .arg("conversionParam", params("conversionParam"))
       .arg("conflictParam", params("conflictParam"))
       //.arg("debug", List("test")) // NOTE: test mode
-      .source(Items(appId=appid, itypes=Some(itypes), dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, items)
-      .source(U2iActions(appId=appid, dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, u2iActions)
+      .source(Items(appId = appid, itypes = Some(itypes), dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, items)
+      .source(U2iActions(appId = appid, dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, u2iActions)
       .sink[(String, String, Int)](Tsv(DataFile(hdfsRoot, 2, 4, 5, None, "ratings.tsv"))) { outputBuffer =>
         "correctly process and write data to ratings.tsv" in {
           outputBuffer.toList must containTheSameElementsAs(ratings)
@@ -61,23 +61,23 @@
       }
       .run
       .finish
-    
+
   }
-  
+
   /** no itypes specified */
   def testWithoutItypes(params: Map[String, String],
-      items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
-      u2iActions: List[(String, String, String, String, String)],
-      ratings: List[(String, String, Int)],
-      selectedItems: List[(String, String, String, String)] // id, itypes, starttime, endtime
-      ) = {
-    
+    items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
+    u2iActions: List[(String, String, String, String, String)],
+    ratings: List[(String, String, Int)],
+    selectedItems: List[(String, String, String, String)] // id, itypes, starttime, endtime
+    ) = {
+
     val dbType = "file"
     val dbName = "testpath/"
     val dbHost = None //Option("testhost")
     val dbPort = None //Option(27017)
     val hdfsRoot = "testroot/"
-    
+
     JobTest("io.prediction.algorithms.scalding.itemsim.itemsimcf.DataPreparator")
       .arg("dbType", dbType)
       .arg("dbName", dbName)
@@ -94,8 +94,8 @@
       .arg("conversionParam", params("conversionParam"))
       .arg("conflictParam", params("conflictParam"))
       //.arg("debug", List("test")) // NOTE: test mode
-      .source(Items(appId=appid, itypes=None, dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, items)
-      .source(U2iActions(appId=appid, dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, u2iActions)
+      .source(Items(appId = appid, itypes = None, dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, items)
+      .source(U2iActions(appId = appid, dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, u2iActions)
       .sink[(String, String, Int)](Tsv(DataFile(hdfsRoot, 2, 4, 5, None, "ratings.tsv"))) { outputBuffer =>
         "correctly process and write data to ratings.tsv" in {
           outputBuffer.toList must containTheSameElementsAs(ratings)
@@ -108,9 +108,9 @@
       }
       .run
       .finish
-    
+
   }
-  
+
   val noEndtime = "PIO_NONE"
   /**
    * Test 1. basic. Rate actions only without conflicts
@@ -137,32 +137,32 @@
   }
 
   val test1U2i = List(
-      (Rate, "u0", "i0", "123450", "3"), 
-      (Rate, "u0", "i1", "123457", "1"),
-      (Rate, "u0", "i2", "123458", "4"),
-      (Rate, "u0", "i3", "123459", "2"),
-      (Rate, "u1", "i0", "123457", "5"),
-      (Rate, "u1", "i1", "123458", "2"))
-      
-  val test1Ratings = List(      
-      ("u0", "i0", 3), 
-      ("u0", "i1", 1),
-      ("u0", "i2", 4),
-      ("u0", "i3", 2),
-      ("u1", "i0", 5),
-      ("u1", "i1", 2))
-  
+    (Rate, "u0", "i0", "123450", "3"),
+    (Rate, "u0", "i1", "123457", "1"),
+    (Rate, "u0", "i2", "123458", "4"),
+    (Rate, "u0", "i3", "123459", "2"),
+    (Rate, "u1", "i0", "123457", "5"),
+    (Rate, "u1", "i1", "123458", "2"))
+
+  val test1Ratings = List(
+    ("u0", "i0", 3),
+    ("u0", "i1", 1),
+    ("u0", "i2", 4),
+    ("u0", "i3", 2),
+    ("u1", "i0", 5),
+    ("u1", "i1", 2))
+
   val test1Params: Map[String, String] = Map("viewParam" -> "3", "likeParam" -> "4", "dislikeParam" -> "1", "conversionParam" -> "5",
-      "conflictParam" -> "latest") 
-  
+    "conflictParam" -> "latest")
+
   "itemsim.itemsimcf DataPreparator with only rate actions, all itypes, no conflict" should {
     test(test1AllItypes, test1Params, test1Items, test1U2i, test1Ratings, genSelectedItems(test1Items))
   }
-  
+
   "itemsim.itemsimcf DataPreparator with only rate actions, no itypes specified, no conflict" should {
     testWithoutItypes(test1Params, test1Items, test1U2i, test1Ratings, genSelectedItems(test1Items))
   }
-  
+
   /**
    * Test 2. rate actions only with conflicts
    */
@@ -181,82 +181,82 @@
     test2ItemsMap("i3"))
 
   val test2U2i = List(
-      (Rate, "u0", "i0", "123448", "3"),
-      (Rate, "u0", "i0", "123449", "4"), // highest
-      (Rate, "u0", "i0", "123451", "2"), // latest 
-      (Rate, "u0", "i0", "123450", "1"), // lowest
-      
-      (Rate, "u0", "i1", "123456", "1"), // lowest
-      (Rate, "u0", "i1", "123457", "2"),
-      (Rate, "u0", "i1", "123458", "3"), // latest, highest
+    (Rate, "u0", "i0", "123448", "3"),
+    (Rate, "u0", "i0", "123449", "4"), // highest
+    (Rate, "u0", "i0", "123451", "2"), // latest 
+    (Rate, "u0", "i0", "123450", "1"), // lowest
 
-      (Rate, "u0", "i2", "123461", "2"), // latest, lowest
-      (Rate, "u0", "i2", "123459", "3"),
-      (Rate, "u0", "i2", "123460", "5"), // highest
-      
-      (Rate, "u0", "i3", "123459", "2"),
-      (Rate, "u1", "i0", "123457", "5"),
-      
-      (Rate, "u1", "i1", "123458", "3"), // lowest
-      (Rate, "u1", "i1", "123459", "4"), // highest
-      (Rate, "u1", "i1", "123460", "3")) // latest, lowest
-      
+    (Rate, "u0", "i1", "123456", "1"), // lowest
+    (Rate, "u0", "i1", "123457", "2"),
+    (Rate, "u0", "i1", "123458", "3"), // latest, highest
+
+    (Rate, "u0", "i2", "123461", "2"), // latest, lowest
+    (Rate, "u0", "i2", "123459", "3"),
+    (Rate, "u0", "i2", "123460", "5"), // highest
+
+    (Rate, "u0", "i3", "123459", "2"),
+    (Rate, "u1", "i0", "123457", "5"),
+
+    (Rate, "u1", "i1", "123458", "3"), // lowest
+    (Rate, "u1", "i1", "123459", "4"), // highest
+    (Rate, "u1", "i1", "123460", "3")) // latest, lowest
+
   val test2RatingsLatest = List(
-      ("u0", "i0", 2), 
-      ("u0", "i1", 3),
-      ("u0", "i2", 2),
-      ("u0", "i3", 2),
-      ("u1", "i0", 5),
-      ("u1", "i1", 3))
-  
-   val test2RatingsHighest = List(
-      ("u0", "i0", 4), 
-      ("u0", "i1", 3),
-      ("u0", "i2", 5),
-      ("u0", "i3", 2),
-      ("u1", "i0", 5),
-      ("u1", "i1", 4))
+    ("u0", "i0", 2),
+    ("u0", "i1", 3),
+    ("u0", "i2", 2),
+    ("u0", "i3", 2),
+    ("u1", "i0", 5),
+    ("u1", "i1", 3))
 
-   val test2RatingsLowest = List(
-      ("u0", "i0", 1), 
-      ("u0", "i1", 1),
-      ("u0", "i2", 2),
-      ("u0", "i3", 2),
-      ("u1", "i0", 5),
-      ("u1", "i1", 3))
-      
+  val test2RatingsHighest = List(
+    ("u0", "i0", 4),
+    ("u0", "i1", 3),
+    ("u0", "i2", 5),
+    ("u0", "i3", 2),
+    ("u1", "i0", 5),
+    ("u1", "i1", 4))
+
+  val test2RatingsLowest = List(
+    ("u0", "i0", 1),
+    ("u0", "i1", 1),
+    ("u0", "i2", 2),
+    ("u0", "i3", 2),
+    ("u1", "i0", 5),
+    ("u1", "i1", 3))
+
   val test2Itypes_t1t4 = List("t1", "t4")
   val test2Items_t1t4 = List(
     test2ItemsMap("i0"),
     test2ItemsMap("i2"),
     test2ItemsMap("i3"))
   val test2RatingsHighest_t1t4 = List(
-      ("u0", "i0", 4), 
-      ("u0", "i2", 5),
-      ("u0", "i3", 2),
-      ("u1", "i0", 5))
-   
+    ("u0", "i0", 4),
+    ("u0", "i2", 5),
+    ("u0", "i3", 2),
+    ("u1", "i0", 5))
+
   val test2Params: Map[String, String] = Map("viewParam" -> "3", "likeParam" -> "4", "dislikeParam" -> "1", "conversionParam" -> "5",
-      "conflictParam" -> "latest")
+    "conflictParam" -> "latest")
   val test2ParamsHighest = test2Params + ("conflictParam" -> "highest")
   val test2ParamsLowest = test2Params + ("conflictParam" -> "lowest")
-      
+
   "itemsim.itemsimcf DataPreparator with only rate actions, all itypes, conflict=latest" should {
     test(test2AllItypes, test2Params, test2Items, test2U2i, test2RatingsLatest, genSelectedItems(test2Items))
   }
-  
+
   "itemsim.itemsimcf DataPreparator with only rate actions, all itypes, conflict=highest" should {
     test(test2AllItypes, test2ParamsHighest, test2Items, test2U2i, test2RatingsHighest, genSelectedItems(test2Items))
   }
-  
+
   "itemsim.itemsimcf DataPreparator with only rate actions, all itypes, conflict=lowest" should {
     test(test2AllItypes, test2ParamsLowest, test2Items, test2U2i, test2RatingsLowest, genSelectedItems(test2Items))
   }
-  
+
   "itemsim.itemsimcf DataPreparator with only rate actions, some itypes, conflict=highest" should {
     test(test2Itypes_t1t4, test2ParamsHighest, test2Items, test2U2i, test2RatingsHighest_t1t4, genSelectedItems(test2Items_t1t4))
   }
-  
+
   /**
    * Test 3. Different Actions without conflicts
    */
@@ -275,34 +275,34 @@
     test3ItemsMap("i3"))
 
   val test3U2i = List(
-      (Rate, "u0", "i0", "123450", "4"), 
-      (Like, "u0", "i1", "123457", "PIO_NONE"),
-      (Dislike, "u0", "i2", "123458", "PIO_NONE"),
-      (View, "u0", "i3", "123459", "PIO_NONE"), // NOTE: assume v field won't be missing
-      (Rate, "u1", "i0", "123457", "2"),
-      (Conversion, "u1", "i1", "123458", "PIO_NONE"))
-      
-  val test3Ratings = List(      
-      ("u0", "i0", 4), 
-      ("u0", "i1", 4),
-      ("u0", "i2", 2),
-      ("u0", "i3", 1),
-      ("u1", "i0", 2),
-      ("u1", "i1", 5))
-  
+    (Rate, "u0", "i0", "123450", "4"),
+    (Like, "u0", "i1", "123457", "PIO_NONE"),
+    (Dislike, "u0", "i2", "123458", "PIO_NONE"),
+    (View, "u0", "i3", "123459", "PIO_NONE"), // NOTE: assume v field won't be missing
+    (Rate, "u1", "i0", "123457", "2"),
+    (Conversion, "u1", "i1", "123458", "PIO_NONE"))
+
+  val test3Ratings = List(
+    ("u0", "i0", 4),
+    ("u0", "i1", 4),
+    ("u0", "i2", 2),
+    ("u0", "i3", 1),
+    ("u1", "i0", 2),
+    ("u1", "i1", 5))
+
   val test3Params: Map[String, String] = Map("viewParam" -> "1", "likeParam" -> "4", "dislikeParam" -> "2", "conversionParam" -> "5",
-      "conflictParam" -> "latest") 
-  
+    "conflictParam" -> "latest")
+
   "itemsim.itemsimcf DataPreparator with only all actions, all itypes, no conflict" should {
     test(test3AllItypes, test3Params, test3Items, test3U2i, test3Ratings, genSelectedItems(test3Items))
   }
-    
+
   /**
    * test 4. Different Actions with conflicts
    */
   val test4Params: Map[String, String] = Map("viewParam" -> "2", "likeParam" -> "5", "dislikeParam" -> "1", "conversionParam" -> "4",
-      "conflictParam" -> "latest")
-      
+    "conflictParam" -> "latest")
+
   val test4AllItypes = List("t1", "t2", "t3", "t4")
   val test4ItemsMap = Map(
     // id, itypes, appid, starttime, ct, endtime
@@ -318,34 +318,34 @@
     test4ItemsMap("i3"))
 
   val test4U2i = List(
-      (Rate, "u0", "i0", "123448", "3"),
-      (View, "u0", "i0", "123449", "PIO_NONE"), // lowest (2)
-      (Like, "u0", "i0", "123451", "PIO_NONE"), // latest, highest (5)
-      (Conversion, "u0", "i0", "123450", "PIO_NONE"), 
-      
-      (Rate, "u0", "i1", "123456", "1"), // lowest
-      (Rate, "u0", "i1", "123457", "4"), // highest
-      (View, "u0", "i1", "123458", "PIO_NONE"), // latest (2)
+    (Rate, "u0", "i0", "123448", "3"),
+    (View, "u0", "i0", "123449", "PIO_NONE"), // lowest (2)
+    (Like, "u0", "i0", "123451", "PIO_NONE"), // latest, highest (5)
+    (Conversion, "u0", "i0", "123450", "PIO_NONE"),
 
-      (Conversion, "u0", "i2", "123461", "PIO_NONE"), // latest, highest  (4)
-      (Rate, "u0", "i2", "123459", "3"),
-      (View, "u0", "i2", "123460", "PIO_NONE"), // lowest
-      
-      (Rate, "u0", "i3", "123459", "2"),
-      (View, "u1", "i0", "123457", "PIO_NONE"), // (2)
-      
-      (Rate, "u1", "i1", "123458", "5"), // highest
-      (Conversion, "u1", "i1", "123459", "PIO_NONE"), // (4)
-      (Dislike, "u1", "i1", "123460", "PIO_NONE")) // latest, lowest (1)
-      
+    (Rate, "u0", "i1", "123456", "1"), // lowest
+    (Rate, "u0", "i1", "123457", "4"), // highest
+    (View, "u0", "i1", "123458", "PIO_NONE"), // latest (2)
+
+    (Conversion, "u0", "i2", "123461", "PIO_NONE"), // latest, highest  (4)
+    (Rate, "u0", "i2", "123459", "3"),
+    (View, "u0", "i2", "123460", "PIO_NONE"), // lowest
+
+    (Rate, "u0", "i3", "123459", "2"),
+    (View, "u1", "i0", "123457", "PIO_NONE"), // (2)
+
+    (Rate, "u1", "i1", "123458", "5"), // highest
+    (Conversion, "u1", "i1", "123459", "PIO_NONE"), // (4)
+    (Dislike, "u1", "i1", "123460", "PIO_NONE")) // latest, lowest (1)
+
   val test4RatingsLatest = List(
-      ("u0", "i0", 5), 
-      ("u0", "i1", 2),
-      ("u0", "i2", 4),
-      ("u0", "i3", 2),
-      ("u1", "i0", 2),
-      ("u1", "i1", 1))
-  
+    ("u0", "i0", 5),
+    ("u0", "i1", 2),
+    ("u0", "i2", 4),
+    ("u0", "i3", 2),
+    ("u1", "i0", 2),
+    ("u1", "i1", 1))
+
   "itemsim.itemsimcf DataPreparator with all actions, all itypes, and conflicts=latest" should {
     test(test4AllItypes, test4Params, test4Items, test4U2i, test4RatingsLatest, genSelectedItems(test4Items))
   }
@@ -353,11 +353,11 @@
   val test4ParamsIgnoreView = test4Params + ("viewParam" -> "ignore")
 
   val test4RatingsIgnoreViewLatest = List(
-      ("u0", "i0", 5), 
-      ("u0", "i1", 4),
-      ("u0", "i2", 4),
-      ("u0", "i3", 2),
-      ("u1", "i1", 1))
+    ("u0", "i0", 5),
+    ("u0", "i1", 4),
+    ("u0", "i2", 4),
+    ("u0", "i3", 2),
+    ("u1", "i1", 1))
 
   "itemsim.itemsimcf DataPreparator with all actions, all itypes, ignore View actions and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreView, test4Items, test4U2i, test4RatingsIgnoreViewLatest, genSelectedItems(test4Items))
@@ -367,12 +367,12 @@
   val test4ParamsIgnoreAllExceptView = test4Params + ("viewParam" -> "1", "likeParam" -> "ignore", "dislikeParam" -> "ignore", "conversionParam" -> "ignore")
 
   val test4RatingsIgnoreAllExceptViewLatest = List(
-      ("u0", "i0", 1), 
-      ("u0", "i1", 1),
-      ("u0", "i2", 1),
-      ("u0", "i3", 2),
-      ("u1", "i0", 1),
-      ("u1", "i1", 5))
+    ("u0", "i0", 1),
+    ("u0", "i1", 1),
+    ("u0", "i2", 1),
+    ("u0", "i3", 2),
+    ("u1", "i0", 1),
+    ("u1", "i1", 5))
 
   "itemsim.itemsimcf DataPreparator with all actions, all itypes, ignore all actions except View (and Rate) and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreAllExceptView, test4Items, test4U2i, test4RatingsIgnoreAllExceptViewLatest, genSelectedItems(test4Items))
@@ -382,11 +382,11 @@
   val test4ParamsIgnoreAll = test4Params + ("viewParam" -> "ignore", "likeParam" -> "ignore", "dislikeParam" -> "ignore", "conversionParam" -> "ignore")
 
   val test4RatingsIgnoreAllLatest = List(
-      ("u0", "i0", 3), 
-      ("u0", "i1", 4),
-      ("u0", "i2", 3),
-      ("u0", "i3", 2),
-      ("u1", "i1", 5))
+    ("u0", "i0", 3),
+    ("u0", "i1", 4),
+    ("u0", "i2", 3),
+    ("u0", "i3", 2),
+    ("u1", "i1", 5))
 
   "itemsim.itemsimcf DataPreparator with all actions, all itypes, ignore all actions (except Rate) and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreAll, test4Items, test4U2i, test4RatingsIgnoreAllLatest, genSelectedItems(test4Items))
@@ -401,16 +401,14 @@
     test4ItemsMap("i3"))
 
   val test4RatingsLowest_t3 = List(
-      ("u0", "i0", 2), 
-      ("u0", "i1", 1),
-      ("u0", "i3", 2),
-      ("u1", "i0", 2),
-      ("u1", "i1", 1))
-        
+    ("u0", "i0", 2),
+    ("u0", "i1", 1),
+    ("u0", "i3", 2),
+    ("u1", "i0", 2),
+    ("u1", "i1", 1))
+
   "itemsim.itemsimcf DataPreparator with all actions, some itypes, and conflicts=lowest" should {
     test(test4Itypes_t3, test4ParamsLowest, test4Items, test4U2i, test4RatingsLowest_t3, genSelectedItems(test4Items_t3))
   }
 
-
 }
- 
\ No newline at end of file
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarityTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarityTest.scala
index 948582d..500106c 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarityTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ItemSimilarityTest.scala
@@ -4,29 +4,27 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{DataFile, AlgoFile}
+import io.prediction.commons.filepath.{ DataFile, AlgoFile }
 
 class ItemSimilarityTest extends Specification with TupleConversions {
-  
+
   // helper function
   // only compare double up to 9 decimal places
   def roundingData(orgList: List[(String, String, Double)]) = {
     orgList map { x =>
       val (t1, t2, t3) = x
-      
+
       // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
       // (eg. 3.5 vs 3.499999999999).
       // (eg. 0.6666666666 vs 0.666666667)
-      
+
       (t1, t2, BigDecimal(t3).setScale(9, BigDecimal.RoundingMode.HALF_UP).toDouble)
     }
   }
 
-
   def test(testArgs: Map[String, String],
     testInput: List[(String, String, Int)],
-    testOutput: List[(String, String, Double)]
-    ) = {
+    testOutput: List[(String, String, Double)]) = {
 
     val hdfsRoot = "testroot/"
 
@@ -43,173 +41,173 @@
       arg("minIntersectionParam", testArgs("minIntersectionParam")).
       arg("numSimilarItems", testArgs("numSimilarItems")).
       source(Tsv(DataFile(hdfsRoot, 8, 2, 3, None, "ratings.tsv")), testInput).
-       sink[(String, String, Double)](Tsv(AlgoFile(hdfsRoot, 8, 2, 3, None, "itemSimScores.tsv"))) { outputBuffer =>
-         "correctly calculate similarity score" in {
-           roundingData(outputBuffer.toList) must containTheSameElementsAs(roundingData(testOutput))
-         }
-       }
+      sink[(String, String, Double)](Tsv(AlgoFile(hdfsRoot, 8, 2, 3, None, "itemSimScores.tsv"))) { outputBuffer =>
+        "correctly calculate similarity score" in {
+          roundingData(outputBuffer.toList) must containTheSameElementsAs(roundingData(testOutput))
+        }
+      }
       .run
-      .finish  
+      .finish
   }
 
   // simple test1
   val test1args = Map[String, String]("measureParam" -> "correl",
-      "priorCountParam" -> "10",
-      "priorCorrelParam" -> "0",
-      "minNumRatersParam" -> "1",
-      "maxNumRatersParam" -> "10000",
-      "minIntersectionParam" -> "1",
-      "numSimilarItems" -> "500"
+    "priorCountParam" -> "10",
+    "priorCorrelParam" -> "0",
+    "minNumRatersParam" -> "1",
+    "maxNumRatersParam" -> "10000",
+    "minIntersectionParam" -> "1",
+    "numSimilarItems" -> "500"
   )
-  
+
   val test1Input = List(
-      ("u0","i0",1),
-      ("u0","i1",2),
-      ("u0","i2",3),
-      ("u1","i1",4),
-      ("u1","i2",4),
-      ("u1","i3",2),
-      ("u2","i0",3),
-      ("u2","i1",2),
-      ("u2","i3",1),
-      ("u3","i0",2),
-      ("u3","i2",1),
-      ("u3","i3",5))
-      
+    ("u0", "i0", 1),
+    ("u0", "i1", 2),
+    ("u0", "i2", 3),
+    ("u1", "i1", 4),
+    ("u1", "i2", 4),
+    ("u1", "i3", 2),
+    ("u2", "i0", 3),
+    ("u2", "i1", 2),
+    ("u2", "i3", 1),
+    ("u3", "i0", 2),
+    ("u3", "i2", 1),
+    ("u3", "i3", 5))
+
   val test1Output = List(
-      ("i0","i1",0.0),
-      ("i1","i0",0.0),
-      ("i0","i2",-0.16666666666666666),
-      ("i2","i0",-0.16666666666666666),
-      ("i0","i3",-0.16666666666666666),
-      ("i3","i0",-0.16666666666666666),
-      ("i1","i2",0.16666666666666666),
-      ("i2","i1",0.16666666666666666),
-      ("i1","i3",0.16666666666666666),
-      ("i3","i1",0.16666666666666666),
-      ("i2","i3",-0.16666666666666666),
-      ("i3","i2",-0.16666666666666666))
-  
+    ("i0", "i1", 0.0),
+    ("i1", "i0", 0.0),
+    ("i0", "i2", -0.16666666666666666),
+    ("i2", "i0", -0.16666666666666666),
+    ("i0", "i3", -0.16666666666666666),
+    ("i3", "i0", -0.16666666666666666),
+    ("i1", "i2", 0.16666666666666666),
+    ("i2", "i1", 0.16666666666666666),
+    ("i1", "i3", 0.16666666666666666),
+    ("i3", "i1", 0.16666666666666666),
+    ("i2", "i3", -0.16666666666666666),
+    ("i3", "i2", -0.16666666666666666))
+
   val hdfsRoot = "testroot/"
-  
+
   "ItemSimilarity Correlation" should {
-    test(test1args, test1Input, test1Output)   
+    test(test1args, test1Input, test1Output)
   }
-  
+
   // simple test2
   val test2args = Map[String, String]("measureParam" -> "correl",
-      "priorCountParam" -> "20",
-      "priorCorrelParam" -> "0.5",
-      "minNumRatersParam" -> "1",
-      "maxNumRatersParam" -> "10000",
-      "minIntersectionParam" -> "1",
-      "numSimilarItems" -> "500"
+    "priorCountParam" -> "20",
+    "priorCorrelParam" -> "0.5",
+    "minNumRatersParam" -> "1",
+    "maxNumRatersParam" -> "10000",
+    "minIntersectionParam" -> "1",
+    "numSimilarItems" -> "500"
   )
-  
+
   val test2Input = List(
-      ("u0","i0",1),
-      ("u0","i1",2),
-      ("u0","i2",3),
-      ("u1","i1",4),
-      ("u1","i2",4),
-      ("u1","i3",2),
-      ("u2","i0",3),
-      ("u2","i1",2),
-      ("u2","i3",1),
-      ("u3","i0",2),
-      ("u3","i2",1),
-      ("u3","i3",5))
+    ("u0", "i0", 1),
+    ("u0", "i1", 2),
+    ("u0", "i2", 3),
+    ("u1", "i1", 4),
+    ("u1", "i2", 4),
+    ("u1", "i3", 2),
+    ("u2", "i0", 3),
+    ("u2", "i1", 2),
+    ("u2", "i3", 1),
+    ("u3", "i0", 2),
+    ("u3", "i2", 1),
+    ("u3", "i3", 5))
 
   val test2Output = List(
-      ("i0","i1",0.454545454545454),
-      ("i1","i0",0.454545454545454),
-      ("i0","i2",0.363636363636364),
-      ("i2","i0",0.363636363636364),
-      ("i0","i3",0.363636363636364),
-      ("i3","i0",0.363636363636364),
-      ("i1","i2",0.545454545454545),
-      ("i2","i1",0.545454545454545),
-      ("i1","i3",0.545454545454545),
-      ("i3","i1",0.545454545454545),
-      ("i2","i3",0.363636363636364),
-      ("i3","i2",0.363636363636364))
-  
+    ("i0", "i1", 0.454545454545454),
+    ("i1", "i0", 0.454545454545454),
+    ("i0", "i2", 0.363636363636364),
+    ("i2", "i0", 0.363636363636364),
+    ("i0", "i3", 0.363636363636364),
+    ("i3", "i0", 0.363636363636364),
+    ("i1", "i2", 0.545454545454545),
+    ("i2", "i1", 0.545454545454545),
+    ("i1", "i3", 0.545454545454545),
+    ("i3", "i1", 0.545454545454545),
+    ("i2", "i3", 0.363636363636364),
+    ("i3", "i2", 0.363636363636364))
+
   "ItemSimilarity Correlation with different regularization" should {
-    test(test2args, test2Input, test2Output)   
+    test(test2args, test2Input, test2Output)
   }
-  
+
   // simple test3
   val test3args = Map[String, String]("measureParam" -> "cosine",
-      "priorCountParam" -> "0",
-      "priorCorrelParam" -> "0",
-      "minNumRatersParam" -> "1",
-      "maxNumRatersParam" -> "10000",
-      "minIntersectionParam" -> "1",
-      "numSimilarItems" -> "500"
+    "priorCountParam" -> "0",
+    "priorCorrelParam" -> "0",
+    "minNumRatersParam" -> "1",
+    "maxNumRatersParam" -> "10000",
+    "minIntersectionParam" -> "1",
+    "numSimilarItems" -> "500"
   )
-  
+
   val test3Input = List(
-      ("u0","i0",1),
-      ("u0","i1",2),
-      ("u0","i2",3),
-      ("u1","i1",4),
-      ("u1","i2",4),
-      ("u1","i3",2),
-      ("u2","i0",3),
-      ("u2","i1",2),
-      ("u2","i3",1),
-      ("u3","i0",2),
-      ("u3","i2",1),
-      ("u3","i3",5))
+    ("u0", "i0", 1),
+    ("u0", "i1", 2),
+    ("u0", "i2", 3),
+    ("u1", "i1", 4),
+    ("u1", "i2", 4),
+    ("u1", "i3", 2),
+    ("u2", "i0", 3),
+    ("u2", "i1", 2),
+    ("u2", "i3", 1),
+    ("u3", "i0", 2),
+    ("u3", "i2", 1),
+    ("u3", "i3", 5))
 
   val test3Output = List[(String, String, Double)](
-      ("i0","i1",0.894427190999916),
-      ("i1","i0",0.894427190999916),
-      ("i0","i2",0.707106781186548),
-      ("i2","i0",0.707106781186548),
-      ("i0","i3",0.707106781186548),
-      ("i3","i0",0.707106781186548),
-      ("i1","i2",0.983869910099907),
-      ("i2","i1",0.983869910099907),
-      ("i1","i3",1.0), // NOTE: (use HALF_UP to work around 1.0 vs 0.999999999)
-      ("i3","i1",1.0),
-      ("i2","i3",0.585490553844358),
-      ("i3","i2",0.585490553844358))
-  
+    ("i0", "i1", 0.894427190999916),
+    ("i1", "i0", 0.894427190999916),
+    ("i0", "i2", 0.707106781186548),
+    ("i2", "i0", 0.707106781186548),
+    ("i0", "i3", 0.707106781186548),
+    ("i3", "i0", 0.707106781186548),
+    ("i1", "i2", 0.983869910099907),
+    ("i2", "i1", 0.983869910099907),
+    ("i1", "i3", 1.0), // NOTE: (use HALF_UP to work around 1.0 vs 0.999999999)
+    ("i3", "i1", 1.0),
+    ("i2", "i3", 0.585490553844358),
+    ("i3", "i2", 0.585490553844358))
+
   "ItemSimilarity Cosine" should {
-    test(test3args, test3Input, test3Output)   
+    test(test3args, test3Input, test3Output)
   }
 
   // test4 - test numSimilarItems smaller than existing
   val test4args = Map[String, String]("measureParam" -> "cosine",
-      "priorCountParam" -> "0",
-      "priorCorrelParam" -> "0",
-      "minNumRatersParam" -> "1",
-      "maxNumRatersParam" -> "10000",
-      "minIntersectionParam" -> "1",
-      "numSimilarItems" -> "1"
+    "priorCountParam" -> "0",
+    "priorCorrelParam" -> "0",
+    "minNumRatersParam" -> "1",
+    "maxNumRatersParam" -> "10000",
+    "minIntersectionParam" -> "1",
+    "numSimilarItems" -> "1"
   )
-  
+
   val test4Input = List(
-      ("u0","i0",1),
-      ("u0","i1",2),
-      ("u0","i2",3),
-      ("u1","i1",4),
-      ("u1","i2",4),
-      ("u1","i3",2),
-      ("u2","i0",3),
-      ("u2","i1",2),
-      ("u2","i3",1),
-      ("u3","i0",2),
-      ("u3","i2",1),
-      ("u3","i3",5))
+    ("u0", "i0", 1),
+    ("u0", "i1", 2),
+    ("u0", "i2", 3),
+    ("u1", "i1", 4),
+    ("u1", "i2", 4),
+    ("u1", "i3", 2),
+    ("u2", "i0", 3),
+    ("u2", "i1", 2),
+    ("u2", "i3", 1),
+    ("u3", "i0", 2),
+    ("u3", "i2", 1),
+    ("u3", "i3", 5))
 
   val test4Output = List[(String, String, Double)](
-      ("i0","i1",0.894427190999916),
-      ("i2","i1",0.983869910099907),
-      ("i1","i3",1.0), // NOTE: (use HALF_UP to work around 1.0 vs 0.999999999)
-      ("i3","i1",1.0))
-      
+    ("i0", "i1", 0.894427190999916),
+    ("i2", "i1", 0.983869910099907),
+    ("i1", "i3", 1.0), // NOTE: (use HALF_UP to work around 1.0 vs 0.999999999)
+    ("i3", "i1", 1.0))
+
   "ItemSimilarity Cosine with smaller numSimilarItems" should {
     test(test4args, test4Input, test4Output)
   }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructorTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructorTest.scala
index c5dfbe9..88427ec 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructorTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/itemsimcf/src/test/scala/io/prediction/algorithms/scalding/itemsim/itemsimcf/ModelConstructorTest.scala
@@ -4,7 +4,7 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{AlgoFile, DataFile}
+import io.prediction.commons.filepath.{ AlgoFile, DataFile }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
 
 class ModelConstructorTest extends Specification with TupleConversions {
@@ -39,13 +39,13 @@
       //.arg("debug", "test") // NOTE: test mode
       .source(Tsv(AlgoFile(hdfsRoot, appid, engineid, algoid, None, "itemSimScores.tsv")), itemSimScores)
       .source(Tsv(DataFile(hdfsRoot, appid, engineid, algoid, None, "selectedItems.tsv")), items)
-      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort, algoid=algoid, modelset=modelSet).getSource) { outputBuffer =>
+      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort, algoid = algoid, modelset = modelSet).getSource) { outputBuffer =>
         "correctly write model data to a file" in {
           outputBuffer.toList must containTheSameElementsAs(outputItemSimScores)
         }
-    }
-    .run
-    .finish
+      }
+      .run
+      .finish
   }
 
   val largeNumber = 1234567890 // larger than any item starttime
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRank.scala b/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRank.scala
index f22fbfc..7574a44 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRank.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRank.scala
@@ -2,9 +2,9 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Items, Users}
+import io.prediction.commons.scalding.appdata.{ Items, Users }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
-import io.prediction.commons.filepath.{AlgoFile}
+import io.prediction.commons.filepath.{ AlgoFile }
 
 /**
  * Source:
@@ -81,12 +81,12 @@
 
   // get items data
   val latestItems = Items(
-    appId=trainingAppid,
-    itypes=itypesArg,
-    dbType=training_dbTypeArg,
-    dbName=training_dbNameArg,
-    dbHost=training_dbHostArg,
-    dbPort=training_dbPortArg)
+    appId = trainingAppid,
+    itypes = itypesArg,
+    dbType = training_dbTypeArg,
+    dbName = training_dbNameArg,
+    dbHost = training_dbHostArg,
+    dbPort = training_dbPortArg)
     .readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
     .filter('starttime, 'endtime) { fields: (Long, Option[Long]) =>
       // only keep items with valid starttime and endtime
@@ -106,24 +106,24 @@
     .groupBy('iidx) { _.sortBy('score).reverse.take(numSimilarItemsArg + 1) }
 
   val items = Items(
-    appId=trainingAppid,
-    itypes=None,
-    dbType=training_dbTypeArg,
-    dbName=training_dbNameArg,
-    dbHost=training_dbHostArg,
-    dbPort=training_dbPortArg)
+    appId = trainingAppid,
+    itypes = None,
+    dbType = training_dbTypeArg,
+    dbName = training_dbNameArg,
+    dbHost = training_dbHostArg,
+    dbPort = training_dbPortArg)
     .readData('iid, 'itypesx)
 
   /**
    * sink
    */
   val itemSimScores = ItemSimScores(
-    dbType=modeldata_dbTypeArg,
-    dbName=modeldata_dbNameArg,
-    dbHost=modeldata_dbHostArg,
-    dbPort=modeldata_dbPortArg,
-    algoid=algoidArg,
-    modelset=modelSetArg)
+    dbType = modeldata_dbTypeArg,
+    dbName = modeldata_dbNameArg,
+    dbHost = modeldata_dbHostArg,
+    dbPort = modeldata_dbPortArg,
+    algoid = algoidArg,
+    modelset = modelSetArg)
 
   /**
    * computation
@@ -132,5 +132,5 @@
     .filter('iid, 'iidx) { fields: (String, String) => fields._1 != fields._2 }
     .groupBy('iid) { _.sortBy('score).reverse.take(numSimilarItemsArg) }
     .groupBy('iid) { _.sortBy('score).reverse.toList[(String, Double, List[String])](('iidx, 'score, 'itypes) -> 'simiidsList) }
-    .then ( itemSimScores.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _ )
+    .then(itemSimScores.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _)
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRankTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRankTest.scala
index 4b542b3..23dff1e 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRankTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/latestrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/latestrank/LatestRankTest.scala
@@ -5,8 +5,8 @@
 import com.twitter.scalding._
 
 import io.prediction.commons.scalding.appdata.Items
-import io.prediction.commons.scalding.modeldata.{ItemSimScores}
-import io.prediction.commons.filepath.{AlgoFile}
+import io.prediction.commons.scalding.modeldata.{ ItemSimScores }
+import io.prediction.commons.filepath.{ AlgoFile }
 
 class LatestRankTest extends Specification with TupleConversions {
   def test(
@@ -42,8 +42,8 @@
       .arg("numSimilarItems", numSimilarItems.toString)
       .arg("modelSet", modelSet.toString)
       .arg("recommendationTime", recommendationTime.toString)
-      .source(Items(appId=appid, itypes=Some(itypes), dbType=training_dbType, dbName=training_dbName, dbHost=None, dbPort=None).getSource, items)
-      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType=modeldata_dbType, dbName=modeldata_dbName, dbHost=None, dbPort=None, algoid=algoid, modelset=modelSet).getSource) { outputBuffer =>
+      .source(Items(appId = appid, itypes = Some(itypes), dbType = training_dbType, dbName = training_dbName, dbHost = None, dbPort = None).getSource, items)
+      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType = modeldata_dbType, dbName = modeldata_dbName, dbHost = None, dbPort = None, algoid = algoid, modelset = modelSet).getSource) { outputBuffer =>
         "correctly write ItemSimScores" in {
           val outputList = outputBuffer.toList
 
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparator.scala b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparator.scala
index 0e1289a..4f09de8 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparator.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparator.scala
@@ -2,7 +2,7 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Users, Items, U2iActions}
+import io.prediction.commons.scalding.appdata.{ Users, Items, U2iActions }
 import io.prediction.commons.filepath.DataFile
 
 /**
@@ -16,9 +16,9 @@
  * Required args:
  * --dbType: <string> (eg. mongodb) (see --dbHost, --dbPort)
  * --dbName: <string> appdata database name. (eg predictionio_appdata, or predictionio_training_appdata)
- * 
+ *
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --algoid: <int>
@@ -28,17 +28,17 @@
  * --dislikeParam: <string>
  * --conversionParam: <string>
  * --conflictParam: <string>. (latest/highest/lowest)
- * 
+ *
  * Optional args:
  * --dbHost: <string> (eg. "127.0.0.1")
  * --dbPort: <int> (eg. 27017)
- * 
+ *
  * --itypes: <string separated by white space>. eg "--itypes type1 type2". If no --itypes specified, then ALL itypes will be used.
  * --evalid: <int>. Offline Evaluation if evalid is specified
  * --debug: <String>. "test" - for testing purpose
- * 
+ *
  * Example:
- * 
+ *
  */
 class DataPreparatorCommon(args: Args) extends Job(args) {
   /**
@@ -48,18 +48,18 @@
   val dbNameArg = args("dbName")
   val dbHostArg = args.optional("dbHost")
   val dbPortArg = args.optional("dbPort") map (x => x.toInt) // becomes Option[Int]
-  
+
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val algoidArg = args("algoid").toInt
   val evalidArg = args.optional("evalid") map (x => x.toInt)
   val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
-  
+
   val preItypesArg = args.list("itypes")
   val itypesArg: Option[List[String]] = if (preItypesArg.mkString(",").length == 0) None else Option(preItypesArg)
-  
+
   // determine how to map actions to rating values
   def getActionParam(name: String): Option[Int] = {
     val actionParam: Option[Int] = args(name) match {
@@ -73,24 +73,24 @@
   val likeParamArg: Option[Int] = getActionParam("likeParam")
   val dislikeParamArg: Option[Int] = getActionParam("dislikeParam")
   val conversionParamArg: Option[Int] = getActionParam("conversionParam")
-  
+
   // When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, 
   // determine which action will be considered as final preference.
   final val CONFLICT_LATEST: String = "latest" // use latest action
   final val CONFLICT_HIGHEST: String = "highest" // use the one with highest score
   final val CONFLICT_LOWEST: String = "lowest" // use the one with lowest score
-  
+
   val conflictParamArg: String = args("conflictParam")
 
   // check if the conflictParam is valid
-  require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " +conflictParamArg +" is not valid.")
+  require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " + conflictParamArg + " is not valid.")
 
   val debugArg = args.list("debug")
   val DEBUG_TEST = debugArg.contains("test") // test mode
 
   // NOTE: if OFFLINE_EVAL, read from training set, and use evalid as appid when read Items and U2iActions
-  val trainingAppid = if (OFFLINE_EVAL) evalidArg.get else appidArg 
-  
+  val trainingAppid = if (OFFLINE_EVAL) evalidArg.get else appidArg
+
 }
 
 class DataCopy(args: Args) extends DataPreparatorCommon(args) {
@@ -99,11 +99,11 @@
    * source
    */
 
-  val items = Items(appId=trainingAppid, itypes=itypesArg, 
-      dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
-  
-  val users = Users(appId=trainingAppid,
-      dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg).readData('uid)
+  val items = Items(appId = trainingAppid, itypes = itypesArg,
+    dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
+
+  val users = Users(appId = trainingAppid,
+    dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg).readData('uid)
 
   /**
    * sink
@@ -111,7 +111,7 @@
   val userIdSink = Tsv(DataFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "userIds.tsv"))
 
   val selectedItemSink = Tsv(DataFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "selectedItems.tsv"))
-  
+
   /**
    * computation
    */
@@ -120,7 +120,7 @@
 
   items.mapTo(('iidx, 'itypes, 'starttime, 'endtime) -> ('iidx, 'itypes, 'starttime, 'endtime)) { fields: (String, List[String], Long, Option[Long]) =>
     val (iidx, itypes, starttime, endtime) = fields
-    
+
     // NOTE: convert List[String] into comma-separated String
     // NOTE: endtime is optional
     (iidx, itypes.mkString(","), starttime, endtime.map(_.toString).getOrElse("PIO_NONE"))
@@ -143,9 +143,9 @@
   /**
    * source
    */
-  
-  val u2i = U2iActions(appId=trainingAppid, 
-      dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg).readData('action, 'uid, 'iid, 't, 'v)
+
+  val u2i = U2iActions(appId = trainingAppid,
+    dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg).readData('action, 'uid, 'iid, 't, 'v)
 
   // use byte offset as index for Mahout algo
   val itemsIndex = TextLine(DataFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "selectedItems.tsv")).read
@@ -155,14 +155,14 @@
       val lineArray = line.split("\t")
 
       val (iidx, itypes, starttime, endtime) = try {
-          (lineArray(0), lineArray(1), lineArray(2), lineArray(3))
-        } catch {
-          case e: Exception => {
-            assert(false, "Failed to extract iidx and itypes from the line: " + line + ". Exception: " + e)
-            (0, "dummy", "dummy", "dummy")
-          }
+        (lineArray(0), lineArray(1), lineArray(2), lineArray(3))
+      } catch {
+        case e: Exception => {
+          assert(false, "Failed to extract iidx and itypes from the line: " + line + ". Exception: " + e)
+          (0, "dummy", "dummy", "dummy")
         }
-        
+      }
+
       (offset, iidx, itypes, starttime, endtime)
     }
 
@@ -207,7 +207,7 @@
     }
     .map(('action, 'v, 't) -> ('rating, 'tLong)) { fields: (String, Option[String], String) =>
       val (action, v, t) = fields
-      
+
       // convert actions into rating value based on "action" and "v" fields
       val rating: Int = action match {
         case ACTION_RATE => try {
@@ -218,19 +218,19 @@
             1
           }
         }
-        case ACTION_LIKE => likeParamArg.getOrElse{
+        case ACTION_LIKE => likeParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_DISLIKE => dislikeParamArg.getOrElse{
+        case ACTION_DISLIKE => dislikeParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_VIEW => viewParamArg.getOrElse{
+        case ACTION_VIEW => viewParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
-        case ACTION_CONVERSION => conversionParamArg.getOrElse{
+        case ACTION_CONVERSION => conversionParamArg.getOrElse {
           assert(false, "Action type " + action + " should have been filtered out!")
           1
         }
@@ -239,27 +239,27 @@
           1
         }
       }
-      
+
       (rating, t.toLong)
-    } 
-    .then( resolveConflict('uid, 'iid, 'tLong, 'rating, conflictParamArg) _ )
+    }
+    .then(resolveConflict('uid, 'iid, 'tLong, 'rating, conflictParamArg) _)
     .joinWithSmaller('uid -> 'uidx, usersIndex)
     .project('uindex, 'iindex, 'rating)
     .write(ratingsSink) // write ratings to a file
- 
+
   /**
    * function to resolve conflicting actions of same uid-iid pair.
    */
   def resolveConflict(uidField: Symbol, iidField: Symbol, tfield: Symbol, ratingField: Symbol, conflictSolution: String)(p: RichPipe): RichPipe = {
- 
+
     // NOTE: sortBy() sort from smallest to largest. use reverse to pick the largest one.
     val dataPipe = conflictSolution match {
       case CONFLICT_LATEST => p.groupBy(uidField, iidField) { _.sortBy(tfield).reverse.take(1) } // take latest one (largest t)
       case CONFLICT_HIGHEST => p.groupBy(uidField, iidField) { _.sortBy(ratingField).reverse.take(1) } // take highest rating
       case CONFLICT_LOWEST => p.groupBy(uidField, iidField) { _.sortBy(ratingField).take(1) } // take lowest rating
     }
-   
+
     dataPipe
   }
-  
+
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructor.scala b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructor.scala
index 16bb57e..4ccb879 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructor.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/main/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructor.scala
@@ -2,22 +2,22 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{DataFile, AlgoFile}
+import io.prediction.commons.filepath.{ DataFile, AlgoFile }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
 
 /**
  * Source:
  *
- * Sink: 
- * 
+ * Sink:
+ *
  * Description:
- *   
+ *
  * Required args:
  * --dbType: <string> modeldata DB type (eg. mongodb) (see --dbHost, --dbPort)
  * --dbName: <string> (eg. predictionio_modeldata)
- * 
+ *
  * --hdfsRoot: <string>. Root directory of the HDFS
- * 
+ *
  * --appid: <int>
  * --engineid: <int>
  * --algoid: <int>
@@ -29,15 +29,15 @@
  * Optionsl args:
  * --dbHost: <string> (eg. "127.0.0.1")
  * --dbPort: <int> (eg. 27017)
- * 
+ *
  * --evalid: <int>. Offline Evaluation if evalid is specified
  * --debug: <String>. "test" - for testing purpose
- * 
+ *
  * Example:
  *
  */
 class ModelConstructor(args: Args) extends Job(args) {
-  
+
   /**
    * parse args
    */
@@ -45,20 +45,20 @@
   val dbNameArg = args("dbName")
   val dbHostArg = args.optional("dbHost")
   val dbPortArg = args.optional("dbPort") map (x => x.toInt)
-  
+
   val hdfsRootArg = args("hdfsRoot")
-  
+
   val appidArg = args("appid").toInt
   val engineidArg = args("engineid").toInt
   val algoidArg = args("algoid").toInt
   val evalidArg = args.optional("evalid") map (x => x.toInt)
   val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
-    
+
   val debugArg = args.list("debug")
   val DEBUG_TEST = debugArg.contains("test") // test mode
-  
+
   val modelSetArg = args("modelSet").toBoolean
-  
+
   val numSimilarItems = args("numSimilarItems").toInt
   val recommendationTimeArg = args("recommendationTime").toLong
 
@@ -66,7 +66,7 @@
    * source
    */
   val similarities = Tsv(AlgoFile(hdfsRootArg, appidArg, engineidArg, algoidArg, evalidArg, "similarities.tsv"), ('iindex, 'simiindex, 'score)).read
-    .mapTo(('iindex, 'simiindex, 'score) -> ('iindex, 'simiindex, 'score)) { 
+    .mapTo(('iindex, 'simiindex, 'score) -> ('iindex, 'simiindex, 'score)) {
       fields: (String, String, Double) => fields // convert score from String to Double
     }
 
@@ -90,12 +90,12 @@
 
       (iindex, iid, itypes.split(",").toList, starttime, endtimeOpt)
     }
-  
+
   /**
    * sink
    */
 
-  val ItemSimScoresSink = ItemSimScores(dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg, algoid=algoidArg, modelset=modelSetArg)
+  val ItemSimScoresSink = ItemSimScores(dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg, algoid = algoidArg, modelset = modelSetArg)
 
   /**
    * computation
@@ -108,8 +108,9 @@
   // NOTE: use simiid's starttime and endtime. not iid's.
   val sim1 = sim.project('iid, 'iidI, 'itypesI, 'score, 'starttimeI, 'endtimeI)
   // NOTE: mahout only calculate half of the sim matrix, reverse the fields to get the other half
-  val sim2 = sim.mapTo(('iidI, 'iid, 'itypes, 'score, 'starttime, 'endtime) -> ('iid, 'iidI, 'itypesI, 'score, 'starttimeI, 'endtimeI)) { 
-    fields: (String, String, List[String], Double, Long, Option[Long]) => fields }
+  val sim2 = sim.mapTo(('iidI, 'iid, 'itypes, 'score, 'starttime, 'endtime) -> ('iid, 'iidI, 'itypesI, 'score, 'starttimeI, 'endtimeI)) {
+    fields: (String, String, List[String], Double, Long, Option[Long]) => fields
+  }
 
   val combinedSimilarities = sim1 ++ sim2
 
@@ -133,6 +134,6 @@
 
       (iid, simiidsList.take(numSimilarItems))
     }
-    .then ( ItemSimScoresSink.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _ )
-  
+    .then(ItemSimScoresSink.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _)
+
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparatorTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparatorTest.scala
index 22ff23e..7ea820d 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparatorTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/DataPreparatorTest.scala
@@ -4,37 +4,37 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Users, Items, U2iActions}
+import io.prediction.commons.scalding.appdata.{ Users, Items, U2iActions }
 import io.prediction.commons.filepath.DataFile
 
 class DataPreparatorTest extends Specification with TupleConversions {
-  
+
   val Rate = "rate"
   val Like = "like"
   val Dislike = "dislike"
   val View = "view"
   //val ViewDetails = "viewDetails"
   val Conversion = "conversion"
-  
+
   val appid = 2
 
   def test(itypes: List[String], params: Map[String, String],
-      items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
-      users: List[Tuple1[String]], 
-      u2iActions: List[(String, String, String, String, String)],
-      ratings: List[(String, String, String)],
-      selectedItems: List[(String, String, String, String)], // id, itypes, starttime, endtime
-      itemsIndexer: Map[String, String],
-      usersIndexer: Map[String, String]) = {
-    
+    items: List[(String, String, String, String, String, String)], // id, itypes, appid, starttime, ct, endtime
+    users: List[Tuple1[String]],
+    u2iActions: List[(String, String, String, String, String)],
+    ratings: List[(String, String, String)],
+    selectedItems: List[(String, String, String, String)], // id, itypes, starttime, endtime
+    itemsIndexer: Map[String, String],
+    usersIndexer: Map[String, String]) = {
+
     val userIds = users map (x => x._1)
     val selectedItemsTextLine = selectedItems map { x => (itemsIndexer(x._1), x.productIterator.mkString("\t")) }
-    val usersTextLine = users map {x => (usersIndexer(x._1), x._1) }
-    
-    val itemsIndex = selectedItems map { x => (itemsIndexer(x._1), x._1, x._2, x._3, x._4) }
-    val usersIndex = users map {x => (usersIndexer(x._1), x._1) }
+    val usersTextLine = users map { x => (usersIndexer(x._1), x._1) }
 
-    val ratingsIndexed = ratings map {x => (usersIndexer(x._1), itemsIndexer(x._2), x._3)}
+    val itemsIndex = selectedItems map { x => (itemsIndexer(x._1), x._1, x._2, x._3, x._4) }
+    val usersIndex = users map { x => (usersIndexer(x._1), x._1) }
+
+    val ratingsIndexed = ratings map { x => (usersIndexer(x._1), itemsIndexer(x._2), x._3) }
 
     val dbType = "file"
     val dbName = "testpath/"
@@ -59,8 +59,8 @@
       .arg("dislikeParam", params("dislikeParam"))
       .arg("conversionParam", params("conversionParam"))
       .arg("conflictParam", params("conflictParam"))
-      .source(Items(appId=appid, itypes=Some(itypes), dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, items)
-      .source(Users(appId=appid, dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, users)
+      .source(Items(appId = appid, itypes = Some(itypes), dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, items)
+      .source(Users(appId = appid, dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, users)
       .sink[(String)](Tsv(DataFile(hdfsRoot, appid, engineid, algoid, evalid, "userIds.tsv"))) { outputBuffer =>
         "correctly write userIds.tsv" in {
           outputBuffer.toList must containTheSameElementsAs(userIds)
@@ -74,7 +74,6 @@
       .run
       .finish
 
-
     JobTest("io.prediction.algorithms.scalding.mahout.itemsim.DataPreparator")
       .arg("dbType", dbType)
       .arg("dbName", dbName)
@@ -88,7 +87,7 @@
       .arg("dislikeParam", params("dislikeParam"))
       .arg("conversionParam", params("conversionParam"))
       .arg("conflictParam", params("conflictParam"))
-      .source(U2iActions(appId=appid, dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource, u2iActions)
+      .source(U2iActions(appId = appid, dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource, u2iActions)
       .source(TextLine(DataFile(hdfsRoot, appid, engineid, algoid, evalid, "selectedItems.tsv")), selectedItemsTextLine)
       .source(TextLine(DataFile(hdfsRoot, appid, engineid, algoid, evalid, "userIds.tsv")), usersTextLine)
       .sink[(String, String, String, String, String)](Tsv(DataFile(hdfsRoot, appid, engineid, algoid, evalid, "itemsIndex.tsv"))) { outputBuffer =>
@@ -110,9 +109,9 @@
       }
       .run
       .finish
-    
+
   }
-  
+
   val noEndtime = "PIO_NONE"
   /**
    * Test 1. basic. Rate actions only without conflicts
@@ -144,32 +143,32 @@
   val test1UsersIndexer = Map("u0" -> "0", "u1" -> "1", "u2" -> "2", "u3" -> "3") // map uid to index
 
   val test1U2i = List(
-      (Rate, "u0", "i0", "123450", "3"), 
-      (Rate, "u0", "i1", "123457", "1"),
-      (Rate, "u0", "i2", "123458", "4"),
-      (Rate, "u0", "i3", "123459", "2"),
-      (Rate, "u1", "i0", "123457", "5"),
-      (Rate, "u1", "i1", "123458", "2"))
-      
-  val test1Ratings = List(      
-      ("u0", "i0", "3"), 
-      ("u0", "i1", "1"),
-      ("u0", "i2", "4"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "5"),
-      ("u1", "i1", "2"))
+    (Rate, "u0", "i0", "123450", "3"),
+    (Rate, "u0", "i1", "123457", "1"),
+    (Rate, "u0", "i2", "123458", "4"),
+    (Rate, "u0", "i3", "123459", "2"),
+    (Rate, "u1", "i0", "123457", "5"),
+    (Rate, "u1", "i1", "123458", "2"))
+
+  val test1Ratings = List(
+    ("u0", "i0", "3"),
+    ("u0", "i1", "1"),
+    ("u0", "i2", "4"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "5"),
+    ("u1", "i1", "2"))
 
   val test1Params: Map[String, String] = Map("viewParam" -> "3", "likeParam" -> "4", "dislikeParam" -> "1", "conversionParam" -> "5",
-      "conflictParam" -> "latest") 
-  
+    "conflictParam" -> "latest")
+
   "DataPreparator with only rate actions, all itypes, no conflict" should {
     test(test1AllItypes, test1Params, test1Items, test1Users, test1U2i, test1Ratings, genSelectedItems(test1Items), test1ItemsIndexer, test1UsersIndexer)
   }
-  
+
   "DataPreparator with only rate actions, no itypes specified, no conflict" should {
     test(List(), test1Params, test1Items, test1Users, test1U2i, test1Ratings, genSelectedItems(test1Items), test1ItemsIndexer, test1UsersIndexer)
   }
-  
+
   /**
    * Test 2. rate actions only with conflicts
    */
@@ -193,50 +192,50 @@
   val test2UsersIndexer = Map("u0" -> "0", "u1" -> "1", "u2" -> "2", "u3" -> "3") // map uid to index
 
   val test2U2i = List(
-      (Rate, "u0", "i0", "123448", "3"),
-      (Rate, "u0", "i0", "123449", "4"), // highest
-      (Rate, "u0", "i0", "123451", "2"), // latest 
-      (Rate, "u0", "i0", "123450", "1"), // lowest
-      
-      (Rate, "u0", "i1", "123456", "1"), // lowest
-      (Rate, "u0", "i1", "123457", "2"),
-      (Rate, "u0", "i1", "123458", "3"), // latest, highest
+    (Rate, "u0", "i0", "123448", "3"),
+    (Rate, "u0", "i0", "123449", "4"), // highest
+    (Rate, "u0", "i0", "123451", "2"), // latest 
+    (Rate, "u0", "i0", "123450", "1"), // lowest
 
-      (Rate, "u0", "i2", "123461", "2"), // latest, lowest
-      (Rate, "u0", "i2", "123459", "3"),
-      (Rate, "u0", "i2", "123460", "5"), // highest
-      
-      (Rate, "u0", "i3", "123459", "2"),
-      (Rate, "u1", "i0", "123457", "5"),
-      
-      (Rate, "u1", "i1", "123458", "3"), // lowest
-      (Rate, "u1", "i1", "123459", "4"), // highest
-      (Rate, "u1", "i1", "123460", "3")) // latest, lowest
-      
+    (Rate, "u0", "i1", "123456", "1"), // lowest
+    (Rate, "u0", "i1", "123457", "2"),
+    (Rate, "u0", "i1", "123458", "3"), // latest, highest
+
+    (Rate, "u0", "i2", "123461", "2"), // latest, lowest
+    (Rate, "u0", "i2", "123459", "3"),
+    (Rate, "u0", "i2", "123460", "5"), // highest
+
+    (Rate, "u0", "i3", "123459", "2"),
+    (Rate, "u1", "i0", "123457", "5"),
+
+    (Rate, "u1", "i1", "123458", "3"), // lowest
+    (Rate, "u1", "i1", "123459", "4"), // highest
+    (Rate, "u1", "i1", "123460", "3")) // latest, lowest
+
   val test2RatingsLatest = List(
-      ("u0", "i0", "2"), 
-      ("u0", "i1", "3"),
-      ("u0", "i2", "2"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "5"),
-      ("u1", "i1", "3"))
-  
-   val test2RatingsHighest = List(
-      ("u0", "i0", "4"), 
-      ("u0", "i1", "3"),
-      ("u0", "i2", "5"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "5"),
-      ("u1", "i1", "4"))
+    ("u0", "i0", "2"),
+    ("u0", "i1", "3"),
+    ("u0", "i2", "2"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "5"),
+    ("u1", "i1", "3"))
 
-   val test2RatingsLowest = List(
-      ("u0", "i0", "1"), 
-      ("u0", "i1", "1"),
-      ("u0", "i2", "2"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "5"),
-      ("u1", "i1", "3"))
-      
+  val test2RatingsHighest = List(
+    ("u0", "i0", "4"),
+    ("u0", "i1", "3"),
+    ("u0", "i2", "5"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "5"),
+    ("u1", "i1", "4"))
+
+  val test2RatingsLowest = List(
+    ("u0", "i0", "1"),
+    ("u0", "i1", "1"),
+    ("u0", "i2", "2"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "5"),
+    ("u1", "i1", "3"))
+
   val test2Itypes_t1t4 = List("t1", "t4")
   val test2Items_t1t4 = List(
     test2ItemsMap("i0"),
@@ -244,32 +243,32 @@
     test2ItemsMap("i3"))
 
   val test2RatingsHighest_t1t4 = List(
-      ("u0", "i0", "4"), 
-      ("u0", "i2", "5"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "5"))
-   
+    ("u0", "i0", "4"),
+    ("u0", "i2", "5"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "5"))
+
   val test2Params: Map[String, String] = Map("viewParam" -> "3", "likeParam" -> "4", "dislikeParam" -> "1", "conversionParam" -> "5",
-      "conflictParam" -> "latest")
+    "conflictParam" -> "latest")
   val test2ParamsHighest = test2Params + ("conflictParam" -> "highest")
   val test2ParamsLowest = test2Params + ("conflictParam" -> "lowest")
-      
+
   "DataPreparator with only rate actions, all itypes, conflict=latest" should {
     test(test2AllItypes, test2Params, test2Items, test2Users, test2U2i, test2RatingsLatest, genSelectedItems(test2Items), test2ItemsIndexer, test2UsersIndexer)
   }
-  
+
   "DataPreparator with only rate actions, all itypes, conflict=highest" should {
     test(test2AllItypes, test2ParamsHighest, test2Items, test2Users, test2U2i, test2RatingsHighest, genSelectedItems(test2Items), test2ItemsIndexer, test2UsersIndexer)
   }
-  
+
   "DataPreparator with only rate actions, all itypes, conflict=lowest" should {
     test(test2AllItypes, test2ParamsLowest, test2Items, test2Users, test2U2i, test2RatingsLowest, genSelectedItems(test2Items), test2ItemsIndexer, test2UsersIndexer)
   }
-  
+
   "DataPreparator with only rate actions, some itypes, conflict=highest" should {
     test(test2Itypes_t1t4, test2ParamsHighest, test2Items, test2Users, test2U2i, test2RatingsHighest_t1t4, genSelectedItems(test2Items_t1t4), test2ItemsIndexer, test2UsersIndexer)
   }
-  
+
   /**
    * Test 3. Different Actions without conflicts
    */
@@ -293,33 +292,33 @@
   val test3UsersIndexer = Map("u0" -> "0", "u1" -> "1", "u2" -> "2", "u3" -> "3") // map uid to index
 
   val test3U2i = List(
-      (Rate, "u0", "i0", "123450", "4"), 
-      (Like, "u0", "i1", "123457", "PIO_NONE"),
-      (Dislike, "u0", "i2", "123458", "PIO_NONE"),
-      (View, "u0", "i3", "123459", "PIO_NONE"), // NOTE: assume v field won't be missing
-      (Rate, "u1", "i0", "123457", "2"),
-      (Conversion, "u1", "i1", "123458", "PIO_NONE"))
-      
-  val test3Ratings = List(      
-      ("u0", "i0", "4"), 
-      ("u0", "i1", "4"),
-      ("u0", "i2", "2"),
-      ("u0", "i3", "1"),
-      ("u1", "i0", "2"),
-      ("u1", "i1", "5"))
-  
+    (Rate, "u0", "i0", "123450", "4"),
+    (Like, "u0", "i1", "123457", "PIO_NONE"),
+    (Dislike, "u0", "i2", "123458", "PIO_NONE"),
+    (View, "u0", "i3", "123459", "PIO_NONE"), // NOTE: assume v field won't be missing
+    (Rate, "u1", "i0", "123457", "2"),
+    (Conversion, "u1", "i1", "123458", "PIO_NONE"))
+
+  val test3Ratings = List(
+    ("u0", "i0", "4"),
+    ("u0", "i1", "4"),
+    ("u0", "i2", "2"),
+    ("u0", "i3", "1"),
+    ("u1", "i0", "2"),
+    ("u1", "i1", "5"))
+
   val test3Params: Map[String, String] = Map("viewParam" -> "1", "likeParam" -> "4", "dislikeParam" -> "2", "conversionParam" -> "5",
-      "conflictParam" -> "latest") 
-  
+    "conflictParam" -> "latest")
+
   "DataPreparator with only all actions, all itypes, no conflict" should {
     test(test3AllItypes, test3Params, test3Items, test3Users, test3U2i, test3Ratings, genSelectedItems(test3Items), test3ItemsIndexer, test3UsersIndexer)
   }
-    
+
   /**
    * test 4. Different Actions with conflicts
    */
   val test4Params: Map[String, String] = Map("viewParam" -> "2", "likeParam" -> "5", "dislikeParam" -> "1", "conversionParam" -> "4",
-      "conflictParam" -> "latest")
+    "conflictParam" -> "latest")
 
   val test4AllItypes = List("t1", "t2", "t3", "t4")
   val test4ItemsMap = Map(
@@ -341,33 +340,33 @@
   val test4UsersIndexer = Map("u0" -> "0", "u1" -> "1", "u2" -> "2", "u3" -> "3") // map uid to index
 
   val test4U2i = List(
-      (Rate, "u0", "i0", "123448", "3"),
-      (View, "u0", "i0", "123449", "PIO_NONE"), // lowest (2)
-      (Like, "u0", "i0", "123451", "PIO_NONE"), // latest, highest (5)
-      (Conversion, "u0", "i0", "123450", "PIO_NONE"), 
-      
-      (Rate, "u0", "i1", "123456", "1"), // lowest
-      (Rate, "u0", "i1", "123457", "4"), // highest
-      (View, "u0", "i1", "123458", "PIO_NONE"), // latest (2)
+    (Rate, "u0", "i0", "123448", "3"),
+    (View, "u0", "i0", "123449", "PIO_NONE"), // lowest (2)
+    (Like, "u0", "i0", "123451", "PIO_NONE"), // latest, highest (5)
+    (Conversion, "u0", "i0", "123450", "PIO_NONE"),
 
-      (Conversion, "u0", "i2", "123461", "PIO_NONE"), // latest, highest  (4)
-      (Rate, "u0", "i2", "123459", "3"),
-      (View, "u0", "i2", "123460", "PIO_NONE"), // lowest
-      
-      (Rate, "u0", "i3", "123459", "2"),
-      (View, "u1", "i0", "123457", "PIO_NONE"), // (2)
-      
-      (Rate, "u1", "i1", "123458", "5"), // highest
-      (Conversion, "u1", "i1", "123459", "PIO_NONE"), // (4)
-      (Dislike, "u1", "i1", "123460", "PIO_NONE")) // latest, lowest (1)
-      
+    (Rate, "u0", "i1", "123456", "1"), // lowest
+    (Rate, "u0", "i1", "123457", "4"), // highest
+    (View, "u0", "i1", "123458", "PIO_NONE"), // latest (2)
+
+    (Conversion, "u0", "i2", "123461", "PIO_NONE"), // latest, highest  (4)
+    (Rate, "u0", "i2", "123459", "3"),
+    (View, "u0", "i2", "123460", "PIO_NONE"), // lowest
+
+    (Rate, "u0", "i3", "123459", "2"),
+    (View, "u1", "i0", "123457", "PIO_NONE"), // (2)
+
+    (Rate, "u1", "i1", "123458", "5"), // highest
+    (Conversion, "u1", "i1", "123459", "PIO_NONE"), // (4)
+    (Dislike, "u1", "i1", "123460", "PIO_NONE")) // latest, lowest (1)
+
   val test4RatingsLatest = List(
-      ("u0", "i0", "5"), 
-      ("u0", "i1", "2"),
-      ("u0", "i2", "4"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "2"),
-      ("u1", "i1", "1"))
+    ("u0", "i0", "5"),
+    ("u0", "i1", "2"),
+    ("u0", "i2", "4"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "2"),
+    ("u1", "i1", "1"))
 
   "DataPreparator with all actions, all itypes, and conflicts=latest" should {
     test(test4AllItypes, test4Params, test4Items, test4Users, test4U2i, test4RatingsLatest, genSelectedItems(test4Items), test4ItemsIndexer, test4UsersIndexer)
@@ -376,11 +375,11 @@
   val test4ParamsIgnoreView = test4Params + ("viewParam" -> "ignore")
 
   val test4RatingsIgnoreViewLatest = List(
-      ("u0", "i0", "5"), 
-      ("u0", "i1", "4"),
-      ("u0", "i2", "4"),
-      ("u0", "i3", "2"),
-      ("u1", "i1", "1"))
+    ("u0", "i0", "5"),
+    ("u0", "i1", "4"),
+    ("u0", "i2", "4"),
+    ("u0", "i3", "2"),
+    ("u1", "i1", "1"))
 
   "DataPreparator with all actions, all itypes, ignore View actions and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreView, test4Items, test4Users, test4U2i, test4RatingsIgnoreViewLatest, genSelectedItems(test4Items), test4ItemsIndexer, test4UsersIndexer)
@@ -390,12 +389,12 @@
   val test4ParamsIgnoreAllExceptView = test4Params + ("viewParam" -> "1", "likeParam" -> "ignore", "dislikeParam" -> "ignore", "conversionParam" -> "ignore")
 
   val test4RatingsIgnoreAllExceptViewLatest = List(
-      ("u0", "i0", "1"), 
-      ("u0", "i1", "1"),
-      ("u0", "i2", "1"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "1"),
-      ("u1", "i1", "5"))
+    ("u0", "i0", "1"),
+    ("u0", "i1", "1"),
+    ("u0", "i2", "1"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "1"),
+    ("u1", "i1", "5"))
 
   "DataPreparator with all actions, all itypes, ignore all actions except View (and Rate) and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreAllExceptView, test4Items, test4Users, test4U2i, test4RatingsIgnoreAllExceptViewLatest, genSelectedItems(test4Items), test4ItemsIndexer, test4UsersIndexer)
@@ -405,11 +404,11 @@
   val test4ParamsIgnoreAll = test4Params + ("viewParam" -> "ignore", "likeParam" -> "ignore", "dislikeParam" -> "ignore", "conversionParam" -> "ignore")
 
   val test4RatingsIgnoreAllLatest = List(
-      ("u0", "i0", "3"), 
-      ("u0", "i1", "4"),
-      ("u0", "i2", "3"),
-      ("u0", "i3", "2"),
-      ("u1", "i1", "5"))
+    ("u0", "i0", "3"),
+    ("u0", "i1", "4"),
+    ("u0", "i2", "3"),
+    ("u0", "i3", "2"),
+    ("u1", "i1", "5"))
 
   "DataPreparator with all actions, all itypes, ignore all actions (except Rate) and conflicts=latest" should {
     test(test4AllItypes, test4ParamsIgnoreAll, test4Items, test4Users, test4U2i, test4RatingsIgnoreAllLatest, genSelectedItems(test4Items), test4ItemsIndexer, test4UsersIndexer)
@@ -424,16 +423,14 @@
     test4ItemsMap("i3"))
 
   val test4RatingsLowest_t3 = List(
-      ("u0", "i0", "2"), 
-      ("u0", "i1", "1"),
-      ("u0", "i3", "2"),
-      ("u1", "i0", "2"),
-      ("u1", "i1", "1"))
-    
+    ("u0", "i0", "2"),
+    ("u0", "i1", "1"),
+    ("u0", "i3", "2"),
+    ("u1", "i0", "2"),
+    ("u1", "i1", "1"))
+
   "DataPreparator with only all actions, some itypes, and conflicts=lowest" should {
     test(test4Itypes_t3, test4ParamsLowest, test4Items, test4Users, test4U2i, test4RatingsLowest_t3, genSelectedItems(test4Items_t3), test4ItemsIndexer, test4UsersIndexer)
   }
 
-
-} 
- 
\ No newline at end of file
+}
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructorTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructorTest.scala
index 73e4790..3f5dcf3 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructorTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/mahout/src/test/scala/io/prediction/algorithms/scalding/mahout/itemsim/ModelConstructorTest.scala
@@ -4,9 +4,9 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.filepath.{AlgoFile, DataFile}
+import io.prediction.commons.filepath.{ AlgoFile, DataFile }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
-import cascading.tuple.{Tuple, TupleEntry, TupleEntryIterator, Fields}
+import cascading.tuple.{ Tuple, TupleEntry, TupleEntryIterator, Fields }
 
 class ModelConstructorTest extends Specification with TupleConversions {
 
@@ -27,8 +27,8 @@
     val dbHost = None
     val dbPort = None
     val hdfsRoot = "testroot/"
-    
-    val itemSimScores = output map { case (iid, simiid, score, simitypes) => (iid, simiid, score, simitypes, algoid, modelSet)} 
+
+    val itemSimScores = output map { case (iid, simiid, score, simitypes) => (iid, simiid, score, simitypes, algoid, modelSet) }
 
     JobTest("io.prediction.algorithms.scalding.mahout.itemsim.ModelConstructor")
       .arg("dbType", dbType)
@@ -42,13 +42,13 @@
       .arg("recommendationTime", recommendationTime.toString)
       .source(Tsv(AlgoFile(hdfsRoot, appid, engineid, algoid, evalid, "similarities.tsv"), new Fields("iindex", "simiindex", "score")), similarities)
       .source(Tsv(DataFile(hdfsRoot, appid, engineid, algoid, evalid, "itemsIndex.tsv")), items)
-      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort, algoid=algoid, modelset=modelSet).getSource) { outputBuffer =>
+      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort, algoid = algoid, modelset = modelSet).getSource) { outputBuffer =>
         "correctly write model data to a file" in {
           outputBuffer.toList must containTheSameElementsAs(itemSimScores)
         }
-    }
-    .run
-    .finish
+      }
+      .run
+      .finish
 
   }
 
@@ -60,8 +60,8 @@
     ("1", "i1", "t1,t2", "12347", noEndtime),
     ("2", "i2", "t2,t3", "12348", noEndtime),
     ("3", "i3", "t2", "12349", noEndtime))
-    
-  val test1Similarities = List( 
+
+  val test1Similarities = List(
     ("0", "1", "0.83"),
     ("0", "2", "0.25"),
     ("0", "3", "0.49"),
@@ -106,14 +106,14 @@
   }
 
   /* test 2: score sorting */
-  
+
   val test2Items = List(
     ("0", "i0", "t1,t2,t3", "12346", noEndtime),
     ("1", "i1", "t1,t2", "12347", noEndtime),
     ("2", "i2", "t2,t3", "12348", noEndtime),
     ("3", "i3", "t2", "12349", noEndtime))
-    
-  val test2Similarities = List( 
+
+  val test2Similarities = List(
     ("0", "1", "83"),
     ("0", "2", "200"),
     ("0", "3", "4"),
@@ -156,7 +156,7 @@
     ("2", "i2", "t2,t3", "123567", "543432"),
     ("3", "i3", "t2", "123678", "543654"))
 
-  val test3Similarities = List( 
+  val test3Similarities = List(
     ("0", "1", "83"),
     ("0", "2", "200"),
     ("0", "3", "4"),
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRank.scala b/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRank.scala
index c43d4df..110d95f 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRank.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/main/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRank.scala
@@ -2,9 +2,9 @@
 
 import com.twitter.scalding._
 
-import io.prediction.commons.scalding.appdata.{Items, Users}
+import io.prediction.commons.scalding.appdata.{ Items, Users }
 import io.prediction.commons.scalding.modeldata.ItemSimScores
-import io.prediction.commons.filepath.{AlgoFile}
+import io.prediction.commons.filepath.{ AlgoFile }
 
 /**
  * Source:
@@ -79,12 +79,12 @@
 
   // get items data
   val items2 = Items(
-    appId=trainingAppid,
-    itypes=itypesArg,
-    dbType=training_dbTypeArg,
-    dbName=training_dbNameArg,
-    dbHost=training_dbHostArg,
-    dbPort=training_dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
+    appId = trainingAppid,
+    itypes = itypesArg,
+    dbType = training_dbTypeArg,
+    dbName = training_dbNameArg,
+    dbHost = training_dbHostArg,
+    dbPort = training_dbPortArg).readStartEndtime('iidx, 'itypes, 'starttime, 'endtime)
     .filter('starttime, 'endtime) { fields: (Long, Option[Long]) =>
       // only keep items with valid starttime and endtime
       val (starttimeI, endtimeI) = fields
@@ -101,23 +101,23 @@
     }
 
   val items = Items(
-    appId=trainingAppid,
-    itypes=itypesArg,
-    dbType=training_dbTypeArg,
-    dbName=training_dbNameArg,
-    dbHost=training_dbHostArg,
-    dbPort=training_dbPortArg).readData('iid, 'itypesx)
+    appId = trainingAppid,
+    itypes = itypesArg,
+    dbType = training_dbTypeArg,
+    dbName = training_dbNameArg,
+    dbHost = training_dbHostArg,
+    dbPort = training_dbPortArg).readData('iid, 'itypesx)
 
   /**
    * sink
    */
   val itemSimScores = ItemSimScores(
-    dbType=modeldata_dbTypeArg,
-    dbName=modeldata_dbNameArg,
-    dbHost=modeldata_dbHostArg,
-    dbPort=modeldata_dbPortArg,
-    algoid=algoidArg,
-    modelset=modelSetArg)
+    dbType = modeldata_dbTypeArg,
+    dbName = modeldata_dbNameArg,
+    dbHost = modeldata_dbHostArg,
+    dbPort = modeldata_dbPortArg,
+    algoid = algoidArg,
+    modelset = modelSetArg)
 
   /**
    * computation
@@ -129,5 +129,5 @@
     .groupBy('iid) { _.sortBy('score).reverse.toList[(String, Double, List[String])](('iidx, 'score, 'itypes) -> 'simiidsList) }
 
   // write modeldata
-  scores.then( itemSimScores.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _ )
+  scores.then(itemSimScores.writeData('iid, 'simiidsList, algoidArg, modelSetArg) _)
 }
diff --git a/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRankTest.scala b/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRankTest.scala
index 92d5e3f..eb08dc6 100644
--- a/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRankTest.scala
+++ b/process/engines/itemsim/algorithms/hadoop/scalding/randomrank/src/test/scala/io/prediction/algorithms/scalding/itemsim/randomrank/RandomRankTest.scala
@@ -5,8 +5,8 @@
 import com.twitter.scalding._
 
 import io.prediction.commons.scalding.appdata.Items
-import io.prediction.commons.scalding.modeldata.{ItemSimScores}
-import io.prediction.commons.filepath.{AlgoFile}
+import io.prediction.commons.scalding.modeldata.{ ItemSimScores }
+import io.prediction.commons.filepath.{ AlgoFile }
 
 class RandomRankTest extends Specification with TupleConversions {
   def test(
@@ -42,17 +42,17 @@
       .arg("numSimilarItems", numSimilarItems.toString)
       .arg("modelSet", modelSet.toString)
       .arg("recommendationTime", recommendationTime.toString)
-      .source(Items(appId=appid, itypes=Some(itypes), dbType=training_dbType, dbName=training_dbName, dbHost=None, dbPort=None).getSource, items)
-      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType=modeldata_dbType, dbName=modeldata_dbName, dbHost=None, dbPort=None, algoid=algoid, modelset=modelSet).getSource) { outputBuffer =>
+      .source(Items(appId = appid, itypes = Some(itypes), dbType = training_dbType, dbName = training_dbName, dbHost = None, dbPort = None).getSource, items)
+      .sink[(String, String, String, String, Int, Boolean)](ItemSimScores(dbType = modeldata_dbType, dbName = modeldata_dbName, dbHost = None, dbPort = None, algoid = algoid, modelset = modelSet).getSource) { outputBuffer =>
 
         def takeOutScores(d: List[(String, String, String, String, Int, Boolean)]) = {
           // don't check score and itypes.
           // for iids, don't check order. convert to set
-          d map {x => (x._1, x._2.split(",").toSet, x._5, x._6) }
+          d map { x => (x._1, x._2.split(",").toSet, x._5, x._6) }
         }
 
         def getScoresOnly(d: List[(String, String, String, String, Int, Boolean)]) = {
-          d flatMap {x => x._3.split(",").toList.map(_.toDouble)}
+          d flatMap { x => x._3.split(",").toList.map(_.toDouble) }
         }
 
         def getIids(d: List[(String, String, String, String, Int, Boolean)]) = {
@@ -156,7 +156,7 @@
       ("i2", "t4", "19", "21", "88", noEndtime),
       ("i3", "t3,t4", "19", "9876543210", "67890", noEndtime))
     val itemSimScores = List(
-      ("i3", "i0,i1,i2", "0.0,0.0,0.0", "[t1,t2,t3],[t2,t3],[t4]", algoid, modelSet),      
+      ("i3", "i0,i1,i2", "0.0,0.0,0.0", "[t1,t2,t3],[t2,t3],[t4]", algoid, modelSet),
       ("i2", "i0,i1,i3", "0.0,0.0,0.0", "[t1,t2,t3],[t2,t3],[t3,t4]", algoid, modelSet),
       ("i1", "i0,i2,i3", "0.0,0.0,0.0", "[t1,t2,t3],[t4],[t3,t4]", algoid, modelSet),
       ("i0", "i1,i2,i3", "0.0,0.0,0.0", "[t2,t3],[t4],[t3,t4]", algoid, modelSet))
diff --git a/process/engines/itemsim/algorithms/scala/generic/build.sbt b/process/engines/itemsim/algorithms/scala/generic/build.sbt
new file mode 100644
index 0000000..06218c6
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/generic/build.sbt
@@ -0,0 +1,22 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-itemsim-algorithms-scala-generic"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "itemsim.generic.dataprep" -> "io.prediction.algorithms.generic.itemsim.GenericDataPreparator")
+
+packJvmOpts := Map(
+  "itemsim.generic.dataprep" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/itemsim/algorithms/scala/generic/src/main/resources/application.conf
similarity index 100%
rename from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf
rename to process/engines/itemsim/algorithms/scala/generic/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/generic/src/main/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/generic/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/generic/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemsim/generic/GenericDataPreparator.scala b/process/engines/itemsim/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemsim/generic/GenericDataPreparator.scala
new file mode 100644
index 0000000..202a8d4
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/generic/src/main/scala/io/prediction/algorithms/itemsim/generic/GenericDataPreparator.scala
@@ -0,0 +1,320 @@
+package io.prediction.algorithms.generic.itemsim
+
+import io.prediction.commons.Config
+import io.prediction.commons.appdata.{ Item, U2IAction, User }
+
+import grizzled.slf4j.Logger
+import java.io.File
+import java.io.FileWriter
+import java.io.RandomAccessFile
+import java.io.BufferedWriter
+import scala.io.Source
+
+import com.twitter.scalding.Args
+
+/**
+ * Generic single machine data preparator for ItemSim engine.
+ * Read data from appdata and output the following files:
+ * - itemsIndex.tsv (iindex iid itypes): all items
+ * - validItemsIndex.tsv (iindex): valid candidate items to be recommended
+ * - ratings.mm (if --matrixMarket true ): matrix market format rating
+ * - ratings.csv (if --matrixMarket false): comma separated rating file
+ */
+object GenericDataPreparator {
+
+  /* constants */
+  final val ACTION_RATE = "rate"
+  final val ACTION_LIKE = "like"
+  final val ACTION_DISLIKE = "dislike"
+  final val ACTION_VIEW = "view"
+  final val ACTION_CONVERSION = "conversion"
+
+  // When there are conflicting actions, e.g. a user gives an item a rating 5 but later dislikes it, 
+  // determine which action will be considered as final preference.
+  final val CONFLICT_LATEST: String = "latest" // use latest action
+  final val CONFLICT_HIGHEST: String = "highest" // use the one with highest score
+  final val CONFLICT_LOWEST: String = "lowest" // use the one with lowest score
+
+  /* global */
+  val logger = Logger(GenericDataPreparator.getClass)
+
+  //println(logger.isInfoEnabled)
+
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val outputDir: String,
+    val appid: Int,
+    val evalid: Option[Int],
+    val itypes: Option[List[String]],
+    val viewParam: Option[Int],
+    val likeParam: Option[Int],
+    val dislikeParam: Option[Int],
+    val conversionParam: Option[Int],
+    val conflictParam: String,
+    val recommendationTime: Option[Long],
+    val matrixMarket: Boolean)
+
+  def main(cmdArgs: Array[String]) {
+
+    logger.info("Running generic data preparator ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val outputDirArg = args("outputDir")
+    val appidArg = args("appid").toInt
+    val evalidArg = args.optional("evalid") map (x => x.toInt)
+    val OFFLINE_EVAL = (evalidArg != None) // offline eval mode
+
+    val preItypesArg = args.list("itypes")
+    val itypesArg: Option[List[String]] = if (preItypesArg.mkString(",").length == 0) None else Option(preItypesArg)
+
+    // determine how to map actions to rating values
+    def getActionParam(name: String): Option[Int] = {
+      val actionParam: Option[Int] = args(name) match {
+        case "ignore" => None
+        case x => Some(x.toInt)
+      }
+      actionParam
+    }
+
+    val viewParamArg: Option[Int] = getActionParam("viewParam")
+    val likeParamArg: Option[Int] = getActionParam("likeParam")
+    val dislikeParamArg: Option[Int] = getActionParam("dislikeParam")
+    val conversionParamArg: Option[Int] = getActionParam("conversionParam")
+
+    val conflictParamArg: String = args("conflictParam")
+
+    // check if the conflictParam is valid
+    require(List(CONFLICT_LATEST, CONFLICT_HIGHEST, CONFLICT_LOWEST).contains(conflictParamArg), "conflict param " + conflictParamArg + " is not valid.")
+
+    val recommendationTimeArg = args.optional("recommendationTime").map(_.toLong)
+
+    // write data in matrix market format
+    val matrixMarketArg: Boolean = args.optional("matrixMarket").map(x => x.toBoolean).getOrElse(true)
+
+    val arg = JobArg(
+      outputDir = outputDirArg,
+      appid = appidArg,
+      evalid = evalidArg,
+      itypes = itypesArg,
+      viewParam = viewParamArg,
+      likeParam = likeParamArg,
+      dislikeParam = dislikeParamArg,
+      conversionParam = conversionParamArg,
+      conflictParam = conflictParamArg,
+      recommendationTime = recommendationTimeArg,
+      matrixMarket = matrixMarketArg
+    )
+
+    /* run job */
+    dataPrep(arg)
+    cleanup(arg)
+
+  }
+
+  case class RatingData(
+    uid: Int,
+    iid: Int,
+    rating: Int,
+    t: Long)
+
+  def dataPrep(arg: JobArg) = {
+
+    // NOTE: if OFFLINE_EVAL, read from training set, and use evalid as appid when read Items and U2iActions
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val usersDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataUsers
+    else
+      commonsConfig.getAppdataTrainingUsers
+
+    val itemsDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataItems
+    else
+      commonsConfig.getAppdataTrainingItems
+
+    val u2iDb = if (!OFFLINE_EVAL)
+      commonsConfig.getAppdataU2IActions
+    else
+      commonsConfig.getAppdataTrainingU2IActions
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    // create outputDir if doesn't exist yet.
+    val outputDir = new File(arg.outputDir)
+    outputDir.mkdirs()
+
+    /* user data */
+    // convert to Map for later lookup
+    // assuming number of users can be fit into memory.
+    val usersMap: Map[String, Int] = usersDb.getByAppid(appid).map(_.id).zipWithIndex
+      .map { case (uid, index) => (uid, index + 1) }.toMap // +1 to make it starting from 1
+
+    /* item data */
+    case class ItemData(
+      val iindex: Int,
+      val itypes: Seq[String],
+      val starttime: Option[Long],
+      val endtime: Option[Long])
+
+    val itemsMap: Map[String, ItemData] = arg.itypes.map { itypes =>
+      itemsDb.getByAppidAndItypes(appid, itypes)
+    }.getOrElse {
+      itemsDb.getByAppid(appid)
+    }.zipWithIndex
+      .map {
+        case (item, index) =>
+          val itemData = ItemData(
+            iindex = index + 1, // +1 to make index starting from 1 (required by graphchi)
+            itypes = item.itypes,
+            starttime = item.starttime.map[Long](_.getMillis()),
+            endtime = item.endtime.map[Long](_.getMillis())
+          )
+          (item.id -> itemData)
+      }.toMap
+
+    /* write item index (iindex iid itypes) */
+    val itemsIndexWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + "itemsIndex.tsv")))
+    // NOTE: writes ALL items here; time-based validity filtering is applied only to validItemsIndex.tsv below
+    itemsMap.foreach {
+      case (iid, itemData) =>
+        val itypes = itemData.itypes.mkString(",")
+        itemsIndexWriter.write(s"${itemData.iindex}\t${iid}\t${itypes}\n")
+    }
+    itemsIndexWriter.close()
+
+    /* write valid item (iindex) */
+    val validItemsIndexWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + "validItemsIndex.tsv")))
+    // NOTE: only write valid items (eg. valid starttime and endtime)
+    itemsMap.filter {
+      case (iid, itemData) =>
+        itemTimeFilter(true, itemData.starttime, itemData.endtime, arg.recommendationTime)
+    }.foreach {
+      case (iid, itemData) =>
+        validItemsIndexWriter.write(s"${itemData.iindex}\n")
+    }
+    validItemsIndexWriter.close()
+
+    /* write u2i ratings */
+
+    val u2iRatings = u2iDb.getAllByAppid(appid)
+      .filter { u2i =>
+        val validAction = isValidAction(u2i, arg.likeParam, arg.dislikeParam, arg.viewParam, arg.conversionParam)
+        val validUser = usersMap.contains(u2i.uid)
+        val validItem = itemsMap.contains(u2i.iid)
+        (validAction && validUser && validItem)
+      }.map { u2i =>
+        val rating = convertToRating(u2i, arg.likeParam, arg.dislikeParam, arg.viewParam, arg.conversionParam)
+
+        RatingData(
+          uid = usersMap(u2i.uid),
+          iid = itemsMap(u2i.iid).iindex,
+          rating = rating,
+          t = u2i.t.getMillis
+        )
+      }.toSeq
+
+    if (!u2iRatings.isEmpty) {
+
+      val ratingReduced = u2iRatings.groupBy(x => (x.iid, x.uid))
+        .mapValues { v =>
+          v.reduce { (a, b) =>
+            resolveConflict(a, b, arg.conflictParam)
+          }
+        }.values
+        .toSeq
+        .sortBy { x: RatingData =>
+          (x.iid, x.uid)
+        }
+
+      val fileName = if (arg.matrixMarket) "ratings.mm" else "ratings.csv"
+      val ratingsWriter = new BufferedWriter(new FileWriter(new File(arg.outputDir + fileName))) // intermediate file
+
+      if (arg.matrixMarket) {
+        ratingsWriter.write("%%MatrixMarket matrix coordinate real general\n")
+        ratingsWriter.write(s"${usersMap.size} ${itemsMap.size} ${ratingReduced.size}\n")
+      }
+
+      ratingReduced.foreach { r =>
+        if (arg.matrixMarket) {
+          ratingsWriter.write(s"${r.uid} ${r.iid} ${r.rating}\n")
+        } else {
+          ratingsWriter.write(s"${r.uid},${r.iid},${r.rating}\n")
+        }
+      }
+
+      ratingsWriter.close()
+    }
+
+  }
+
+  def itemTimeFilter(enable: Boolean, starttime: Option[Long], endtime: Option[Long], recommendationTime: Option[Long]): Boolean = {
+    if (enable) {
+      recommendationTime.map { recTime =>
+        (starttime, endtime) match {
+          case (Some(start), None) => (recTime >= start)
+          case (Some(start), Some(end)) => ((recTime >= start) && (recTime < end))
+          case (None, Some(end)) => (recTime < end)
+          case (None, None) => true
+        }
+      }.getOrElse(true)
+    } else true
+  }
+
+  def isValidAction(u2i: U2IAction, likeParam: Option[Int], dislikeParam: Option[Int],
+    viewParam: Option[Int], conversionParam: Option[Int]): Boolean = {
+    val keepThis: Boolean = u2i.action match {
+      case ACTION_RATE => true
+      case ACTION_LIKE => (likeParam != None)
+      case ACTION_DISLIKE => (dislikeParam != None)
+      case ACTION_VIEW => (viewParam != None)
+      case ACTION_CONVERSION => (conversionParam != None)
+      case _ => {
+        assert(false, "Action type " + u2i.action + " in u2iActions appdata is not supported!")
+        false // all other unsupported actions
+      }
+    }
+    keepThis
+  }
+
+  def convertToRating(u2i: U2IAction, likeParam: Option[Int], dislikeParam: Option[Int],
+    viewParam: Option[Int], conversionParam: Option[Int]): Int = {
+    val rating: Int = u2i.action match {
+      case ACTION_RATE => u2i.v.get.toInt
+      case ACTION_LIKE => likeParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_DISLIKE => dislikeParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_VIEW => viewParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+      case ACTION_CONVERSION => conversionParam.getOrElse {
+        assert(false, "Action type " + u2i.action + " should have been filtered out!")
+        0
+      }
+    }
+    rating
+  }
+
+  def resolveConflict(a: RatingData, b: RatingData, conflictParam: String) = {
+    conflictParam match {
+      case CONFLICT_LATEST => if (a.t > b.t) a else b
+      case CONFLICT_HIGHEST => if (a.rating > b.rating) a else b
+      case CONFLICT_LOWEST => if (a.rating < b.rating) a else b
+    }
+  }
+
+  def cleanup(arg: JobArg) = {
+
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemsim/algorithms/scala/generic/src/test/resources/application.conf b/process/engines/itemsim/algorithms/scala/generic/src/test/resources/application.conf
new file mode 100644
index 0000000..9e296b6
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/generic/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_training_appdata_generic_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_generic_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_generic_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_generic_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_generic_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/generic/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/generic/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/generic/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/generic/itemsim/GenericDataPreparatorSpec.scala b/process/engines/itemsim/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/generic/itemsim/GenericDataPreparatorSpec.scala
new file mode 100644
index 0000000..9985025
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/generic/src/test/scala/io/prediction/algorithms/generic/itemsim/GenericDataPreparatorSpec.scala
@@ -0,0 +1,167 @@
+package io.prediction.algorithms.generic.itemsim
+
+import io.prediction.commons.Config
+import io.prediction.commons.appdata.{ User, Item, U2IAction }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import com.mongodb.casbah.Imports._
+
+class GenericDataPreparatorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_appdata_generic_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val appdataUsers = commonConfig.getAppdataUsers
+  val appdataItems = commonConfig.getAppdataItems
+  val appdataU2IActions = commonConfig.getAppdataU2IActions
+
+  "GenericDataPreparator with basic rate action app data" should {
+    val appid = 23
+    // insert a few users into db
+    val user = User(
+      id = "u0",
+      appid = appid,
+      ct = DateTime.now,
+      latlng = None,
+      inactive = None,
+      attributes = None)
+
+    appdataUsers.insert(user.copy(id = "u0"))
+    appdataUsers.insert(user.copy(id = "u1"))
+    appdataUsers.insert(user.copy(id = "u2"))
+
+    // insert a few items into db
+    val item = Item(
+      id = "i0",
+      appid = appid,
+      ct = DateTime.now,
+      itypes = List("t1", "t2"),
+      starttime = None,
+      endtime = None,
+      price = None,
+      profit = None,
+      latlng = None,
+      inactive = None,
+      attributes = None)
+
+    appdataItems.insert(item.copy(id = "i0", itypes = List("t1", "t2")))
+    appdataItems.insert(item.copy(id = "i1", itypes = List("t1")))
+    appdataItems.insert(item.copy(id = "i2", itypes = List("t2", "t3")))
+    appdataItems.insert(item.copy(id = "i3", itypes = List("t3")))
+
+    // insert a few u2i into db
+    val u2i = U2IAction(
+      appid = appid,
+      action = "rate",
+      uid = "u0",
+      iid = "i0",
+      t = DateTime.now,
+      latlng = None,
+      v = Some(3),
+      price = None)
+
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i0", action = "rate", v = Some(3)))
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i1", action = "rate", v = Some(4)))
+    appdataU2IActions.insert(u2i.copy(uid = "u0", iid = "i2", action = "rate", v = Some(1)))
+
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i0", action = "rate", v = Some(2)))
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i1", action = "rate", v = Some(1)))
+    appdataU2IActions.insert(u2i.copy(uid = "u1", iid = "i3", action = "rate", v = Some(3)))
+
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i1", action = "rate", v = Some(5)))
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i2", action = "rate", v = Some(1)))
+    appdataU2IActions.insert(u2i.copy(uid = "u2", iid = "i3", action = "rate", v = Some(4)))
+
+    val outputDir = "/tmp/pio_test/"
+    val args = Map(
+      "outputDir" -> outputDir,
+      "appid" -> appid,
+      "viewParam" -> 4,
+      "likeParam" -> 3,
+      "dislikeParam" -> 1,
+      "conversionParam" -> 2,
+      "conflictParam" -> "highest"
+    )
+
+    val argsArray = args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+
+    GenericDataPreparator.main(argsArray)
+
+    "correctly generate itemsIndex.tsv" in {
+      val itemsIndex = Source.fromFile(s"${outputDir}itemsIndex.tsv")
+        .getLines()
+        .toList
+
+      val expected = List(
+        "1\ti0\tt1,t2",
+        "2\ti1\tt1",
+        "3\ti2\tt2,t3",
+        "4\ti3\tt3"
+      )
+      itemsIndex must containTheSameElementsAs(expected)
+    }
+
+    "correctly write validItemsIndex.tsv" in {
+      val validItemsIndex = Source.fromFile(s"${outputDir}validItemsIndex.tsv")
+        .getLines()
+        .toList
+
+      val expected = List(
+        "1",
+        "2",
+        "3",
+        "4"
+      )
+      validItemsIndex must containTheSameElementsAs(expected)
+    }
+
+    "correctly generate ratings.mm" in {
+      val ratingsLines = Source.fromFile(s"${outputDir}ratings.mm")
+        .getLines()
+
+      val headers = ratingsLines.take(2).toList
+
+      val ratings = ratingsLines.toList
+
+      val expectedHeaders = List(
+        "%%MatrixMarket matrix coordinate real general",
+        "3 4 9"
+      )
+
+      val expected = List(
+        "1 1 3",
+        "1 2 4",
+        "1 3 1",
+        "2 1 2",
+        "2 2 1",
+        "2 4 3",
+        "3 2 5",
+        "3 3 1",
+        "3 4 4"
+      )
+      headers must be_==(expectedHeaders) and
+        (ratings must containTheSameElementsAs(expected))
+    }
+  }
+
+  // TODO: test csv output
+
+  // TODO: test mixed and conflict actions
+
+  // TODO: test start and end time
+
+  // TODO: test evalid != None
+
+  // clean up when finish test
+  step(cleanUp())
+}
diff --git a/process/engines/itemsim/algorithms/scala/graphchi/build.sbt b/process/engines/itemsim/algorithms/scala/graphchi/build.sbt
new file mode 100644
index 0000000..4250ed4
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/graphchi/build.sbt
@@ -0,0 +1,23 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-itemsim-algorithms-scala-graphchi"
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1",
+  "org.scalanlp" %% "breeze" % "0.6.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "itemsim.graphchi.modelcon" -> "io.prediction.algorithms.graphchi.itemsim.GraphChiModelConstructor")
+
+packJvmOpts := Map(
+  "itemsim.graphchi.modelcon" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/itemsim/algorithms/scala/graphchi/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/itemsim/algorithms/scala/graphchi/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/graphchi/src/main/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/graphchi/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/graphchi/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructor.scala b/process/engines/itemsim/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructor.scala
new file mode 100644
index 0000000..d0ec835
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/graphchi/src/main/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructor.scala
@@ -0,0 +1,150 @@
+package io.prediction.algorithms.graphchi.itemsim
+
+import grizzled.slf4j.Logger
+import breeze.linalg._
+import com.twitter.scalding.Args
+import scala.io.Source
+
+import io.prediction.commons.Config
+import io.prediction.commons.modeldata.{ ItemSimScore }
+
+/**
+ * Input files:
+ * - itemsIndex.tsv (iindex iid itypes): all items
+ * - validItemsIndex.tsv (iindex): valid candidate items to be recommended
+ * - ratings.mm-topk (iindex1 iindex2 score) generated by GraphChi
+ *
+ */
+object GraphChiModelConstructor {
+
+  /* global */
+  val logger = Logger(GraphChiModelConstructor.getClass)
+  //println(logger.isInfoEnabled)
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val inputDir: String,
+    val appid: Int,
+    val algoid: Int,
+    val evalid: Option[Int],
+    val modelSet: Boolean,
+    val numSimilarItems: Int)
+
+  def main(cmdArgs: Array[String]) {
+    logger.info("Running model constructor for GraphChi ItemSim ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val arg = JobArg(
+      inputDir = args("inputDir"),
+      appid = args("appid").toInt,
+      algoid = args("algoid").toInt,
+      evalid = args.optional("evalid") map (x => x.toInt),
+      modelSet = args("modelSet").toBoolean,
+      numSimilarItems = args("numSimilarItems").toInt
+    )
+
+    /* run job */
+    modelCon(arg)
+    cleanUp(arg)
+  }
+
+  def modelCon(arg: JobArg) = {
+
+    // NOTE: if OFFLINE_EVAL, write to training modeldata and use evalid as appid
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val modeldataDb = if (!OFFLINE_EVAL)
+      commonsConfig.getModeldataItemSimScores
+    else
+      commonsConfig.getModeldataTrainingItemSimScores
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    case class ItemData(
+      val iid: String,
+      val itypes: Seq[String])
+
+    // item index file (iindex iid itypes)
+    // iindex -> ItemData
+    val itemsMap: Map[Int, ItemData] = Source.fromFile(s"${arg.inputDir}itemsIndex.tsv")
+      .getLines()
+      .map[(Int, ItemData)] { line =>
+        val (iindex, item) = try {
+          val fields = line.split("\t")
+          val itemData = ItemData(
+            iid = fields(1),
+            itypes = fields(2).split(",").toList
+          )
+          (fields(0).toInt, itemData)
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        (iindex, item)
+      }.toMap
+
+    // valid item index file (iindex only, one per line)
+    // iindex
+    val validItemsSet: Set[Int] = Source.fromFile(s"${arg.inputDir}validItemsIndex.tsv")
+      .getLines()
+      .map[Int] { line =>
+        val iindex = try {
+          val fields = line.split("\t")
+          fields(0).toInt
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        iindex
+      }.toSet
+
+    // iindex1 iindex2 score
+    val simScores = Source.fromFile(s"${arg.inputDir}ratings.mm-topk")
+      .getLines()
+      .map[(Int, Int, Double)] { line =>
+        val (iindex1, iindex2, score) = try {
+          val fields = line.split("""\s+""")
+          (fields(0).toInt, fields(1).toInt, fields(2).toDouble)
+        } catch {
+          case e: Exception => throw new RuntimeException(s"Cannot read item index and score from the line: ${line}. ${e}")
+        }
+        (iindex1, iindex2, score)
+      }.toSeq
+
+    val similarities = simScores ++
+      simScores.map { case (iindex1, iindex2, score) => (iindex2, iindex1, score) }
+
+    // iindex1 -> Seq[(iindex1, iindex2, score)]
+    similarities.groupBy(_._1).foreach {
+      case (iindex1, scoresSeq) =>
+
+        // only recommend items in validItems
+        val topScores = scoresSeq.filter { x => validItemsSet(x._2) }
+          .sortBy(_._3)(Ordering[Double].reverse)
+          .take(arg.numSimilarItems)
+
+        logger.debug(s"${iindex1}: ${topScores.toList}")
+        modeldataDb.insert(ItemSimScore(
+          iid = itemsMap(iindex1).iid,
+          simiids = topScores.map(x => itemsMap(x._2).iid),
+          scores = topScores.map(_._3),
+          itypes = topScores.map(x => itemsMap(x._2).itypes),
+          appid = appid,
+          algoid = arg.algoid,
+          modelset = arg.modelSet))
+
+    }
+
+  }
+
+  def cleanUp(arg: JobArg) = {
+
+  }
+
+}
diff --git a/process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/application.conf b/process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/application.conf
new file mode 100644
index 0000000..45b3078
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_training_appdata_graphchi_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_graphchi_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_graphchi_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_graphchi_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_graphchi_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/graphchi/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructorSpec.scala b/process/engines/itemsim/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructorSpec.scala
new file mode 100644
index 0000000..58d4824
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/graphchi/src/test/scala/io/prediction/algorithms/graphchi/itemsim/GraphChiModelConstructorSpec.scala
@@ -0,0 +1,356 @@
+package io.prediction.algorithms.graphchi.itemsim
+
+import io.prediction.commons.Config
+import io.prediction.commons.settings.{ App, Algo }
+import io.prediction.commons.modeldata.{ ItemSimScore }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import com.mongodb.casbah.Imports._
+
+class GraphChiItemSimModelConstructorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_modeldata_graphchi_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val modeldataItemSimScores = commonConfig.getModeldataItemSimScores
+
+  def argMapToArray(args: Map[String, Any]): Array[String] = {
+    args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+  }
+
+  def writeToFile(lines: List[String], filePath: String) = {
+    val writer = new BufferedWriter(new FileWriter(new File(filePath)))
+    lines.foreach { line =>
+      writer.write(s"${line}\n")
+    }
+    writer.close()
+  }
+
+  "GraphChiItemSimModelConstructor" should {
+    val inputDir = "/tmp/pio_test/"
+
+    val inputDirFile = new File(inputDir)
+    inputDirFile.mkdirs()
+
+    val itemsIndex = List(
+      "1\ti0\tt1,t2",
+      "2\ti1\tt1",
+      "3\ti2\tt2,t3",
+      "4\ti3\tt3"
+    )
+
+    val validItemIndex = List(
+      "1",
+      "2",
+      "3",
+      "4"
+    )
+
+    val scoresTopK = List(
+      "1 2 12.6",
+      "1 3 1.5",
+      "2 4 20.4",
+      "2 3 5.6",
+      "3 4 2.3",
+      "4 1 15.4"
+    )
+
+    writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+    writeToFile(validItemIndex, s"${inputDir}validItemsIndex.tsv")
+    writeToFile(scoresTopK, s"${inputDir}ratings.mm-topk")
+
+    val appid = 12
+
+    implicit val app = App(
+      id = appid,
+      userid = 0,
+      appkey = "1234",
+      display = "12345",
+      url = None,
+      cat = None,
+      desc = None,
+      timezone = "UTC"
+    )
+
+    "correctly writes ItemSimScores with larger numSimilarItems" in {
+
+      val algoid = 45
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 10
+      )
+
+      val i0Expected = ItemSimScore(
+        iid = "i0",
+        simiids = Seq("i3", "i1", "i2"),
+        scores = Seq(15.4, 12.6, 1.5),
+        itypes = Seq(Seq("t3"), Seq("t1"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i3", "i0", "i2"),
+        scores = Seq(20.4, 12.6, 5.6),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i1", "i3", "i0"),
+        scores = Seq(5.6, 2.3, 1.5),
+        itypes = Seq(Seq("t1"), Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i1", "i0", "i2"),
+        scores = Seq(20.4, 15.4, 2.3),
+        itypes = Seq(Seq("t1"), Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val i0ItemSim = modeldataItemSimScores.getByIid("i0")
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+
+      // don't check id
+      i0ItemSim.map(_.copy(id = None)) must beSome(i0Expected) and
+        (i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected)) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected))
+
+    }
+
+    "correctly writes ItemSimScores with smaller numSimilarItems" in {
+
+      val algoid = 45
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 1
+      )
+
+      val i0Expected = ItemSimScore(
+        iid = "i0",
+        simiids = Seq("i3"),
+        scores = Seq(15.4),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i3"),
+        scores = Seq(20.4),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i1"),
+        scores = Seq(5.6),
+        itypes = Seq(Seq("t1")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i1"),
+        scores = Seq(20.4),
+        itypes = Seq(Seq("t1")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val i0ItemSim = modeldataItemSimScores.getByIid("i0")
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+
+      // don't check id
+      i0ItemSim.map(_.copy(id = None)) must beSome(i0Expected) and
+        (i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected)) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected))
+
+    }
+
+    // TODO: subset valid items
+    "correctly writes ItemSimScores with subset numSimilarItems" in {
+
+      val algoid = 46
+      val modelSet = false
+
+      val inputDir = "/tmp/pio_test/subset/"
+
+      val inputDirFile = new File(inputDir)
+      inputDirFile.mkdirs()
+
+      val validItemIndex = List(
+        "1",
+        "4"
+      )
+
+      writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+      writeToFile(validItemIndex, s"${inputDir}validItemsIndex.tsv")
+      writeToFile(scoresTopK, s"${inputDir}ratings.mm-topk")
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 10
+      )
+
+      val i0Expected = ItemSimScore(
+        iid = "i0",
+        simiids = Seq("i3"),
+        scores = Seq(15.4),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i3", "i0"),
+        scores = Seq(20.4, 12.6),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i3", "i0"),
+        scores = Seq(2.3, 1.5),
+        itypes = Seq(Seq("t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i0"),
+        scores = Seq(15.4),
+        itypes = Seq(Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      GraphChiModelConstructor.main(argMapToArray(args))
+
+      val i0ItemSim = modeldataItemSimScores.getByIid("i0")
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+
+      // don't check id
+      i0ItemSim.map(_.copy(id = None)) must beSome(i0Expected) and
+        (i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected)) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected))
+
+    }
+
+    // TODO: evalid
+
+  }
+
+  // NOTE: clean up when finish test
+  step(cleanUp())
+}
\ No newline at end of file
diff --git a/process/engines/itemsim/algorithms/scala/mahout/build.sbt b/process/engines/itemsim/algorithms/scala/mahout/build.sbt
new file mode 100644
index 0000000..cf4f6ed
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/build.sbt
@@ -0,0 +1,27 @@
+import xerial.sbt.Pack._
+
+name := "predictionio-process-itemsim-algorithms-scala-mahout"
+
+libraryDependencies ++= Seq(
+  "org.apache.mahout" % "mahout-core" % "0.9",
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "com.twitter" %% "scalding-args" % "0.8.11",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+parallelExecution in Test := false
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "itemsim.mahout.mahoutjob" -> "io.prediction.algorithms.mahout.itemsim.MahoutJob",
+  "itemsim.mahout.modelcon" -> "io.prediction.algorithms.mahout.itemsim.MahoutModelConstructor")
+
+packJvmOpts := Map(
+  "itemsim.mahout.mahoutjob" -> Common.packCommonJvmOpts,
+  "itemsim.mahout.modelcon" -> Common.packCommonJvmOpts)
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf b/process/engines/itemsim/algorithms/scala/mahout/src/main/resources/application.conf
similarity index 100%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/application.conf
copy to process/engines/itemsim/algorithms/scala/mahout/src/main/resources/application.conf
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/mahout/src/main/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/mahout/src/main/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/main/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutJob.scala b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutJob.scala
new file mode 100644
index 0000000..ed91d0c
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutJob.scala
@@ -0,0 +1,196 @@
+package io.prediction.algorithms.mahout.itemsim
+
+import grizzled.slf4j.Logger
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import scala.io.Source
+import scala.collection.JavaConversions._
+import scala.sys.process._
+import scala.collection.mutable.PriorityQueue
+
+import org.apache.mahout.cf.taste.similarity.ItemSimilarity
+import org.apache.mahout.cf.taste.model.DataModel
+import org.apache.mahout.cf.taste.impl.model.file.FileDataModel
+
+/** main function to run non-distributed Mahout Job */
+object MahoutJob {
+
+  val logger = Logger(MahoutJob.getClass)
+
+  def main(args: Array[String]) {
+    if (args.size < 1) {
+      logger.error("Please specify Mahout job class name")
+      logger.error("Example. <job class name> --param1 1 --param2 2")
+      System.exit(1)
+    }
+
+    val jobName = args(0)
+
+    logger.info("Running Job %s...".format(jobName))
+
+    logger.info(args.mkString(" "))
+    val (argMap, lastkey) = args.drop(1).foldLeft((Map[String, String](), "")) { (res, data) =>
+      val (argMap, lastkey) = res
+      val key: Option[String] = if (data.startsWith("--")) Some(data.stripPrefix("--")) else None
+
+      key map { k =>
+        (argMap ++ Map(k -> ""), k)
+      } getOrElse {
+        val orgData = argMap(lastkey)
+        val newData = orgData match {
+          case "" => data
+          case _ => orgData + " " + data
+        }
+        (argMap ++ Map(lastkey -> newData), lastkey)
+      }
+    }
+    //println(argMap)
+
+    val job = Class.forName(jobName).
+      getConstructor().
+      newInstance().
+      asInstanceOf[MahoutJob]
+
+    val runArgs = job.prepare(argMap)
+
+    val finishArgs = job.run(runArgs)
+
+    val cleanupArgs = job.finish(finishArgs)
+
+    job.cleanup(cleanupArgs)
+
+    logger.info("done")
+
+  }
+
+}
+
+/** Wrapper job class for Mahout algo */
+abstract class MahoutJob {
+  /** Get required arg */
+  def getArg(args: Map[String, String], key: String): String = {
+    if (!args.contains(key)) sys.error("Please specify value for parameter --" + key)
+
+    args(key)
+  }
+
+  /** Get optional arg */
+  def getArgOpt(args: Map[String, String], key: String, default: String): String = {
+    if (args.contains(key)) args(key) else default
+  }
+
+  def getArgOpt(args: Map[String, String], key: String): Option[String] = {
+    if (args.contains(key)) Some(args(key)) else None
+  }
+
+  /** Prepare stage for algo */
+  def prepare(args: Map[String, String]): Map[String, String] = {
+    // simply pass the args to next stage
+    args
+  }
+
+  /** create and return Mahout's ItemSimilarity object. */
+  def buildItemSimilarity(dataModel: DataModel, args: Map[String, String]): ItemSimilarity
+
+  /**
+   * Run algo job.
+   * In default implementation, the prepare() function does nothing
+ * The run() function reads and processes the local input file (defined by --input arg) and generates the prediction
+   * output file (defined by --output arg) for each user.
+   * Then finish() does nothing
+   */
+  def run(args: Map[String, String]): Map[String, String] = {
+
+    val input = args("input")
+    val output = args("output")
+    val itemsFile = args("itemsFile") // contains valid item index can be recommended
+    val numSimilarItems: Int = getArgOpt(args, "numSimilarItems", "10").toInt
+
+    // valid item index file (iindex)
+    // iindex
+    val validItemsSet: Set[Long] = Source.fromFile(itemsFile)
+      .getLines()
+      .map[Long] { line =>
+        val iindex = try {
+          val fields = line.split("\t")
+          fields(0).toLong
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        iindex
+      }.toSet
+
+    val dataModel: DataModel = new FileDataModel(new File(input))
+    val similarity: ItemSimilarity = buildItemSimilarity(dataModel, args)
+
+    val outputFile = new File(output)
+    // create dir if it doesn't exist yet.
+    outputFile.getParentFile().mkdirs()
+
+    // generate prediction output file
+    val outputWriter = new BufferedWriter(new FileWriter(outputFile))
+
+    val itemIds = dataModel.getItemIDs.toSeq
+    val candidateItemsIds = itemIds.filter(validItemsSet(_))
+
+    val allTopScores = itemIds.par.map { iid =>
+      val simScores = candidateItemsIds
+        .map { simiid => (simiid, similarity.itemSimilarity(iid, simiid)) }
+        // filter out invalid score or the same iid itself
+        .filter { x: (_, Double) => (!x._2.isNaN()) && (x._1 != iid) }
+
+      (iid, getTopN(simScores, numSimilarItems)(ScoreOdering.reverse))
+    }
+
+    allTopScores.seq.foreach {
+      case (iid, simScores) =>
+        if (!simScores.isEmpty) {
+          val scoresString = simScores.map(x => s"${x._1}:${x._2}").mkString(",")
+          outputWriter.write(s"${iid}\t[${scoresString}]\n")
+        }
+    }
+
+    outputWriter.close()
+
+    args
+  }
+
+  /** finish stage for algo */
+  def finish(args: Map[String, String]): Map[String, String] = {
+    // simply pass the args to next stage
+    args
+  }
+
+  /** Cleanup stage for algo */
+  def cleanup(args: Map[String, String]) = {
+    // simply pass the args to next stage
+    args
+  }
+
+  object ScoreOdering extends Ordering[(java.lang.Long, Double)] {
+    override def compare(a: (java.lang.Long, Double), b: (java.lang.Long, Double)) = a._2 compare b._2
+  }
+
+  def getTopN[T](s: Seq[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+    val q = PriorityQueue()
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+}
diff --git a/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructor.scala b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructor.scala
new file mode 100644
index 0000000..ff48564
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructor.scala
@@ -0,0 +1,168 @@
+package io.prediction.algorithms.mahout.itemsim
+
+import grizzled.slf4j.Logger
+import com.twitter.scalding.Args
+import scala.io.Source
+
+import io.prediction.commons.Config
+import io.prediction.commons.modeldata.{ ItemSimScore }
+
+/*
+ * Description:
+ * input file:
+ * - itemsIndex.tsv (iindex iid itypes): all items
+ * - similarities.tsv (iindex [iindex1:score,iindex2:score,...]: output of MahoutJob
+ *
+ * Required args:
+ * --inputDir: <string>
+ * --appid: <int>
+ * --engineid: <int>
+ * --algoid: <int>
+ * --modelSet: <boolean> (true/false). flag to indicate which set
+ * --numSimilarItems: <int>. number of similar items to be generated
+ *
+ * Optional args:
+ * --evalid: <int>. Offline Evaluation if evalid is specified
+ *
+ * Example:
+ */
+object MahoutModelConstructor {
+  /* global */
+  val logger = Logger(MahoutModelConstructor.getClass)
+  val commonsConfig = new Config
+
+  // argument of this job
+  case class JobArg(
+    val inputDir: String,
+    val appid: Int,
+    val algoid: Int,
+    val evalid: Option[Int],
+    val modelSet: Boolean,
+    val numSimilarItems: Int)
+
+  def main(cmdArgs: Array[String]) {
+    logger.info("Running model constructor for Mahout ...")
+    logger.info(cmdArgs.mkString(","))
+
+    /* get arg */
+    val args = Args(cmdArgs)
+
+    val arg = JobArg(
+      inputDir = args("inputDir"),
+      appid = args("appid").toInt,
+      algoid = args("algoid").toInt,
+      evalid = args.optional("evalid") map (x => x.toInt),
+      modelSet = args("modelSet").toBoolean,
+      numSimilarItems = args("numSimilarItems").toInt
+    )
+
+    /* run job */
+    modelCon(arg)
+    cleanUp(arg)
+  }
+
+  def modelCon(arg: JobArg) = {
+
+    // NOTE: if OFFLINE_EVAL, write to training modeldata and use evalid as appid
+    val OFFLINE_EVAL = (arg.evalid != None)
+
+    val modeldataDb = if (!OFFLINE_EVAL)
+      commonsConfig.getModeldataItemSimScores
+    else
+      commonsConfig.getModeldataTrainingItemSimScores
+
+    val appid = if (OFFLINE_EVAL) arg.evalid.get else arg.appid
+
+    case class ItemData(
+      val iid: String,
+      val itypes: Seq[String])
+
+    // item index file (iindex iid itypes)
+    // iindex -> ItemData
+    val itemsMap: Map[Int, ItemData] = Source.fromFile(s"${arg.inputDir}itemsIndex.tsv")
+      .getLines()
+      .map[(Int, ItemData)] { line =>
+        val (iindex, item) = try {
+          val fields = line.split("\t")
+          val itemData = ItemData(
+            iid = fields(1),
+            itypes = fields(2).split(",").toList
+          )
+          (fields(0).toInt, itemData)
+        } catch {
+          case e: Exception => {
+            throw new RuntimeException(s"Cannot get item info in line: ${line}. ${e}")
+          }
+        }
+        (iindex, item)
+      }.toMap
+
+    // prediction
+    Source.fromFile(s"${arg.inputDir}similarities.tsv")
+      .getLines()
+      .foreach { line =>
+        val fields = line.split("\t")
+
+        val (iindex, predictedData) = try {
+          (fields(0).toInt, fields(1))
+        } catch {
+          case e: Exception => throw new RuntimeException(s"Cannot extract uindex and prediction output from this line: ${line}. ${e}")
+        }
+
+        val predicted: Seq[(Int, Double)] = parsePredictedData(predictedData)
+          .map { case (iindex, rating) => (iindex.toInt, rating) }
+
+        val topScores = predicted
+          // valid item filtering is done inside MahoutJob
+          .sortBy(_._2)(Ordering[Double].reverse)
+          .take(arg.numSimilarItems)
+
+        logger.debug(s"${iindex}: ${topScores}")
+
+        val itemid = try {
+          itemsMap(iindex).iid
+        } catch {
+          case e: Exception => throw new RuntimeException(s"Cannot get iid for this iindex: ${line}. ${e}")
+        }
+        modeldataDb.insert(ItemSimScore(
+          iid = itemid,
+          simiids = topScores.map(x => itemsMap(x._1).iid),
+          scores = topScores.map(_._2),
+          itypes = topScores.map(x => itemsMap(x._1).itypes),
+          appid = appid,
+          algoid = arg.algoid,
+          modelset = arg.modelSet))
+
+      }
+  }
+
+  def cleanUp(arg: JobArg) = {
+
+  }
+
+  /* TODO refactor this
+  Mahout ItemRec output format
+  [24:3.2] => (24, 3.2)
+  [8:2.5,0:2.5]  => (8, 2.5), (0, 2.5)
+  [0:2.0]
+  [16:3.0]
+  */
+  def parsePredictedData(data: String): List[(String, Double)] = {
+    val dataLen = data.length
+    data.take(dataLen - 1).tail.split(",").toList.map { ratingData =>
+      val ratingDataArray = ratingData.split(":")
+      val item = ratingDataArray(0)
+      val rating: Double = try {
+        ratingDataArray(1).toDouble
+      } catch {
+        case e: Exception =>
+          {
+            assert(false, s"Cannot convert rating value of item ${item} to double: " + ratingDataArray + ". Exception: " + e)
+          }
+          0.0
+      }
+      (item, rating)
+    }
+  }
+
+}
diff --git a/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/itemsimcf/ItemSimCFJob.scala b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/itemsimcf/ItemSimCFJob.scala
new file mode 100644
index 0000000..8038065
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/main/scala/io/prediction/algorithms/mahout/itemsim/itemsimcf/ItemSimCFJob.scala
@@ -0,0 +1,42 @@
+package io.prediction.algorithms.mahout.itemsim.itemsimcf
+
+import io.prediction.algorithms.mahout.itemsim.MahoutJob
+
+import org.apache.mahout.cf.taste.model.DataModel
+import org.apache.mahout.cf.taste.common.Weighting
+import org.apache.mahout.cf.taste.similarity.ItemSimilarity
+import org.apache.mahout.cf.taste.impl.similarity.{
+  CityBlockSimilarity,
+  EuclideanDistanceSimilarity,
+  LogLikelihoodSimilarity,
+  PearsonCorrelationSimilarity,
+  TanimotoCoefficientSimilarity,
+  UncenteredCosineSimilarity
+}
+
+class ItemSimCFJob extends MahoutJob {
+
+  val defaultItemSimilarity = "LogLikelihoodSimilarity"
+
+  override def buildItemSimilarity(dataModel: DataModel, args: Map[String, String]): ItemSimilarity = {
+
+    val booleanData: Boolean = getArgOpt(args, "booleanData", "false").toBoolean
+    val itemSimilarity: String = getArgOpt(args, "itemSimilarity", defaultItemSimilarity)
+    val weighted: Boolean = getArgOpt(args, "weighted", "false").toBoolean
+
+    val weightedParam: Weighting = if (weighted) Weighting.WEIGHTED else Weighting.UNWEIGHTED
+
+    val similarity: ItemSimilarity = itemSimilarity match {
+      case "CityBlockSimilarity" => new CityBlockSimilarity(dataModel)
+      case "EuclideanDistanceSimilarity" => new EuclideanDistanceSimilarity(dataModel, weightedParam)
+      case "LogLikelihoodSimilarity" => new LogLikelihoodSimilarity(dataModel)
+      case "PearsonCorrelationSimilarity" => new PearsonCorrelationSimilarity(dataModel, weightedParam)
+      case "TanimotoCoefficientSimilarity" => new TanimotoCoefficientSimilarity(dataModel)
+      case "UncenteredCosineSimilarity" => new UncenteredCosineSimilarity(dataModel, weightedParam)
+      case _ => throw new RuntimeException("Invalid ItemSimilarity: " + itemSimilarity)
+    }
+
+    similarity
+  }
+
+}
\ No newline at end of file
diff --git a/process/engines/itemsim/algorithms/scala/mahout/src/test/resources/application.conf b/process/engines/itemsim/algorithms/scala/mahout/src/test/resources/application.conf
new file mode 100644
index 0000000..28ac0ee
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/test/resources/application.conf
@@ -0,0 +1,37 @@
+# Used by PredictionIO Commons
+io.prediction.base=.
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+io.prediction.commons.appdata.db.name=predictionio_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+io.prediction.commons.appdata.test.db.name=predictionio_test_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+io.prediction.commons.appdata.training.db.name=predictionio_trainig_appdata_mahout_dataprep_test
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+io.prediction.commons.appdata.validation.db.name=predictionio_validation_appdata_mahout_dataprep_test
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+io.prediction.commons.modeldata.db.name=predictionio_modeldata_mahout_dataprep_test
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+io.prediction.commons.modeldata.training.db.name=predictionio_training_modeldata_mahout_dataprep_test
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
+io.prediction.commons.settings.db.name=predictionio_mahout_dataprep_test
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml b/process/engines/itemsim/algorithms/scala/mahout/src/test/resources/logback.xml
similarity index 99%
copy from process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
copy to process/engines/itemsim/algorithms/scala/mahout/src/test/resources/logback.xml
index f8dd5fb..1a2768e 100644
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/resources/logback.xml
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/test/resources/logback.xml
@@ -8,3 +8,4 @@
     <appender-ref ref="STDOUT" />
   </root>
 </configuration>
+
diff --git a/process/engines/itemsim/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructorSpec.scala b/process/engines/itemsim/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructorSpec.scala
new file mode 100644
index 0000000..d48b50c
--- /dev/null
+++ b/process/engines/itemsim/algorithms/scala/mahout/src/test/scala/io/prediction/algorithms/mahout/itemsim/MahoutModelConstructorSpec.scala
@@ -0,0 +1,353 @@
+package io.prediction.algorithms.mahout.itemsim
+
+import io.prediction.commons.Config
+import io.prediction.commons.settings.{ App, Algo }
+import io.prediction.commons.modeldata.{ ItemSimScore }
+
+import org.specs2.mutable._
+import com.github.nscala_time.time.Imports._
+import scala.io.Source
+import java.io.File
+import java.io.FileWriter
+import java.io.BufferedWriter
+
+import com.mongodb.casbah.Imports._
+
+class MahoutItemSimModelConstructorSpec extends Specification {
+
+  // note: should match the db name defined in the application.conf
+  val mongoDbName = "predictionio_modeldata_mahout_dataprep_test"
+  def cleanUp() = {
+    // remove the test database
+    MongoConnection()(mongoDbName).dropDatabase()
+  }
+
+  val commonConfig = new Config
+  val modeldataItemSimScores = commonConfig.getModeldataItemSimScores
+
+  def argMapToArray(args: Map[String, Any]): Array[String] = {
+    args.toArray.flatMap {
+      case (k, v) =>
+        Array(s"--${k}", v.toString)
+    }
+  }
+
+  def writeToFile(lines: List[String], filePath: String) = {
+    val writer = new BufferedWriter(new FileWriter(new File(filePath)))
+    lines.foreach { line =>
+      writer.write(s"${line}\n")
+    }
+    writer.close()
+  }
+
+  "MahoutItemSimModelConstructor" should {
+    val inputDir = "/tmp/pio_test/"
+
+    val inputDirFile = new File(inputDir)
+    inputDirFile.mkdirs()
+
+    val itemsIndex = List(
+      "1\ti1\tt1,t2",
+      "2\ti2\tt1",
+      "3\ti3\tt2,t3",
+      "4\ti4\tt3"
+    )
+
+    val validItemIndex = List(
+      "1",
+      "2",
+      "3",
+      "4"
+    )
+
+    val similarities = List(
+      "1\t[2:3.2,3:12.5,4:20]",
+      "2\t[1:3.2,3:9.0]",
+      "3\t[1:12.5,2:9.0,4:12.0]",
+      "4\t[3:12.0,1:20]"
+    )
+
+    writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+    writeToFile(validItemIndex, s"${inputDir}validItemsIndex.tsv")
+    writeToFile(similarities, s"${inputDir}similarities.tsv")
+
+    val appid = 12
+
+    implicit val app = App(
+      id = appid,
+      userid = 0,
+      appkey = "1234",
+      display = "12345",
+      url = None,
+      cat = None,
+      desc = None,
+      timezone = "UTC"
+    )
+
+    "correctly writes ItemSimScores with larger numSimilarItems" in {
+
+      val algoid = 45
+      val modelSet = false
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 10
+      )
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i4", "i3", "i2"),
+        scores = Seq(20.0, 12.5, 3.2),
+        itypes = Seq(Seq("t3"), Seq("t2", "t3"), Seq("t1")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i3", "i1"),
+        scores = Seq(9.0, 3.2),
+        itypes = Seq(Seq("t2", "t3"), Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i1", "i4", "i2"),
+        scores = Seq(12.5, 12.0, 9.0),
+        itypes = Seq(Seq("t1", "t2"), Seq("t3"), Seq("t1")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i4Expected = ItemSimScore(
+        iid = "i4",
+        simiids = Seq("i1", "i3"),
+        scores = Seq(20.0, 12.0),
+        itypes = Seq(Seq("t1", "t2"), Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+      val i4ItemSim = modeldataItemSimScores.getByIid("i4")
+
+      // don't check id
+      i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected)) and
+        (i4ItemSim.map(_.copy(id = None)) must beSome(i4Expected))
+
+    }
+
+    "correctly writes ItemSimScores with smaller numSimilarItems" in {
+
+      val algoid = 45
+      val modelSet = true
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 1
+      )
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i4"),
+        scores = Seq(20.0),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i3"),
+        scores = Seq(9.0),
+        itypes = Seq(Seq("t2", "t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i1"),
+        scores = Seq(12.5),
+        itypes = Seq(Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i4Expected = ItemSimScore(
+        iid = "i4",
+        simiids = Seq("i1"),
+        scores = Seq(20.0),
+        itypes = Seq(Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+      val i4ItemSim = modeldataItemSimScores.getByIid("i4")
+
+      // don't check id
+      i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected)) and
+        (i4ItemSim.map(_.copy(id = None)) must beSome(i4Expected))
+
+    }
+    /* don't test: valid item filtering is not done in the mahout itemsim model constructor
+    "correctly writes ItemSimScores with subset numSimilarItems" in {
+
+      val algoid = 46
+      val modelSet = false
+
+      val inputDir = "/tmp/pio_test/subset/"
+
+      val inputDirFile = new File(inputDir)
+      inputDirFile.mkdirs()
+
+      val validItemIndex = List(
+        "1",
+        "4"
+      )
+
+      writeToFile(itemsIndex, s"${inputDir}itemsIndex.tsv")
+      writeToFile(validItemIndex, s"${inputDir}validItemsIndex.tsv")
+      writeToFile(similarities, s"${inputDir}similarities.tsv")
+
+      implicit val algo = Algo(
+        id = algoid,
+        engineid = 1234,
+        name = "",
+        infoid = "abc",
+        command = "",
+        params = Map(),
+        settings = Map(),
+        modelset = modelSet,
+        createtime = DateTime.now,
+        updatetime = DateTime.now,
+        status = "deployed",
+        offlineevalid = None,
+        offlinetuneid = None,
+        loop = None,
+        paramset = None
+      )
+
+      val args = Map(
+        "inputDir" -> inputDir,
+        "appid" -> appid,
+        "algoid" -> algoid,
+        "modelSet" -> modelSet,
+        "numSimilarItems" -> 10
+      )
+
+      val i1Expected = ItemSimScore(
+        iid = "i1",
+        simiids = Seq("i4"),
+        scores = Seq(20.0),
+        itypes = Seq(Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i2Expected = ItemSimScore(
+        iid = "i2",
+        simiids = Seq("i1"),
+        scores = Seq(3.2),
+        itypes = Seq(Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i3Expected = ItemSimScore(
+        iid = "i3",
+        simiids = Seq("i1", "i4"),
+        scores = Seq(12.5, 12.0),
+        itypes = Seq(Seq("t1", "t2"), Seq("t3")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      val i4Expected = ItemSimScore(
+        iid = "i4",
+        simiids = Seq("i1"),
+        scores = Seq(20.0),
+        itypes = Seq(Seq("t1", "t2")),
+        appid = appid,
+        algoid = algoid,
+        modelset = modelSet)
+
+      MahoutModelConstructor.main(argMapToArray(args))
+
+      val i1ItemSim = modeldataItemSimScores.getByIid("i1")
+      val i2ItemSim = modeldataItemSimScores.getByIid("i2")
+      val i3ItemSim = modeldataItemSimScores.getByIid("i3")
+      val i4ItemSim = modeldataItemSimScores.getByIid("i4")
+
+      // don't check id
+      i1ItemSim.map(_.copy(id = None)) must beSome(i1Expected) and
+        (i2ItemSim.map(_.copy(id = None)) must beSome(i2Expected)) and
+        (i3ItemSim.map(_.copy(id = None)) must beSome(i3Expected)) and
+        (i4ItemSim.map(_.copy(id = None)) must beSome(i4Expected))
+
+    }*/
+
+    // TODO: evalid
+
+  }
+
+  // clean up when finish test
+  step(cleanUp())
+}
\ No newline at end of file
diff --git a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtK.scala b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtK.scala
index 222f7e2..b7aa683 100644
--- a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtK.scala
+++ b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtK.scala
@@ -8,59 +8,60 @@
 import io.prediction.commons.filepath.OfflineMetricFile
 import io.prediction.commons.scalding.settings.OfflineEvalResults
 
-/** Source:
-  *   relevantUsers.tsv
-  *     iid     uid
-  *     i0      u0
-  *     i0      u1
-  *     i0      u2
-  *   relevantItems.tsv
-  *     uid     iid
-  *     u0      i0
-  *     u0      i1
-  *     u0      i2
-  *   topKItems.tsv
-  *     iid     simiid  score
-  *     i0      i1      3.2
-  *     i0      i4      2.5
-  *     i0      i5      1.4
-  *
-  * Sink:
-  *   offlineEvalResults DB
-  *   averagePrecision.tsv
-  *     iid     ap
-  *     i0      0.03
-  *
-  *
-  * Description:
-  *   Calculate Item Similarity Mean Average Precision @ k score
-  *   There is an assumption that the number of missing similar items per item is always equal to k.
-  *
-  * Required args:
-  * --dbType: <string> The OfflineEvalResults DB Type (eg. mongodb) (see --dbHost, --dbPort)
-  * --dbName: <string>
-  *
-  * --hdfsRoot: <string>. Root directory of the HDFS
-  *
-  * --appid: <int>
-  * --engineid: <int>
-  * --evalid: <int>
-  * --metricid: <int>
-  * --algoid: <int>
-  * --iteration: <int>
-  * --splitset: <string>
-  *
-  * --kParam: <int>
-  *
-  * Optional args:
-  * --dbHost: <string> (eg. "127.0.0.1")
-  * --dbPort: <int> (eg. 27017)
-  *
-  * --debug: <String>. "test" - for testing purpose
-  *
-  * Example:
-  * scald.rb --hdfs-local io.prediction.metrics.scalding.itemsim.ismap.ISMAPAtK --dbType mongodb --dbName predictionio --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --evalid 15 --metricid 10 --algoid 9 --kParam 30
-  */
+/**
+ * Source:
+ *   relevantUsers.tsv
+ *     iid     uid
+ *     i0      u0
+ *     i0      u1
+ *     i0      u2
+ *   relevantItems.tsv
+ *     uid     iid
+ *     u0      i0
+ *     u0      i1
+ *     u0      i2
+ *   topKItems.tsv
+ *     iid     simiid  score
+ *     i0      i1      3.2
+ *     i0      i4      2.5
+ *     i0      i5      1.4
+ *
+ * Sink:
+ *   offlineEvalResults DB
+ *   averagePrecision.tsv
+ *     iid     ap
+ *     i0      0.03
+ *
+ *
+ * Description:
+ *   Calculate Item Similarity Mean Average Precision @ k score
+ *   There is an assumption that the number of missing similar items per item is always equal to k.
+ *
+ * Required args:
+ * --dbType: <string> The OfflineEvalResults DB Type (eg. mongodb) (see --dbHost, --dbPort)
+ * --dbName: <string>
+ *
+ * --hdfsRoot: <string>. Root directory of the HDFS
+ *
+ * --appid: <int>
+ * --engineid: <int>
+ * --evalid: <int>
+ * --metricid: <int>
+ * --algoid: <int>
+ * --iteration: <int>
+ * --splitset: <string>
+ *
+ * --kParam: <int>
+ *
+ * Optional args:
+ * --dbHost: <string> (eg. "127.0.0.1")
+ * --dbPort: <int> (eg. 27017)
+ *
+ * --debug: <String>. "test" - for testing purpose
+ *
+ * Example:
+ * scald.rb --hdfs-local io.prediction.metrics.scalding.itemsim.ismap.ISMAPAtK --dbType mongodb --dbName predictionio --dbHost 127.0.0.1 --dbPort 27017 --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --evalid 15 --metricid 10 --algoid 9 --kParam 30
+ */
 class ISMAPAtK(args: Args) extends Job(args) {
   /** parse args */
   val dbTypeArg = args("dbType")
@@ -91,7 +92,7 @@
 
   /** sinks */
   val averagePrecisionSink = Tsv(OfflineMetricFile(hdfsRootArg, appidArg, engineidArg, evalidArg, metricidArg, algoidArg, "averagePrecision.tsv"))
-  val offlineEvalResultsSink = OfflineEvalResults(dbType=dbTypeArg, dbName=dbNameArg, dbHost=dbHostArg, dbPort=dbPortArg)
+  val offlineEvalResultsSink = OfflineEvalResults(dbType = dbTypeArg, dbName = dbNameArg, dbHost = dbHostArg, dbPort = dbPortArg)
 
   /** computation */
   val itemsMapAtK = topKItems
@@ -100,17 +101,17 @@
     .groupBy('iid, 'ruuid) {
       _.sortBy('score).reverse.scanLeft(('simiid, 'riiids) -> ('precision, 'hit, 'count))((0.0, 0, 0)) {
         (newFields: (Double, Int, Int), fields: (String, List[String])) =>
-        val (simiid, riiids) = fields
-        val (precision, hit, count) = newFields
-        Option(riiids) map { r =>
-          if (r.contains(simiid)) {
-            ((hit+1).toDouble/(count+1).toDouble, hit+1, count+1)
-          } else {
-            (0.0, hit, count+1)
+          val (simiid, riiids) = fields
+          val (precision, hit, count) = newFields
+          Option(riiids) map { r =>
+            if (r.contains(simiid)) {
+              ((hit + 1).toDouble / (count + 1).toDouble, hit + 1, count + 1)
+            } else {
+              (0.0, hit, count + 1)
+            }
+          } getOrElse {
+            (0.0, hit, count + 1)
           }
-        } getOrElse {
-          (0.0, hit, count+1)
-        }
       }
     }
     .filter('count) { count: Int => count > 0 }
@@ -123,5 +124,5 @@
     .mapTo('precision -> ('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset)) { precision: Double =>
       (evalidArg, metricidArg, algoidArg, precision, iterationArg, splitsetArg)
     }
-    .then( offlineEvalResultsSink.writeData('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset) _ )
+    .then(offlineEvalResultsSink.writeData('evalid, 'metricid, 'algoid, 'score, 'iteration, 'splitset) _)
 }
diff --git a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparator.scala b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparator.scala
index 8f62463..3d270bd 100644
--- a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparator.scala
+++ b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/main/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparator.scala
@@ -5,61 +5,62 @@
 import io.prediction.commons.filepath.OfflineMetricFile
 import io.prediction.commons.scalding.appdata.U2iActions
 
-/** Source:
-  *   Test set u2iActions.
-  *
-  * Sink:
-  *   relevantUsers.tsv
-  *     iid     uid
-  *     i0      u0
-  *     i0      u1
-  *     i0      u2
-  *   relevantItems.tsv
-  *     uid     iid
-  *     u0      i0
-  *     u0      i1
-  *     u0      i2
-  *
-  * Description:
-  *   Generate relevantUsers and relevantItems for ISMAP@k
-  *
-  * Required args:
-  * --test_dbType: <string> test_appdata DB type (eg. mongodb)
-  * --test_dbName: <string>
-  *
-  * --training_dbType: <string> training_appdata DB type
-  * --training_dbName: <string>
-  *
-  * --modeldata_dbType: <string> modeldata DB type
-  * --modeldata_dbName: <string>
-  *
-  * --hdfsRoot: <string>. Root directory of the HDFS
-  *
-  * --appid: <int>
-  * --engineid: <int>
-  * --evalid: <int>
-  * --metricid: <int>
-  * --algoid: <int>
-  *
-  * --kParam: <int>
-  * --goalParam: <string> ("view", "conversion", "like", "rate3", "rate4", "rate5)
-  *
-  * Optional args:
-  * --test_dbHost: <string> (eg. "127.0.0.1")
-  * --test_dbPort: <int> (eg. 27017)
-  *
-  * --training_dbHost: <string>
-  * --training_dbPort: <int>
-  *
-  * --modeldata_dbHost: <string>
-  * --modeldata_dbPort <int>
-  *
-  * --debug: <String>. "test" - for testing purpose
-  *
-  * Example:
-  * scald.rb --hdfs-local io.prediction.metrics.scalding.itemsim.map.ISMAPAtKDataPreparator --test_dbType mongodb --test_dbName test_appdata --test_dbHost 127.0.0.1 --test_dbPort 27017 --training_dbType mongodb --training_dbName training_appdata --training_dbHost 127.0.0.1 --training_dbPort 27017 --modeldata_dbType file --modeldata_dbName modeldata_path/ --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --evalid 15 --metricid 10 --algoid 9 --kParam 30 --goalParam rate3
-  *
-  */
+/**
+ * Source:
+ *   Test set u2iActions.
+ *
+ * Sink:
+ *   relevantUsers.tsv
+ *     iid     uid
+ *     i0      u0
+ *     i0      u1
+ *     i0      u2
+ *   relevantItems.tsv
+ *     uid     iid
+ *     u0      i0
+ *     u0      i1
+ *     u0      i2
+ *
+ * Description:
+ *   Generate relevantUsers and relevantItems for ISMAP@k
+ *
+ * Required args:
+ * --test_dbType: <string> test_appdata DB type (eg. mongodb)
+ * --test_dbName: <string>
+ *
+ * --training_dbType: <string> training_appdata DB type
+ * --training_dbName: <string>
+ *
+ * --modeldata_dbType: <string> modeldata DB type
+ * --modeldata_dbName: <string>
+ *
+ * --hdfsRoot: <string>. Root directory of the HDFS
+ *
+ * --appid: <int>
+ * --engineid: <int>
+ * --evalid: <int>
+ * --metricid: <int>
+ * --algoid: <int>
+ *
+ * --kParam: <int>
+ * --goalParam: <string> ("view", "conversion", "like", "rate3", "rate4", "rate5")
+ *
+ * Optional args:
+ * --test_dbHost: <string> (eg. "127.0.0.1")
+ * --test_dbPort: <int> (eg. 27017)
+ *
+ * --training_dbHost: <string>
+ * --training_dbPort: <int>
+ *
+ * --modeldata_dbHost: <string>
+ * --modeldata_dbPort <int>
+ *
+ * --debug: <String>. "test" - for testing purpose
+ *
+ * Example:
+ * scald.rb --hdfs-local io.prediction.metrics.scalding.itemsim.map.ISMAPAtKDataPreparator --test_dbType mongodb --test_dbName test_appdata --test_dbHost 127.0.0.1 --test_dbPort 27017 --training_dbType mongodb --training_dbName training_appdata --training_dbHost 127.0.0.1 --training_dbPort 27017 --modeldata_dbType file --modeldata_dbName modeldata_path/ --hdfsRoot hdfs/predictionio/ --appid 34 --engineid 3 --evalid 15 --metricid 10 --algoid 9 --kParam 30 --goalParam rate3
+ *
+ */
 class ISMAPAtKDataPreparator(args: Args) extends Job(args) {
   val test_dbTypeArg = args("test_dbType")
   val test_dbNameArg = args("test_dbName")
@@ -106,14 +107,15 @@
   final val ACTION_CONVERSION = "conversion"
 
   /** source */
-  val testU2i = U2iActions(appId=evalidArg,
-      dbType=test_dbTypeArg, dbName=test_dbNameArg, dbHost=test_dbHostArg, dbPort=test_dbPortArg).readData('actionTest, 'uidTest, 'iidTest, 'tTest, 'vTest)
+  val testU2i = U2iActions(appId = evalidArg,
+    dbType = test_dbTypeArg, dbName = test_dbNameArg, dbHost = test_dbHostArg, dbPort = test_dbPortArg).readData('actionTest, 'uidTest, 'iidTest, 'tTest, 'vTest)
 
-  /** computation
-    *
-    * for each user, get a list of items which match the goalParam
-    * TODO: filter out items appeared in trainingU2i?
-    */
+  /**
+   * computation
+   *
+   * for each user, get a list of items which match the goalParam
+   * TODO: filter out items appeared in trainingU2i?
+   */
   testU2i
     .filter('actionTest, 'vTest) { fields: (String, Option[String]) =>
       val (action, v) = fields
@@ -136,7 +138,7 @@
           case e: Exception => {
             assert(false, s"Failed to convert v field ${v} to int. Exception:" + e)
             false
-          }   
+          }
         }
         case GOAL_RATE5 => try {
           (action == ACTION_RATE) && (v.get.toInt >= 5)
diff --git a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparatorTest.scala b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparatorTest.scala
index da6ee85..f9171c9 100644
--- a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparatorTest.scala
+++ b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKDataPreparatorTest.scala
@@ -57,7 +57,7 @@
       .arg("algoid", "8")
       .arg("goalParam", params("goalParam"))
       .arg("kParam", params("kParam"))
-      .source(U2iActions(appId=5, dbType=test_dbType, dbName=test_dbName, dbHost=test_dbHost, dbPort=test_dbPort).getSource, testU2i)
+      .source(U2iActions(appId = 5, dbType = test_dbType, dbName = test_dbName, dbHost = test_dbHost, dbPort = test_dbPort).getSource, testU2i)
       .sink[(String, String)](Tsv(OfflineMetricFile(hdfsRoot, 2, 4, 5, 6, 8, "relevantItems.tsv"))) { outputBuffer =>
         "correctly generates relevantItems for each user" in {
           val output = splitAndSortList(outputBuffer.toList)
diff --git a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKTest.scala b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKTest.scala
index 1e46d0b..cc41959 100644
--- a/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKTest.scala
+++ b/process/engines/itemsim/evaluations/hadoop/scalding/metrics/ismap/src/test/scala/io/prediction/metrics/scalding/itemsim/ismap/ISMAPAtKTest.scala
@@ -5,7 +5,7 @@
 import com.twitter.scalding._
 import com.twitter.scalding.Dsl._
 
-import io.prediction.commons.filepath.{OfflineMetricFile}
+import io.prediction.commons.filepath.{ OfflineMetricFile }
 import io.prediction.commons.scalding.settings.OfflineEvalResults
 
 class ISMAPAtKTest extends Specification {
@@ -57,8 +57,8 @@
           def roundingData(orgList: List[(String, Double)]) = {
             orgList map { x =>
               val (t1, t2) = x
-               // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
-               // (eg. 3.5 vs 3.499999999999, 0.6666666666 vs 0.666666667)
+              // NOTE: use HALF_UP mode to avoid error caused by rounding when compare data
+              // (eg. 3.5 vs 3.499999999999, 0.6666666666 vs 0.666666667)
               (t1, BigDecimal(t2).setScale(6, BigDecimal.RoundingMode.HALF_UP).toDouble)
             }
           }
@@ -66,7 +66,7 @@
           roundingData(outputBuffer.toList) must containTheSameElementsAs(roundingData(averagePrecision))
         }
       }
-      .sink[(Int, Int, Int, Double, Int, String)](OfflineEvalResults(dbType=dbType, dbName=dbName, dbHost=dbHost, dbPort=dbPort).getSource) { outputBuffer =>
+      .sink[(Int, Int, Int, Double, Int, String)](OfflineEvalResults(dbType = dbType, dbName = dbName, dbHost = dbHost, dbPort = dbPort).getSource) { outputBuffer =>
         def roundingData(orgList: List[(Int, Int, Int, Double, Int, String)]) = {
           orgList map { x =>
             val (t1, t2, t3, t4, t5, t6) = x
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/build.sbt b/process/engines/itemsim/evaluations/scala/topkitems/build.sbt
deleted file mode 100644
index fdd59a9..0000000
--- a/process/engines/itemsim/evaluations/scala/topkitems/build.sbt
+++ /dev/null
@@ -1,18 +0,0 @@
-import AssemblyKeys._
-
-assemblySettings
-
-name := "predictionio-process-itemsim-evaluations-topkitems"
-
-libraryDependencies ++= Seq(
-  "ch.qos.logback" % "logback-classic" % "1.0.9",
-  "ch.qos.logback" % "logback-core" % "1.0.9",
-  "com.github.scala-incubator.io" %% "scala-io-core" % "0.4.2",
-  "com.github.scala-incubator.io" %% "scala-io-file" % "0.4.2",
-  "com.typesafe" % "config" % "1.0.0",
-  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
-
-excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
-  val excludes = Set("minlog-1.2.jar")
-  cp filter { jar => excludes(jar.data.getName)}
-}
diff --git a/process/engines/itemsim/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemsim/topkitems/TopKItems.scala b/process/engines/itemsim/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemsim/topkitems/TopKItems.scala
deleted file mode 100644
index a988b1d..0000000
--- a/process/engines/itemsim/evaluations/scala/topkitems/src/main/scala/io/prediction/evaluations/itemsim/topkitems/TopKItems.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-package io.prediction.evaluations.itemsim.topkitems
-
-import io.prediction.commons._
-import io.prediction.commons.filepath.OfflineMetricFile
-import io.prediction.output.itemsim.ItemSimAlgoOutput
-
-import com.typesafe.config.ConfigFactory
-import grizzled.slf4j.Logger
-import java.io.File
-import scala.sys.process._
-import scalax.io._
-
-object TopKItems {
-  def main(args: Array[String]) {
-    val logger = Logger(TopKItems.getClass)
-
-    val config = ConfigFactory.load
-
-    val evalid = config.getInt("evalid")
-    val algoid = config.getInt("algoid")
-    val metricid = config.getInt("metricid")
-    val hdfsRoot = config.getString("hdfsroot")
-    val k = config.getInt("k")
-
-    val commonsConfig = new Config
-
-    /** Try search path if hadoop home is not set. */
-    val hadoopCommand = commonsConfig.settingsHadoopHome map { h => h+"/bin/hadoop" } getOrElse { "hadoop" }
-
-    val apps = commonsConfig.getSettingsApps
-    val engines = commonsConfig.getSettingsEngines
-    val algos = commonsConfig.getSettingsAlgos
-    val offlineEvals = commonsConfig.getSettingsOfflineEvals
-    val items = commonsConfig.getAppdataTrainingItems
-
-    val algo = algos.get(algoid).get
-    val offlineEval = offlineEvals.get(evalid).get
-    val engine = engines.get(offlineEval.engineid).get
-    val app = apps.get(engine.appid).get.copy(id = evalid)
-
-    val tmpFile = File.createTempFile("pdio-", ".topk", new File(commonsConfig.settingsLocalTempRoot))
-    tmpFile.deleteOnExit
-    val output: Output = Resource.fromFile(tmpFile)
-    logger.info(s"Dumping data to temporary file ${tmpFile}...")
-
-    val scores = Seq.range(1, k + 1).reverse
-
-    var itemCount = 0
-    items.getByAppid(evalid) foreach { i =>
-      val topKItems = ItemSimAlgoOutput.output(i.id, k, None, None, None, None)(app, engine, algo, Some(offlineEval))
-      if (topKItems.length > 0) {
-        itemCount += 1
-        topKItems.zip(scores) foreach { tuple =>
-          val (iid, score) = tuple
-          output.write(s"${evalid}_${i.id}\t${evalid}_${iid}\t${score}\n")
-        }
-      }
-    }
-    logger.info(s"Found ${itemCount} item(s) with non-zero top-K items")
-
-    val hdfsFile = OfflineMetricFile(hdfsRoot, engine.appid, engine.id, evalid, metricid, algoid, "topKItems.tsv")
-
-    val rmCommand = s"$hadoopCommand fs -rm $hdfsFile"
-    logger.info(s"Executing '${rmCommand}'...")
-    rmCommand.!
-
-    val copyCommand = s"$hadoopCommand fs -copyFromLocal $tmpFile $hdfsFile"
-    logger.info(s"Executing '${copyCommand}'...")
-    copyCommand.!
-
-    logger.info("Finished")
-  }
-}
diff --git a/project/Common.scala b/project/Common.scala
new file mode 100644
index 0000000..9988eac
--- /dev/null
+++ b/project/Common.scala
@@ -0,0 +1,6 @@
+import sbt._
+import Keys._
+
+object Common {
+  def packCommonJvmOpts = Seq("-Dconfig.file=${PROG_HOME}/conf/predictionio.conf", "-Dio.prediction.base=${PROG_HOME}")
+}
diff --git a/project/plugins.sbt b/project/plugins.sbt
index e36c0eb..1593a07 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,7 +1,5 @@
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.10.1")
 
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.3.1")
+addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.5.1")
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.1")
-
-addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.6.4")
diff --git a/servers/admin/app/controllers/Application.scala b/servers/admin/app/controllers/Application.scala
index cac2814..03e8037 100644
--- a/servers/admin/app/controllers/Application.scala
+++ b/servers/admin/app/controllers/Application.scala
@@ -1642,18 +1642,30 @@
             "validationPercent" -> 0.0, // no validatoin set for sim eval
             "testPercent" -> (splitTest.toDouble / 100)
           )
+
+          // get list of algo obj
+          val optAlgos: List[Option[Algo]] = algoids.map { algos.get(_) }
+
+          // TODO: Allow selection of splitter
+          // Use Hadoop version if any algo in the list requires Hadoop
+          val hadoopRequired = optAlgos.map { optAlgo =>
+            optAlgo map { algo =>
+              algoInfos.get(algo.infoid) map { info =>
+                info.techreq.contains("Hadoop")
+              } getOrElse false
+            } getOrElse false
+          } reduce { _ || _ }
+
           val splitterList = params.filter(p => (p("infotype") == "offlineevalsplitter")).map(p =>
             OfflineEvalSplitter(
               id = 0,
               evalid = 0, // will be assigned later
               name = "", // will be assigned later
-              infoid = p("infoid").asInstanceOf[String],
+              //infoid = p("infoid").asInstanceOf[String],
+              infoid = if (hadoopRequired) "pio-distributed-trainingtestsplit" else "pio-single-trainingtestsplit",
               settings = p ++ percentageParam - "infoid" - "infotype" // remove these keys from params
             )).toList
 
-          // get list of algo obj
-          val optAlgos: List[Option[Algo]] = algoids.map { algos.get(_) }
-
           if (metricList.length == 0)
             BadRequest(Json.obj("message" -> "At least one metric is required."))
           else if (splitterList.length == 0)
@@ -2206,7 +2218,7 @@
           "allitemtypes" -> toJson(engine.itypes == None),
           "itemtypelist" -> engine.itypes.map(x => toJson(x.toIterator.toSeq)).getOrElse(JsNull),
           "trainingdisabled" -> engine.trainingdisabled.map(toJson(_)).getOrElse(toJson(false)),
-          "trainingschedule" -> engine.trainingschedule.map(toJson(_)).getOrElse(toJson("0 * * * *"))) ++
+          "trainingschedule" -> engine.trainingschedule.map(toJson(_)).getOrElse(toJson("0 0 * * * ?"))) ++
           (params map { case (k, v) => (k, toJson(v.toString)) })))
       } getOrElse {
         NotFound(toJson(Map("message" -> toJson(s"Invalid EngineInfo ID: ${engine.infoid}"))))
@@ -2280,29 +2292,41 @@
 
   def updateAlgoSettings(appid: Int, engineid: Int, algoid: Int) = WithAlgo(appid, engineid, algoid) { (user, app, engine, algo) =>
     implicit request =>
-      val f = Form(tuple("infoid" -> mapOfStringToAny, "tune" -> text, "tuneMethod" -> text))
+      val f = Form(tuple("infoid" -> mapOfStringToAny, "tune" -> optional(text), "tuneMethod" -> optional(text)))
       f.bindFromRequest.fold(
         e => BadRequest(toJson(Map("message" -> toJson(e.toString)))),
         bound => {
           val (params, tune, tuneMethod) = bound
           // NOTE: read-modify-write the original param
-          val updatedParams = algo.params ++ params ++ Map("tune" -> tune, "tuneMethod" -> tuneMethod) - "infoid"
+          val tuneObj = (tune, tuneMethod) match {
+            case (Some("manual"), _) => Map("tune" -> "manual")
+            case (Some("auto"), Some(m)) => Map("tune" -> "auto", "tuneMethod" -> m)
+            case (_, _) => Map()
+          }
+          val updatedParams = algo.params ++ params ++ tuneObj - "infoid"
           val updatedAlgo = algo.copy(params = updatedParams)
 
-          if (updatedParams("tune") != "auto") {
+          // NOTE: "tune" is optional param. default to "manual"
+          if (updatedParams.getOrElse("tune", "manual") != "auto") {
             algos.update(updatedAlgo)
             Ok
           } else {
             // create offline eval with baseline algo
             // TODO: get from UI
             val defaultBaseLineAlgoType = engine.infoid match {
-              case "itemrec" => "pdio-randomrank"
-              case "itemsim" => "pdio-itemsimrandomrank"
+              case "itemrec" => "pio-itemrec-single-random"
+              case "itemsim" => "pio-itemsim-single-random"
             }
 
             engineInfos.get(engine.infoid).map { engineInfo =>
-              val metricinfoid = engineInfo.defaultofflineevalmetricinfoid // TODO: from UI
-              val splitterinfoid = engineInfo.defaultofflineevalsplitterinfoid // TODO: from UI
+              val hadoopRequired = algoInfos.get(algo.infoid) map { _.techreq.contains("Hadoop") } getOrElse false
+              val metricinfoid = (hadoopRequired, engine.infoid) match {
+                case (true, "itemrec") => "pio-itemrec-distributed-map_k"
+                case (true, "itemsim") => "pio-itemsim-distributed-ismap_k"
+                case (false, "itemrec") => "pio-itemrec-single-map_k"
+                case (false, "itemsim") => "pio-itemsim-single-ismap_k"
+              } // TODO: from UI
+              val splitterinfoid = if (hadoopRequired) "pio-distributed-trainingtestsplit" else "pio-single-trainingtestsplit" // TODO: from UI
               algoInfos.get(defaultBaseLineAlgoType).map { baseLineAlgoInfo =>
                 offlineEvalMetricInfos.get(metricinfoid).map { metricInfo =>
                   offlineEvalSplitterInfos.get(splitterinfoid).map { splitterInfo =>
@@ -2341,7 +2365,7 @@
 
                     paramGens.insert(ParamGen(
                       id = -1,
-                      infoid = "random", // TODO: default random scan param gen now
+                      infoid = "pio-single-random", // TODO: default random scan param gen now
                       tuneid = tuneid,
                       params = Map() // TODO: param for param gen
                     ))
diff --git a/servers/admin/app/controllers/Helper.scala b/servers/admin/app/controllers/Helper.scala
index a821e4b..02a98ee 100644
--- a/servers/admin/app/controllers/Helper.scala
+++ b/servers/admin/app/controllers/Helper.scala
@@ -441,6 +441,32 @@
     }
   }
 
+  def hadoopRequiredByApp(appid: Int): Boolean = {
+    apps.get(appid) map { app =>
+      engines.getByAppid(app.id).foldLeft(false) { (b, a) => b || hadoopRequiredByEngine(a.id) }
+    } getOrElse false
+  }
+
+  def hadoopRequiredByEngine(engineid: Int): Boolean = {
+    engines.get(engineid) map { engine =>
+      algos.getByEngineid(engine.id).foldLeft(false) { (b, a) => (b || hadoopRequiredByAlgo(a.id)) }
+    } getOrElse false
+  }
+
+  def hadoopRequiredByAlgo(algoid: Int): Boolean = {
+    algos.get(algoid) map { algo =>
+      algoInfos.get(algo.infoid) map { algoinfo =>
+        algoinfo.techreq.contains("Hadoop")
+      } getOrElse false
+    } getOrElse false
+  }
+
+  def hadoopRequiredByOfflineEval(evalid: Int): Boolean = {
+    offlineEvals.get(evalid) map { oe =>
+      algos.getByOfflineEvalid(oe.id).foldLeft(false) { (b, a) => (b || hadoopRequiredByAlgo(a.id)) }
+    } getOrElse false
+  }
+
   /**
    * Request scheduler to stop and delete sim eval
    * @return Future[SimpleResult]
@@ -450,7 +476,8 @@
     /** Stop any possible running jobs */
     val stop = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlineevals/${evalid}/stop").get()
     /** Clean up intermediate data files */
-    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlineevals/${evalid}/delete").get()
+    val deleteHadoop = if (hadoopRequiredByOfflineEval(evalid)) "?hadoop=1" else ""
+    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlineevals/${evalid}/delete${deleteHadoop}").get()
     /** Synchronize on both scheduler actions */
     val remove = concurrent.Future.reduce(Seq(stop, delete)) { (a, b) =>
       if (a.status != http.Status.OK) // keep the 1st error
@@ -482,7 +509,8 @@
     val stop = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlinetunes/${tuneid}/stop").get()
 
     val deletes = offlineEvals.getByTuneid(tuneid) map { eval =>
-      WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlineevals/${eval.id}/delete").get()
+      val deleteHadoop = if (hadoopRequiredByOfflineEval(eval.id)) "?hadoop=1" else ""
+      WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/offlineevals/${eval.id}/delete${deleteHadoop}").get()
     }
 
     val remove = concurrent.Future.reduce(Seq(stop) ++ deletes) { (a, b) =>
@@ -511,7 +539,8 @@
    * @return Future[SimpleResult]
    */
   def deleteAlgoScheduler(appid: Int, engineid: Int, id: Int) = {
-    val delete = WS.url(settingsSchedulerUrl + "/apps/" + appid + "/engines/" + engineid + "/algos/" + id + "/delete").get()
+    val deleteHadoop = if (hadoopRequiredByAlgo(id)) "?hadoop=1" else ""
+    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/algos/${id}/delete${deleteHadoop}").get()
 
     delete map { r =>
       if (r.status == http.Status.OK)
@@ -529,7 +558,8 @@
    * @return Future[SimpleResult]
    */
   def deleteEngineScheduler(appid: Int, engineid: Int) = {
-    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/delete").get()
+    val deleteHadoop = if (hadoopRequiredByEngine(engineid)) "?hadoop=1" else ""
+    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/engines/${engineid}/delete${deleteHadoop}").get()
 
     delete map { r =>
       if (r.status == http.Status.OK)
@@ -547,7 +577,8 @@
    * @return Future[SimpleResult]
    */
   def deleteAppScheduler(appid: Int) = {
-    val delete = WS.url(settingsSchedulerUrl + "/apps/" + appid + "/delete").get()
+    val deleteHadoop = if (hadoopRequiredByApp(appid)) "?hadoop=1" else ""
+    val delete = WS.url(s"${settingsSchedulerUrl}/apps/${appid}/delete${deleteHadoop}").get()
 
     delete map { r =>
       if (r.status == http.Status.OK)
diff --git a/servers/admin/build.sbt b/servers/admin/build.sbt
index 887cb7a..cd30acf 100644
--- a/servers/admin/build.sbt
+++ b/servers/admin/build.sbt
@@ -1,6 +1,6 @@
 name := "predictionio-admin"
 
-version := "0.6.8"
+version := "0.7.0"
 
 organization := "io.prediction"
 
diff --git a/servers/admin/project/plugins.sbt b/servers/admin/project/plugins.sbt
index 6153ef1..16ba183 100644
--- a/servers/admin/project/plugins.sbt
+++ b/servers/admin/project/plugins.sbt
@@ -5,6 +5,6 @@
 resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
 
 // Use the Play sbt plugin for Play projects
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.0")
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.2")
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.1")
diff --git a/servers/api/README.md b/servers/api/README.md
new file mode 100644
index 0000000..247cab5
--- /dev/null
+++ b/servers/api/README.md
@@ -0,0 +1,26 @@
+How to build and run API server
+===============================
+
+## Prereq
+
+1. Install [sbt](http://www.scala-sbt.org/release/docs/Getting-Started/Setup.html).
+2. Install [play framework](http://www.playframework.com/download).
+
+## Build and run
+```
+# From the repository root. The API server depends on other PIO modules, so compile and publish them first.
+sbt commons/publish output/publish
+
+# Go to API server directory and run play framework
+cd servers/api/
+play
+
+# In play framework console
+run
+# Or, if you need to specify port number.
+run -Dhttp.port=8000
+
+# Everytime when pio modules are updated (and published).
+# Need to update the dependency in Play console.
+update
+```
diff --git a/servers/api/app/io/prediction/api/API.scala b/servers/api/app/io/prediction/api/API.scala
index bbcc5e5..0e050b9 100644
--- a/servers/api/app/io/prediction/api/API.scala
+++ b/servers/api/app/io/prediction/api/API.scala
@@ -150,7 +150,17 @@
         }
       }
   }
-  val itypes: Mapping[String] = of[String] verifying Constraints.pattern("""[^\t]+""".r, "itypes", "Must not contain \t.")
+  val itypes: Mapping[String] = of[String] verifying Constraint[String]("itypes") { o =>
+    """[^\t]+""".r.unapplySeq(o) map { _ =>
+      val splitted = o.split(",")
+      if (splitted.size == 0)
+        Invalid(ValidationError("Must specify at least one valid item type."))
+      else if (splitted.exists(_.size == 0))
+        Invalid(ValidationError("Must not contain any empty item types."))
+      else
+        Valid
+    } getOrElse (Invalid(ValidationError("Must not contain any tab characters.")))
+  }
   val latlngRegex = """-?\d+(\.\d*)?,-?\d+(\.\d*)?""".r
   val latlng: Mapping[String] = of[String] verifying Constraint[String]("latlng", () => latlngRegex) {
     o =>
diff --git a/servers/api/build.sbt b/servers/api/build.sbt
index 5efe8ca..8100717 100644
--- a/servers/api/build.sbt
+++ b/servers/api/build.sbt
@@ -1,6 +1,6 @@
 name := "predictionio-api"
 
-version := "0.6.8"
+version := "0.7.0"
 
 organization := "io.prediction"
 
diff --git a/servers/api/project/plugins.sbt b/servers/api/project/plugins.sbt
index 6153ef1..16ba183 100644
--- a/servers/api/project/plugins.sbt
+++ b/servers/api/project/plugins.sbt
@@ -5,6 +5,6 @@
 resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
 
 // Use the Play sbt plugin for Play projects
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.0")
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.2")
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.1")
diff --git a/servers/api/test/APISpec.scala b/servers/api/test/APISpec.scala
index 9d5e20f..67d3b15 100644
--- a/servers/api/test/APISpec.scala
+++ b/servers/api/test/APISpec.scala
@@ -39,52 +39,52 @@
     timezone = "UTC"))
 
   val dac = Item(
-    id         = "dac",
-    appid      = appid,
-    ct         = DateTime.now,
-    itypes     = List("fresh", "meat"),
-    starttime  = Some(DateTime.now.hour(14).minute(13)),
-    endtime    = None,
-    price      = Some(49.394),
-    profit     = None,
-    latlng     = Some((37.3197611, -122.0466141)),
-    inactive   = None,
+    id = "dac",
+    appid = appid,
+    ct = DateTime.now,
+    itypes = List("fresh", "meat"),
+    starttime = Some(DateTime.now),
+    endtime = None,
+    price = Some(49.394),
+    profit = None,
+    latlng = Some((37.3197611, -122.0466141)),
+    inactive = None,
     attributes = Some(Map("foo" -> "bar", "foo2" -> "bar2")))
   val hsh = Item(
-    id         = "hsh",
-    appid      = appid,
-    ct         = DateTime.now,
-    itypes     = List("fresh", "meat"),
-    starttime  = Some(DateTime.now.hour(23).minute(13)),
-    endtime    = None,
-    price      = Some(49.394),
-    profit     = None,
-    latlng     = Some((37.3370801, -122.0493201)),
-    inactive   = None,
+    id = "hsh",
+    appid = appid,
+    ct = DateTime.now,
+    itypes = List("fresh", "meat"),
+    starttime = Some(DateTime.now),
+    endtime = None,
+    price = Some(49.394),
+    profit = None,
+    latlng = Some((37.3370801, -122.0493201)),
+    inactive = None,
     attributes = None)
   val mvh = Item(
-    id         = "mvh",
-    appid      = appid,
-    ct         = DateTime.now,
-    itypes     = List("fresh", "meat"),
-    starttime  = Some(DateTime.now.hour(17).minute(13)),
-    endtime    = None,
-    price      = Some(49.394),
-    profit     = None,
-    latlng     = Some((37.3154153, -122.0566829)),
-    inactive   = None,
+    id = "mvh",
+    appid = appid,
+    ct = DateTime.now,
+    itypes = List("fresh", "meat"),
+    starttime = Some(DateTime.now),
+    endtime = None,
+    price = Some(49.394),
+    profit = None,
+    latlng = Some((37.3154153, -122.0566829)),
+    inactive = None,
     attributes = Some(Map("foo3" -> "bar3")))
   val lbh = Item(
-    id         = "lbh",
-    appid      = appid,
-    ct         = DateTime.now,
-    itypes     = List("fresh", "meat"),
-    starttime  = Some(DateTime.now.hour(3).minute(13)),
-    endtime    = None,
-    price      = Some(49.394),
-    profit     = None,
-    latlng     = Some((37.2997029, -122.0034684)),
-    inactive   = None,
+    id = "lbh",
+    appid = appid,
+    ct = DateTime.now,
+    itypes = List("fresh", "meat"),
+    starttime = Some(DateTime.now),
+    endtime = None,
+    price = Some(49.394),
+    profit = None,
+    latlng = Some((37.2997029, -122.0034684)),
+    inactive = None,
     attributes = Some(Map("foo4" -> "bar4", "foo5" -> "bar5")))
   val allItems = Seq(dac, hsh, lbh, mvh)
   allItems foreach { items.insert(_) }
@@ -93,20 +93,20 @@
     val enginename = "itemrec"
 
     val engineid = engines.insert(Engine(
-      id       = 0,
-      appid    = appid,
-      name     = "itemrec",
-      infoid   = "itemrec",
-      itypes   = None,
-      params   = Map()))
+      id = 0,
+      appid = appid,
+      name = "itemrec",
+      infoid = "itemrec",
+      itypes = None,
+      params = Map()))
 
     val algoid = algos.insert(Algo(
-      id       = 0,
+      id = 0,
       engineid = engineid,
-      name     = enginename,
-      infoid   = "pdio-knnitembased",
-      command  = "itemr",
-      params   = Map("foo" -> "bar"),
+      name = enginename,
+      infoid = "pdio-knnitembased",
+      command = "itemr",
+      params = Map("foo" -> "bar"),
       settings = Map("dead" -> "beef"),
       modelset = true,
       createtime = DateTime.now,
@@ -116,36 +116,9 @@
 
     itemRecScores.insert(ItemRecScore(
       uid = "user1",
-      iid = "dac",
-      score = 1,
-      itypes = Seq("bar"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemRecScores.insert(ItemRecScore(
-      uid = "user1",
-      iid = "hsh",
-      score = 4,
-      itypes = Seq("foo"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemRecScores.insert(ItemRecScore(
-      uid = "user1",
-      iid = "mvh",
-      score = 3,
-      itypes = Seq("unrelated"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemRecScores.insert(ItemRecScore(
-      uid = "user1",
-      iid = "lbh",
-      score = 2,
-      itypes = Seq("unrelated"),
+      iids = Seq("hsh", "mvh", "lbh", "dac"),
+      scores = Seq(4, 3, 2, 1),
+      itypes = Seq(Seq("fresh", "meat"), Seq("fresh", "meat"), Seq("fresh", "meat"), Seq("fresh", "meat")),
       appid = appid,
       algoid = algoid,
       modelset = true))
@@ -154,8 +127,8 @@
       val response = Helpers.await(wsUrl(s"/engines/itemrec/${enginename}/topn.json")
         .withQueryString(
           "pio_appkey" -> "appkey",
-          "pio_uid"    -> "user1",
-          "pio_n"      -> "10")
+          "pio_uid" -> "user1",
+          "pio_n" -> "10")
         .get())
       response.status must beEqualTo(OK) and
         (response.body must beEqualTo("""{"pio_iids":["hsh","mvh","lbh","dac"]}"""))
@@ -165,8 +138,8 @@
       val response = Helpers.await(wsUrl(s"/engines/itemrec/${enginename}/topn.json")
         .withQueryString(
           "pio_appkey" -> "appkey",
-          "pio_uid"    -> "user1",
-          "pio_n"      -> "10",
+          "pio_uid" -> "user1",
+          "pio_n" -> "10",
           "pio_latlng" -> "37.3229978,-122.0321823",
           "pio_within" -> "2.2")
         .get())
@@ -179,20 +152,20 @@
     val enginename = "itemsim"
 
     val engineid = engines.insert(Engine(
-      id       = 0,
-      appid    = appid,
-      name     = "itemsim",
-      infoid   = "itemsim",
-      itypes   = None,
-      params   = Map()))
+      id = 0,
+      appid = appid,
+      name = "itemsim",
+      infoid = "itemsim",
+      itypes = None,
+      params = Map()))
 
     val algoid = algos.insert(Algo(
-      id       = 0,
+      id = 0,
       engineid = engineid,
-      name     = enginename,
-      infoid   = "pdio-itembasedcf",
-      command  = "items",
-      params   = Map("foo" -> "bar"),
+      name = enginename,
+      infoid = "pdio-itembasedcf",
+      command = "items",
+      params = Map("foo" -> "bar"),
       settings = Map("dead" -> "beef"),
       modelset = true,
       createtime = DateTime.now,
@@ -202,36 +175,9 @@
 
     itemSimScores.insert(ItemSimScore(
       iid = "user1",
-      simiid = "dac",
-      score = 1,
-      itypes = Seq("bar"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemSimScores.insert(ItemSimScore(
-      iid = "user1",
-      simiid = "hsh",
-      score = 4,
-      itypes = Seq("foo"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemSimScores.insert(ItemSimScore(
-      iid = "user1",
-      simiid = "mvh",
-      score = 3,
-      itypes = Seq("unrelated"),
-      appid = appid,
-      algoid = algoid,
-      modelset = true))
-
-    itemSimScores.insert(ItemSimScore(
-      iid = "user1",
-      simiid = "lbh",
-      score = 2,
-      itypes = Seq("unrelated"),
+      simiids = Seq("hsh", "mvh", "lbh", "dac"),
+      scores = Seq(4, 3, 2, 1),
+      itypes = Seq(Seq("fresh", "meat"), Seq("fresh", "meat"), Seq("fresh", "meat"), Seq("fresh", "meat")),
       appid = appid,
       algoid = algoid,
       modelset = true))
@@ -240,8 +186,8 @@
       val response = Helpers.await(wsUrl(s"/engines/itemsim/${enginename}/topn.json")
         .withQueryString(
           "pio_appkey" -> "appkey",
-          "pio_iid"    -> "user1",
-          "pio_n"      -> "10")
+          "pio_iid" -> "user1",
+          "pio_n" -> "10")
         .get())
       response.status must beEqualTo(OK) and
         (response.body must beEqualTo("""{"pio_iids":["hsh","mvh","lbh","dac"]}"""))
@@ -251,8 +197,8 @@
       val response = Helpers.await(wsUrl(s"/engines/itemsim/${enginename}/topn.json")
         .withQueryString(
           "pio_appkey" -> "appkey",
-          "pio_iid"    -> "user1",
-          "pio_n"      -> "10",
+          "pio_iid" -> "user1",
+          "pio_n" -> "10",
           "pio_latlng" -> "37.3229978,-122.0321823",
           "pio_within" -> "2.2")
         .get())
@@ -265,7 +211,7 @@
     "fail creation with tabs in itypes" in new WithServer {
       val response = Helpers.await(wsUrl(s"/items.json").post(Map(
         "pio_appkey" -> Seq("appkey"),
-        "pio_iid"    -> Seq("fooitem"),
+        "pio_iid" -> Seq("fooitem"),
         "pio_itypes" -> Seq("footype\tbartype"))))
       response.status must beEqualTo(BAD_REQUEST)
     }
@@ -276,4 +222,4 @@
     MongoConnection()(config.appdataDbName).dropDatabase()
     MongoConnection()(config.modeldataDbName).dropDatabase()
   }
-}
+}
\ No newline at end of file
diff --git a/servers/scheduler/app/controllers/Jobs.scala b/servers/scheduler/app/controllers/Jobs.scala
index 7f80d15..5014e4b 100644
--- a/servers/scheduler/app/controllers/Jobs.scala
+++ b/servers/scheduler/app/controllers/Jobs.scala
@@ -162,7 +162,8 @@
     command.setAttribute("modeldataTrainingDbHost", config.modeldataTrainingDbHost)
     command.setAttribute("modeldataTrainingDbPort", config.modeldataTrainingDbPort)
     engine.itypes foreach { it =>
-      command.setAttribute("itypes", "--itypes" + " " + it.mkString(" ")) // NOTE: a space ' ' is necessary after --itypes
+      command.setAttribute("itypes", "--itypes " + it.mkString(" ")) // NOTE: a space ' ' is necessary after --itypes
+      command.setAttribute("itypesCSV", "--itypes " + it.mkString(","))
     }
     command.setAttribute("numRecommendations", engine.params.getOrElse("numRecommendations", 500))
     command.setAttribute("numSimilarItems", engine.params.getOrElse("numSimilarItems", 500))
@@ -287,22 +288,21 @@
 
     Some(steptype) collect {
       case "split" => {
-        if (iteration == 1) {
-          /** Delete old model data, if any (for recovering from an incomplete run, and clean old score for multi-iterations) */
-          Scheduler.offlineEvals.get(evalid) map { offlineEval =>
-            Scheduler.engines.get(offlineEval.engineid) map { engine =>
-              val algosToRun = Scheduler.algos.getByOfflineEvalid(offlineEval.id).toSeq
-              val modelData = Scheduler.config.getModeldataTraining(engine.infoid)
-              algosToRun foreach { algo =>
-                Logger.info(s"${logPrefix}Algo ID ${algo.id}: Deleting any old model data")
-                modelData.delete(algo.id, false)
-              }
-              Logger.info(s"${logPrefix}Deleting any old user-to-item actions")
-              Scheduler.appdataTrainingU2IActions.deleteByAppid(offlineEval.id)
-              Scheduler.appdataTestU2IActions.deleteByAppid(offlineEval.id)
+        /** Delete old model data, if any (for recovering from an incomplete run, and clean old score for multi-iterations) */
+        Scheduler.offlineEvals.get(evalid) map { offlineEval =>
+          Scheduler.engines.get(offlineEval.engineid) map { engine =>
+            val algosToRun = Scheduler.algos.getByOfflineEvalid(offlineEval.id).toSeq
+            val modelData = Scheduler.config.getModeldataTraining(engine.infoid)
+            algosToRun foreach { algo =>
+              Logger.info(s"${logPrefix}Algo ID ${algo.id}: Deleting any old model data")
+              modelData.delete(algo.id, false)
             }
+            Logger.info(s"${logPrefix}Deleting any old user-to-item actions")
+            Scheduler.appdataTrainingU2IActions.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTestU2IActions.deleteByAppid(offlineEval.id)
           }
         }
+
         if (iteration > 1) {
           val iterationkey = s"iteration-${iteration - 1}"
           while (!finishFlags(iterationkey)) {
@@ -312,21 +312,25 @@
       }
       case "training" => {
         val splitkey = s"split-${iteration}"
+        val trainingkey = s"training-${iteration}-${algoid.get}"
         while (!finishFlags(splitkey)) {
           Thread.sleep(1000)
         }
         if (exitCodes(splitkey) != 0) {
           abort = true
+          exitCodes(trainingkey) = 1
           Logger.info(s"${logPrefix}(${steptype}) Aborted due to split error")
         }
       }
       case "metric" => {
         val trainingkey = s"training-${iteration}-${algoid.get}"
+        val metrickey = s"metric-${iteration}-${algoid.get}-${metricid.get}"
         while (!finishFlags(trainingkey)) {
           Thread.sleep(1000)
         }
         if (exitCodes(trainingkey) != 0) {
           abort = true
+          exitCodes(metrickey) = 1
           Logger.info(s"${logPrefix}(${steptype}) Aborted due to training error")
         }
       }
@@ -345,21 +349,35 @@
     }
 
     commands map { _.trim } foreach { c =>
+      var exception = false
       this.synchronized {
         if (!kill && !abort && !c.isEmpty && exitCodes(key) == 0) {
           Logger.info(s"${logPrefix}(${steptype}) Going to run: $c")
-          procs(key) = Process(c).run
-          Logger.info(s"${logPrefix}(${steptype}) Scheduler waiting for sub-process to finish")
+          try {
+            procs(key) = Process(c).run
+            Logger.info(s"${logPrefix}(${steptype}) Scheduler waiting for sub-process to finish")
+          } catch {
+            case e: java.io.IOException => {
+              exception = true
+              Logger.info(s"${logPrefix}(${steptype}) ${e.getMessage}")
+            }
+          }
         }
       }
 
-      procs.get(key) map { p =>
-        val exitCode = p.exitValue
+      // Continue if the last command succeeded
+      if (exitCodes(key) == 0) {
+        procs.get(key) map { p =>
+          val exitCode = if (exception) 1 else p.exitValue
 
-        /** Save completion information for global access */
-        exitCodes(key) = exitCode
+          /** Save completion information for global access */
+          exitCodes(key) = exitCode
 
-        Logger.info(s"${logPrefix}(${steptype}) Sub-process has finished with exit code ${exitCode}")
+          if (exception)
+            Logger.info(s"${logPrefix}(${steptype}) Exception trying to run sub-process")
+          else
+            Logger.info(s"${logPrefix}(${steptype}) Sub-process has finished with exit code ${exitCode}")
+        }
       }
     }
 
@@ -480,19 +498,22 @@
             Thread.sleep(1000)
           }
 
-          /** Clean up */
-          val modelData = config.getModeldataTraining(engine.infoid)
-          algosToRun foreach { algo =>
-            Logger.info(s"${logPrefix}Algo ID ${algo.id}: Deleting used model data")
-            modelData.delete(algo.id, false)
+          /** Clean up if ended normally or killed */
+          val sumExitCodes = exitCodes.values.sum
+          if (kill || sumExitCodes == 0) {
+            val modelData = config.getModeldataTraining(engine.infoid)
+            algosToRun foreach { algo =>
+              Logger.info(s"${logPrefix}Algo ID ${algo.id}: Deleting used model data")
+              modelData.delete(algo.id, false)
+            }
+            Logger.info(s"${logPrefix}Deleting used app data")
+            Scheduler.appdataTrainingUsers.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTrainingItems.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTrainingU2IActions.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTestUsers.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTestItems.deleteByAppid(offlineEval.id)
+            Scheduler.appdataTestU2IActions.deleteByAppid(offlineEval.id)
           }
-          Logger.info(s"${logPrefix}Deleting used app data")
-          Scheduler.appdataTrainingUsers.deleteByAppid(offlineEval.id)
-          Scheduler.appdataTrainingItems.deleteByAppid(offlineEval.id)
-          Scheduler.appdataTrainingU2IActions.deleteByAppid(offlineEval.id)
-          Scheduler.appdataTestUsers.deleteByAppid(offlineEval.id)
-          Scheduler.appdataTestItems.deleteByAppid(offlineEval.id)
-          Scheduler.appdataTestU2IActions.deleteByAppid(offlineEval.id)
 
           /** Check for errors from metric */
           Logger.info(s"${logPrefix}Exit code summary:")
@@ -508,7 +529,7 @@
             }
           }
 
-          if (exitCodes.values.sum != 0)
+          if (sumExitCodes != 0)
             Logger.warn(s"${logPrefix}Offline evaluation completed with error(s)")
           else
             Logger.info(s"${logPrefix}Offline evaluation completed")
diff --git a/servers/scheduler/app/controllers/Operations.scala b/servers/scheduler/app/controllers/Operations.scala
index 9f7525f..286f3df 100644
--- a/servers/scheduler/app/controllers/Operations.scala
+++ b/servers/scheduler/app/controllers/Operations.scala
@@ -4,7 +4,9 @@
 import io.prediction.commons.Config
 
 import scala.sys.process._
+import java.io.File
 
+import org.apache.commons.io.FileUtils
 import play.api._
 import play.api.libs.json._
 import play.api.mvc._
@@ -12,60 +14,103 @@
 object Operations extends Controller {
   val config = new Config
 
-  def deleteApp(appid: Int) = Action {
-    val path = BaseDir.appDir(config.settingsHdfsRoot, appid)
+  def hadoopRequired(request: Request[_]) = request.queryString.contains("hadoop")
+
+  def deleteApp(appid: Int) = Action { implicit request =>
+    val localPath = BaseDir.appDir(config.settingsLocalTempRoot, appid)
+    val localFile = new File(localPath)
+    localFile.mkdirs()
+    val localCode = FileUtils.deleteQuietly(localFile)
+    val hadoopPath = BaseDir.appDir(config.settingsHdfsRoot, appid)
     // mkdir again to make sure that rmr failure is not due to non existing dir.
     // mkdir error can be ignored.
-    val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $path".!
-    val code = s"${Scheduler.hadoopCommand} fs -rmr $path".!
-    if (code == 0)
-      Ok(Json.obj("message" -> s"Deleted HDFS storage for App ID: $appid ($path)"))
+    val hadoopCode = if (hadoopRequired(request)) {
+      try {
+        val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $hadoopPath".!
+        val code = s"${Scheduler.hadoopCommand} fs -rmr $hadoopPath".!
+        if (code == 0) true else false
+      } catch {
+        case e: java.io.IOException => true // allow deletion if hadoop command is absent         
+      }
+    } else true
+    if (localCode && hadoopCode)
+      Ok(Json.obj("message" -> s"Deleted local (and HDFS, if applicable) storage for App ID: $appid (local: $localPath; HDFS: $hadoopPath)"))
+    else if (localCode)
+      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid ($hadoopPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
     else
-      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid ($path). Please check scheduler.log and hadoop log files."))
+      InternalServerError(Json.obj("message" -> s"Unable to delete local temporary storage for App ID: $appid ($localPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
   }
 
-  def deleteEngine(appid: Int, engineid: Int) = Action {
-    val path = BaseDir.engineDir(config.settingsHdfsRoot, appid, engineid)
+  def deleteEngine(appid: Int, engineid: Int) = Action { implicit request =>
+    val localPath = BaseDir.engineDir(config.settingsLocalTempRoot, appid, engineid)
+    val localFile = new File(localPath)
+    localFile.mkdirs()
+    val localCode = FileUtils.deleteQuietly(localFile)
+    val hadoopPath = BaseDir.engineDir(config.settingsHdfsRoot, appid, engineid)
     // mkdir again to make sure that rmr failure is not due to non existing dir.
     // mkdir error can be ignored.
-    val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $path".!
-    val code = s"${Scheduler.hadoopCommand} fs -rmr $path".!
-    if (code == 0)
-      Ok(Json.obj("message" -> s"Deleted HDFS storage for App ID: $appid, Engine ID: $engineid ($path)"))
+    val hadoopCode = if (hadoopRequired(request)) {
+      try {
+        val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $hadoopPath".!
+        val code = s"${Scheduler.hadoopCommand} fs -rmr $hadoopPath".!
+        if (code == 0) true else false
+      } catch {
+        case e: java.io.IOException => true // allow deletion if hadoop command is absent         
+      }
+    } else true
+    if (localCode && hadoopCode)
+      Ok(Json.obj("message" -> s"Deleted local (and HDFS, if applicable) storage for App ID: $appid, Engine ID: $engineid (local: $localPath; HDFS: $hadoopPath)"))
+    else if (localCode)
+      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid ($hadoopPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
     else
-      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid ($path). Please check scheduler.log and hadoop log files."))
+      InternalServerError(Json.obj("message" -> s"Unable to delete local temporary storage for App ID: $appid, Engine ID: $engineid ($localPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
   }
 
-  def deleteAlgo(appid: Int, engineid: Int, algoid: Int) = Action {
-    val path = BaseDir.algoDir(config.settingsHdfsRoot, appid, engineid, algoid, None)
-    deleteAlgoBase(path, appid, engineid, algoid, None)
+  def deleteAlgo(appid: Int, engineid: Int, algoid: Int) = Action { implicit request =>
+    val localPath = BaseDir.algoDir(config.settingsLocalTempRoot, appid, engineid, algoid, None)
+    val localFile = new File(localPath)
+    localFile.mkdirs()
+    val localCode = FileUtils.deleteQuietly(localFile)
+    val hadoopPath = BaseDir.algoDir(config.settingsHdfsRoot, appid, engineid, algoid, None)
+    val hadoopCode = if (hadoopRequired(request)) {
+      try {
+        val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $hadoopPath".!
+        val code = s"${Scheduler.hadoopCommand} fs -rmr $hadoopPath".!
+        if (code == 0) true else false
+      } catch {
+        case e: java.io.IOException => true // allow deletion if hadoop command is absent         
+      }
+    } else true
+    if (localCode && hadoopCode)
+      Ok(Json.obj("message" -> s"Deleted HDFS storage for App ID: $appid, Engine ID: $engineid, Algo ID: $algoid (local: $localPath; HDFS: $hadoopPath)"))
+    else if (localCode)
+      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid, Algo ID: $algoid ($hadoopPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
+    else
+      InternalServerError(Json.obj("message" -> s"Unable to delete local temporary storage for App ID: $appid, Engine ID: $engineid, Algo ID: $algoid ($localPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
   }
 
-  def deleteOfflineEval(appid: Int, engineid: Int, offlineevalid: Int) = Action {
-    val path = BaseDir.offlineEvalDir(config.settingsHdfsRoot, appid, engineid, offlineevalid)
+  def deleteOfflineEval(appid: Int, engineid: Int, offlineevalid: Int) = Action { implicit request =>
+    val localPath = BaseDir.offlineEvalDir(config.settingsLocalTempRoot, appid, engineid, offlineevalid)
+    val localFile = new File(localPath)
+    localFile.mkdirs()
+    val localCode = FileUtils.deleteQuietly(localFile)
+    val hadoopPath = BaseDir.offlineEvalDir(config.settingsHdfsRoot, appid, engineid, offlineevalid)
     // mkdir again to make sure that rmr failure is not due to non existing dir.
     // mkdir error can be ignored.
-    val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $path".!
-    val code = s"${Scheduler.hadoopCommand} fs -rmr $path".!
-    if (code == 0)
-      Ok(Json.obj("message" -> s"Deleted HDFS storage for App ID: $appid, Engine ID: $engineid, OfflineEval ID: $offlineevalid ($path)"))
+    val hadoopCode = if (hadoopRequired(request)) {
+      try {
+        val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $hadoopPath".!
+        val code = s"${Scheduler.hadoopCommand} fs -rmr $hadoopPath".!
+        if (code == 0) true else false
+      } catch {
+        case e: java.io.IOException => true // allow deletion if hadoop command is absent         
+      }
+    } else true
+    if (localCode && hadoopCode)
+      Ok(Json.obj("message" -> s"Deleted local (and HDFS, if applicable) storage for App ID: $appid, Engine ID: $engineid, OfflineEval ID: $offlineevalid (local: $localPath; HDFS: $hadoopPath)"))
+    else if (localCode)
+      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid, OfflineEval ID: $offlineevalid ($hadoopPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
     else
-      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid, OfflineEval ID: $offlineevalid ($path). Please check scheduler.log and hadoop log files."))
-  }
-
-  def deleteOfflineEvalAlgo(appid: Int, engineid: Int, offlineevalid: Int, algoid: Int) = Action {
-    val path = BaseDir.algoDir(config.settingsHdfsRoot, appid, engineid, algoid, Some(offlineevalid))
-    deleteAlgoBase(path, appid, engineid, algoid, Some(offlineevalid))
-  }
-
-  def deleteAlgoBase(path: String, appid: Int, engineid: Int, algoid: Int, offlineevalid: Option[Int]) = {
-    // mkdir again to make sure that rmr failure is not due to non existing dir.
-    // mkdir error can be ignored.
-    val mkdir = s"${Scheduler.hadoopCommand} fs -mkdir $path".!
-    val code = s"${Scheduler.hadoopCommand} fs -rmr $path".!
-    if (code == 0)
-      Ok(Json.obj("message" -> s"Deleted HDFS storage for App ID: $appid, Engine ID: $engineid, Algo ID: $algoid, OfflineEval ID: ${offlineevalid.getOrElse("N/A")} ($path)"))
-    else
-      InternalServerError(Json.obj("message" -> s"Unable to delete HDFS storage for App ID: $appid, Engine ID: $engineid, Algo ID: $algoid, OfflineEval ID: ${offlineevalid.getOrElse("N/A")} ($path). Please check scheduler and hadoop log files."))
+      InternalServerError(Json.obj("message" -> s"Unable to delete local temporary storage for App ID: $appid, Engine ID: $engineid, OfflineEval ID: $offlineevalid ($localPath). Please check logs/scheduler.log, logs/scheduler.err and Hadoop log files."))
   }
 }
diff --git a/servers/scheduler/build.sbt b/servers/scheduler/build.sbt
index 1cbf303..fac7529 100644
--- a/servers/scheduler/build.sbt
+++ b/servers/scheduler/build.sbt
@@ -1,6 +1,6 @@
 name := "predictionio-scheduler"
 
-version := "0.6.8"
+version := "0.7.0"
 
 organization := "io.prediction"
 
diff --git a/servers/scheduler/conf/application.conf b/servers/scheduler/conf/application.conf
index 3063136..8296e23 100644
--- a/servers/scheduler/conf/application.conf
+++ b/servers/scheduler/conf/application.conf
@@ -86,28 +86,28 @@
 io.prediction.commons.settings.db.port=27017
 
 # PredictionIO Algorithms
-pdio-knnitembased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-pdio-latestrank.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-pdio-randomrank.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-itembased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-parallelals.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-knnuserbased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-thresholduserbased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-slopeone.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-alswr.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-svdsgd.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-svdplusplus.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
+pdio-knnitembased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+pdio-latestrank.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+pdio-randomrank.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-itembased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-parallelals.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-knnuserbased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-thresholduserbased.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-slopeone.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-alswr.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-svdsgd.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-svdplusplus.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
 
-pdio-itemsimcf.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-pdio-itemsimlatestrank.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-pdio-itemsimrandomrank.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
-mahout-itemsimcf.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
+pdio-itemsimcf.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+pdio-itemsimlatestrank.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+pdio-itemsimrandomrank.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
+mahout-itemsimcf.jar=${io.prediction.itemsim.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemSim-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
 
 # PredictionIO generic scalding job
-io.prediction.algorithms.scalding.itemrec.generic.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.6.8.jar
+io.prediction.algorithms.scalding.itemrec.generic.jar=${io.prediction.itemrec.base}/algorithms/hadoop/scalding/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Hadoop-Scalding-assembly-0.7.0.jar
 
 # Itemrec Scala Mahout Algorithms
-io.prediction.algorithms.mahout.itemrec.jar=${io.prediction.itemrec.base}/algorithms/scala/mahout/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Scala-Mahout-assembly-0.6.8.jar
+io.prediction.algorithms.mahout.itemrec.jar=${io.prediction.itemrec.base}/algorithms/scala/mahout/target/scala-2.10/PredictionIO-Process-ItemRec-Algorithms-Scala-Mahout-assembly-0.7.0.jar
 
 # Mahout core job
 io.prediction.algorithms.mahout-core-job.jar=${io.prediction.base}/vendors/mahout-distribution-0.8/mahout-core-0.8-job.jar
diff --git a/servers/scheduler/conf/routes b/servers/scheduler/conf/routes
index 18fdef7..79a9740 100644
--- a/servers/scheduler/conf/routes
+++ b/servers/scheduler/conf/routes
@@ -17,6 +17,4 @@
 GET     /apps/:appid/engines/:engineid/offlineevals/:offlineevalid/delete       io.prediction.scheduler.Operations.deleteOfflineEval(appid: Int, engineid: Int, offlineevalid: Int)
 GET     /apps/:appid/engines/:engineid/offlineevals/:offlineevalid/stop         io.prediction.scheduler.Scheduler.stopOfflineEval(appid: Int, engineid: Int, offlineevalid: Int)
 
-GET     /apps/:appid/engines/:engineid/offlineevals/:offlineevalid/algos/:algoid/delete   io.prediction.scheduler.Operations.deleteOfflineEvalAlgo(appid: Int, engineid: Int, offlineevalid: Int, algoid: Int)
-
 GET     /apps/:appid/engines/:engineid/offlinetunes/:offlinetuneid/stop         io.prediction.scheduler.Scheduler.stopOfflineTune(appid: Int, engineid: Int, offlinetuneid: Int)
diff --git a/servers/scheduler/project/plugins.sbt b/servers/scheduler/project/plugins.sbt
index 6153ef1..16ba183 100644
--- a/servers/scheduler/project/plugins.sbt
+++ b/servers/scheduler/project/plugins.sbt
@@ -5,6 +5,6 @@
 resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
 
 // Use the Play sbt plugin for Play projects
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.0")
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.2")
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.1")
diff --git a/tools/conncheck/build.sbt b/tools/conncheck/build.sbt
index 9d5c9b9..0b9c78a 100644
--- a/tools/conncheck/build.sbt
+++ b/tools/conncheck/build.sbt
@@ -1,11 +1,19 @@
-import com.typesafe.sbt.packager.Keys._
+import xerial.sbt.Pack._
 
 name := "conncheck"
 
 scalariformSettings
 
-packageArchetype.java_application
-
-bashScriptExtraDefines += "addJava \"-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/..\""
-
 libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.6.0"
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("conncheck" -> "io.prediction.tools.conncheck.ConnCheck")
+
+packJvmOpts := Map("conncheck" -> Common.packCommonJvmOpts)
diff --git a/tools/migration/0.7/infos/build.sbt b/tools/migration/0.7/infos/build.sbt
new file mode 100644
index 0000000..894c71e
--- /dev/null
+++ b/tools/migration/0.7/infos/build.sbt
@@ -0,0 +1,22 @@
+import xerial.sbt.Pack._
+
+name := "standardized-info-ids"
+
+scalariformSettings
+
+libraryDependencies ++= Seq(
+  "ch.qos.logback" % "logback-classic" % "1.1.1",
+  "ch.qos.logback" % "logback-core" % "1.1.1",
+  "org.clapper" %% "grizzled-slf4j" % "1.0.1")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("standardized-info-ids" -> "io.prediction.tools.migration.StandardizedInfoIDs")
+
+packJvmOpts := Map("standardized-info-ids" -> Common.packCommonJvmOpts)
diff --git a/tools/migration/0.7/infos/src/main/resources/application.conf b/tools/migration/0.7/infos/src/main/resources/application.conf
new file mode 100644
index 0000000..d6f0db9
--- /dev/null
+++ b/tools/migration/0.7/infos/src/main/resources/application.conf
@@ -0,0 +1,30 @@
+# Used by PredictionIO Commons
+io.prediction.base=../..
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
diff --git a/tools/migration/0.7/infos/src/main/scala/StandardizedInfoIDs.scala b/tools/migration/0.7/infos/src/main/scala/StandardizedInfoIDs.scala
new file mode 100644
index 0000000..f634705
--- /dev/null
+++ b/tools/migration/0.7/infos/src/main/scala/StandardizedInfoIDs.scala
@@ -0,0 +1,68 @@
+package io.prediction.tools.migration
+
+import io.prediction.commons.Config
+
+import grizzled.slf4j.Logger
+
+object StandardizedInfoIDs {
+  def main(args: Array[String]) {
+    val logger = Logger(StandardizedInfoIDs.getClass)
+    val config = new Config()
+    val algos = config.getSettingsAlgos()
+    val algoOldToNew = Map[String, String](
+      "pdio-local-itemrec-random" -> "pio-itemrec-single-random",
+      "pdio-local-itemsim-random" -> "pio-itemsim-single-random",
+      "pdio-randomrank" -> "pio-itemrec-distributed-random",
+      "pdio-latestrank" -> "pio-itemrec-distributed-latest",
+      "mahout-itembased" -> "pio-itemrec-distributed-mahout-itembased",
+      "mahout-parallelals" -> "pio-itemrec-distributed-mahout-parallelals",
+      "mahout-knnitembased" -> "pio-itemrec-single-mahout-knnitembased",
+      "mahout-knnuserbased" -> "pio-itemrec-single-mahout-knnuserbased",
+      "mahout-thresholduserbased" -> "pio-itemrec-single-mahout-thresholduserbased",
+      "mahout-alswr" -> "pio-itemrec-single-mahout-alswr",
+      "mahout-svdsgd" -> "pio-itemrec-single-mahout-svdsgd",
+      "mahout-svdplusplus" -> "pio-itemrec-single-mahout-svdplusplus",
+      "pdio-itemsimrandomrank" -> "pio-itemsim-distributed-random",
+      "pdio-itemsimlatestrank" -> "pio-itemsim-distributed-latest",
+      "mahout-itemsimcf-single" -> "pio-itemsim-single-mahout-itemsimcf",
+      "mahout-itemsimcf" -> "pio-itemsim-distributed-mahout-itemsimcf",
+      "graphchi-als" -> "pio-itemrec-single-graphchi-als",
+      "graphchi-climf" -> "pio-itemrec-single-graphchi-climf")
+    algos.getAll foreach { algo =>
+      val newAlgoInfoID = algoOldToNew.get(algo.infoid).getOrElse(algo.infoid)
+      logger.info(s"Algo ID ${algo.id}: ${algo.infoid} -> ${newAlgoInfoID}")
+      algos.update(algo.copy(infoid = newAlgoInfoID))
+    }
+
+    val offlineEvalSplitters = config.getSettingsOfflineEvalSplitters()
+    val splittersOldToNew = Map[String, String](
+      "trainingtestsplit" -> "pio-distributed-trainingtestsplit",
+      "u2isplit" -> "pio-single-trainingtestsplit")
+    offlineEvalSplitters.getAll foreach { splitter =>
+      val newSplitterInfoID = splittersOldToNew.get(splitter.infoid).getOrElse(splitter.infoid)
+      logger.info(s"OfflineEvalSplitter ID ${splitter.id}: ${splitter.infoid} -> ${newSplitterInfoID}")
+      offlineEvalSplitters.update(splitter.copy(infoid = newSplitterInfoID))
+    }
+
+    val offlineEvalMetrics = config.getSettingsOfflineEvalMetrics()
+    val metricsOldToNew = Map[String, String](
+      "map_k" -> "pio-itemrec-distributed-map_k",
+      "map_k_nd" -> "pio-itemrec-single-map_k",
+      "ismap_k" -> "pio-itemsim-distributed-ismap_k",
+      "ismap_k_nd" -> "pio-itemsim-single-ismap_k")
+    offlineEvalMetrics.getAll foreach { metric =>
+      val newMetricInfoID = metricsOldToNew.get(metric.infoid).getOrElse(metric.infoid)
+      logger.info(s"OfflineEvalMetric ID ${metric.id}: ${metric.infoid} -> ${newMetricInfoID}")
+      offlineEvalMetrics.update(metric.copy(infoid = newMetricInfoID))
+    }
+
+    val paramGens = config.getSettingsParamGens()
+    val paramGensOldToNew = Map[String, String](
+      "random" -> "pio-single-random")
+    paramGens.getAll foreach { paramGen =>
+      val newParamGenInfoID = paramGensOldToNew.get(paramGen.infoid).getOrElse(paramGen.infoid)
+      logger.info(s"ParamGen ID ${paramGen.id}: ${paramGen.infoid} -> ${newParamGenInfoID}")
+      paramGens.update(paramGen.copy(infoid = newParamGenInfoID))
+    }
+  }
+}
diff --git a/tools/settingsinit/build.sbt b/tools/settingsinit/build.sbt
index d274bfb..6473c34 100644
--- a/tools/settingsinit/build.sbt
+++ b/tools/settingsinit/build.sbt
@@ -1,11 +1,19 @@
-import com.typesafe.sbt.packager.Keys._
+import xerial.sbt.Pack._
 
 name := "settingsinit"
 
 scalariformSettings
 
-packageArchetype.java_application
-
-bashScriptExtraDefines += "addJava \"-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/..\""
-
 libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.6.0"
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("settingsinit" -> "io.prediction.tools.settingsinit.SettingsInit")
+
+packJvmOpts := Map("settingsinit" -> Common.packCommonJvmOpts)
diff --git a/tools/settingsinit/src/main/resources/application.conf b/tools/settingsinit/src/main/resources/application.conf
new file mode 100644
index 0000000..d6f0db9
--- /dev/null
+++ b/tools/settingsinit/src/main/resources/application.conf
@@ -0,0 +1,30 @@
+# Used by PredictionIO Commons
+io.prediction.base=../..
+
+io.prediction.commons.appdata.db.type=mongodb
+io.prediction.commons.appdata.db.host=localhost
+io.prediction.commons.appdata.db.port=27017
+
+io.prediction.commons.appdata.test.db.type=mongodb
+io.prediction.commons.appdata.test.db.host=localhost
+io.prediction.commons.appdata.test.db.port=27017
+
+io.prediction.commons.appdata.training.db.type=mongodb
+io.prediction.commons.appdata.training.db.host=localhost
+io.prediction.commons.appdata.training.db.port=27017
+
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+
+io.prediction.commons.modeldata.db.type=mongodb
+io.prediction.commons.modeldata.db.host=localhost
+io.prediction.commons.modeldata.db.port=27017
+
+io.prediction.commons.modeldata.training.db.type=mongodb
+io.prediction.commons.modeldata.training.db.host=localhost
+io.prediction.commons.modeldata.training.db.port=27017
+
+io.prediction.commons.settings.db.type=mongodb
+io.prediction.commons.settings.db.host=localhost
+io.prediction.commons.settings.db.port=27017
diff --git a/tools/settingsinit/src/main/scala/io/prediction/tools/settingsinit/SettingsInit.scala b/tools/settingsinit/src/main/scala/io/prediction/tools/settingsinit/SettingsInit.scala
index d18d285..5750418 100644
--- a/tools/settingsinit/src/main/scala/io/prediction/tools/settingsinit/SettingsInit.scala
+++ b/tools/settingsinit/src/main/scala/io/prediction/tools/settingsinit/SettingsInit.scala
@@ -53,6 +53,11 @@
     println("PredictionIO settings initialization starting")
 
     M.unapply(settingsJson) map { settings =>
+      println("Deleting old SystemInfo entries...")
+      systemInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        systemInfos.delete(info.id)
+      }
       M.unapply(settings("systeminfos")) map { infos =>
         println("Populating SystemInfos...")
         for {
@@ -73,6 +78,11 @@
         }
       } getOrElse println("Cannot find any SystemInfo information. Skipping.")
 
+      println("Deleting old EngineInfo entries...")
+      engineInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        engineInfos.delete(info.id)
+      }
       M.unapply(settings("engineinfos")) map { infos =>
         println("Populating EngineInfos...")
         for {
@@ -106,6 +116,11 @@
         }
       } getOrElse println("Cannot find any EngineInfo information. Skipping.")
 
+      println("Deleting old AlgoInfo entries...")
+      algoInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        algoInfos.delete(info.id)
+      }
       M.unapply(settings("algoinfos")) map { infos =>
         println("Populating AlgoInfos...")
         for {
@@ -145,6 +160,11 @@
         }
       } getOrElse println("Cannot find any AlgoInfo information. Skipping.")
 
+      println("Deleting old OfflineEvalSplitterInfo entries...")
+      offlineEvalSplitterInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        offlineEvalSplitterInfos.delete(info.id)
+      }
       M.unapply(settings("offlineevalsplitterinfos")) map { infos =>
         println("Populating OfflineEvalSplitterInfos...")
         for {
@@ -178,6 +198,11 @@
         }
       } getOrElse println("Cannot find any OfflineEvalSplitterInfo information. Skipping.")
 
+      println("Deleting old OfflineEvalMetricInfo entries...")
+      offlineEvalMetricInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        offlineEvalMetricInfos.delete(info.id)
+      }
       M.unapply(settings("offlineevalmetricinfos")) map { infos =>
         println("Populating OfflineEvalMetricInfos...")
         for {
@@ -211,6 +236,11 @@
         }
       } getOrElse println("Cannot find any OfflineEvalMetricInfo information. Skipping.")
 
+      println("Deleting old ParamGenInfo entries...")
+      paramGenInfos.getAll() foreach { info =>
+        println(s"- ${info.id}")
+        paramGenInfos.delete(info.id)
+      }
       M.unapply(settings("paramgeninfos")) map { infos =>
         println("Populating ParamGenInfos...")
         for {
diff --git a/tools/softwaremanager/build.sbt b/tools/softwaremanager/build.sbt
index 6152d29..2b595e5 100644
--- a/tools/softwaremanager/build.sbt
+++ b/tools/softwaremanager/build.sbt
@@ -1,14 +1,30 @@
-import com.typesafe.sbt.packager.Keys._
+import xerial.sbt.Pack._
 
 name := "softwaremanager"
 
 scalariformSettings
 
-packageArchetype.java_application
-
-bashScriptExtraDefines += "addJava \"-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/..\""
-
 libraryDependencies ++= Seq(
   "com.github.scopt" %% "scopt" % "3.1.0",
   "commons-io" % "commons-io" % "2.4",
   "org.slf4j" % "slf4j-nop" % "1.6.0")
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map(
+  "backup"      -> "io.prediction.tools.softwaremanager.Backup",
+  "restore"     -> "io.prediction.tools.softwaremanager.Restore",
+  "updatecheck" -> "io.prediction.tools.softwaremanager.UpdateCheck",
+  "upgrade"     -> "io.prediction.tools.softwaremanager.Upgrade")
+
+packJvmOpts := Map(
+  "backup"      -> Common.packCommonJvmOpts,
+  "restore"     -> Common.packCommonJvmOpts,
+  "updatecheck" -> Common.packCommonJvmOpts,
+  "upgrade"     -> Common.packCommonJvmOpts)
diff --git a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Backup.scala b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Backup.scala
index fa1f1f0..bfe634f 100644
--- a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Backup.scala
+++ b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Backup.scala
@@ -27,7 +27,7 @@
 
   def main(args: Array[String]) {
     val parser = new scopt.OptionParser[BackupConfig]("backup") {
-      head("PredictionIO Backup Utility", "0.6.8")
+      head("PredictionIO Backup Utility", "0.7.0")
       help("help") text ("prints this usage text")
       arg[String]("<backup directory>") action { (x, c) =>
         c.copy(backupDir = x)
diff --git a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Restore.scala b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Restore.scala
index 9f492d9..7707a0f 100644
--- a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Restore.scala
+++ b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Restore.scala
@@ -27,7 +27,7 @@
 
   def main(args: Array[String]) {
     val parser = new scopt.OptionParser[RestoreConfig]("restore") {
-      head("PredictionIO Restore Utility", "0.6.8")
+      head("PredictionIO Restore Utility", "0.7.0")
       help("help") text ("prints this usage text")
       opt[Unit]("upgrade") action { (_, c) =>
         c.copy(upgrade = true)
diff --git a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/UpdateCheck.scala b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/UpdateCheck.scala
index 2ee0eb3..a5eb060 100644
--- a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/UpdateCheck.scala
+++ b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/UpdateCheck.scala
@@ -16,7 +16,7 @@
 
   def main(args: Array[String]) {
     val parser = new scopt.OptionParser[UpdateCheckConfig]("updatecheck") {
-      head("PredictionIO Update Checker", "0.6.8")
+      head("PredictionIO Update Checker", "0.7.0")
       help("help") text ("prints this usage text")
       opt[String]("localVersion") action { (x, c) =>
         c.copy(localVersion = x)
diff --git a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Upgrade.scala b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Upgrade.scala
index a8d05c4..efcc545 100644
--- a/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Upgrade.scala
+++ b/tools/softwaremanager/src/main/scala/io/prediction/tools/softwaremanager/Upgrade.scala
@@ -17,7 +17,7 @@
 /** Upgrades previous version to current version. */
 object Upgrade {
   def main(args: Array[String]) {
-    val thisVersion = "0.6.8"
+    val thisVersion = "0.7.0"
     val parser = new scopt.OptionParser[UpgradeConfig]("upgrade") {
       head("PredictionIO Software Upgrade Utility", thisVersion)
       help("help") text ("prints this usage text")
diff --git a/tools/users/build.sbt b/tools/users/build.sbt
index e509d5e..8183f4a 100644
--- a/tools/users/build.sbt
+++ b/tools/users/build.sbt
@@ -1,15 +1,23 @@
-import com.typesafe.sbt.packager.Keys._
+import xerial.sbt.Pack._
 
 name := "users"
 
 scalariformSettings
 
-packageArchetype.java_application
-
-bashScriptExtraDefines += "addJava \"-Dconfig.file=${app_home}/../conf/predictionio.conf -Dio.prediction.base=${app_home}/..\""
-
 libraryDependencies ++= Seq(
   "commons-codec" % "commons-codec" % "1.8",
   "jline" % "jline" % "2.9",
   "org.slf4j" % "slf4j-nop" % "1.6.0"
 )
+
+packSettings
+
+packJarNameConvention := "full"
+
+packExpandedClasspath := true
+
+packGenerateWindowsBatFile := false
+
+packMain := Map("users" -> "io.prediction.tools.users.Users")
+
+packJvmOpts := Map("users" -> Common.packCommonJvmOpts)
diff --git a/tools/users/src/main/resources/application.conf b/tools/users/src/main/resources/application.conf
index 46e3b7d..d6f0db9 100644
--- a/tools/users/src/main/resources/application.conf
+++ b/tools/users/src/main/resources/application.conf
@@ -13,6 +13,10 @@
 io.prediction.commons.appdata.training.db.host=localhost
 io.prediction.commons.appdata.training.db.port=27017
 
+io.prediction.commons.appdata.validation.db.type=mongodb
+io.prediction.commons.appdata.validation.db.host=localhost
+io.prediction.commons.appdata.validation.db.port=27017
+
 io.prediction.commons.modeldata.db.type=mongodb
 io.prediction.commons.modeldata.db.host=localhost
 io.prediction.commons.modeldata.db.port=27017