Merge branch 'livedoc' into release-0.9.2
diff --git a/NOTICE.txt b/NOTICE.txt
index 4de5d9a..a27ecc8 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
Binary files differ
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index d111753..e303faa 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -19,12 +19,12 @@
 # Figure out where PredictionIO is installed
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
-. $FWDIR/bin/load-pio-env.sh
+. ${FWDIR}/bin/load-pio-env.sh
 
 # Build up classpath
-CLASSPATH="$FWDIR/conf"
+CLASSPATH="${FWDIR}/conf"
 
-ASSEMBLY_DIR="$FWDIR/assembly"
+ASSEMBLY_DIR="${FWDIR}/assembly"
 
 if [ -n "$JAVA_HOME" ]; then
   JAR_CMD="$JAVA_HOME/bin/jar"
@@ -33,15 +33,15 @@
 fi
 
 # Use pio-assembly JAR from either RELEASE or assembly directory
-if [ -f "$FWDIR/RELEASE" ]; then
-  assembly_folder="$FWDIR"/lib
+if [ -f "${FWDIR}/RELEASE" ]; then
+  assembly_folder="${FWDIR}"/lib
 else
-  assembly_folder="$ASSEMBLY_DIR"
+  assembly_folder="${ASSEMBLY_DIR}"
 fi
 
-ASSEMBLY_JAR=$(ls "$assembly_folder"/pio-assembly*.jar 2>/dev/null)
+ASSEMBLY_JAR=$(ls "${assembly_folder}"/pio-assembly*.jar 2>/dev/null)
 
-CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
+CLASSPATH="$CLASSPATH:${ASSEMBLY_JAR}"
 
 # Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail ! Note, this
 # assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
diff --git a/bin/install.sh b/bin/install.sh
index 2d69fc1..5d3201d 100755
--- a/bin/install.sh
+++ b/bin/install.sh
@@ -9,13 +9,13 @@
 # License: http://www.apache.org/licenses/LICENSE-2.0
 
 OS=`uname`
-PIO_VERSION=0.9.1
-SPARK_VERSION=1.2.1
+PIO_VERSION=0.9.2
+SPARK_VERSION=1.3.0
 ELASTICSEARCH_VERSION=1.4.4
-HBASE_VERSION=0.98.11
+HBASE_VERSION=1.0.0
 PIO_DIR=$HOME/PredictionIO
 USER_PROFILE=$HOME/.profile
-PIO_FILE=PredictionIO-$PIO_VERSION.tar.gz
+PIO_FILE=PredictionIO-${PIO_VERSION}.tar.gz
 TEMP_DIR=/tmp
 
 # Ask a yes/no question, with a default of "yes".
@@ -23,7 +23,7 @@
   echo -ne $@ "[Y/n] "
   read -r response
 
-  case $response in
+  case ${response} in
     [yY][eE][sS]|[yY]|"")
       true
       ;;
@@ -62,13 +62,13 @@
   # REQUIRED: No user input for Docker!
   echo -e "\033[1;33mDocker detected!\033[0m"
   echo -e "\033[1;33mForcing Docker defaults!\033[0m"
-  pio_dir=$PIO_DIR
-  vendors_dir=$pio_dir/vendors
+  pio_dir=${PIO_DIR}
+  vendors_dir=${pio_dir}/vendors
 
-  spark_dir=$vendors_dir/spark-$SPARK_VERSION
-  elasticsearch_dir=$vendors_dir/elasticsearch-$ELASTICSEARCH_VERSION
-  hbase_dir=$vendors_dir/hbase-$HBASE_VERSION
-  zookeeper_dir=$vendors_dir/zookeeper
+  spark_dir=${vendors_dir}/spark-${SPARK_VERSION}
+  elasticsearch_dir=${vendors_dir}/elasticsearch-${ELASTICSEARCH_VERSION}
+  hbase_dir=${vendors_dir}/hbase-${HBASE_VERSION}
+  zookeeper_dir=${vendors_dir}/zookeeper
 
   echo "--------------------------------------------------------------------------------"
   echo -e "\033[1;32mOK, looks good!\033[0m"
@@ -93,13 +93,13 @@
   # Non-interactive
   echo -e "\033[1;33mNon-interactive installation requested!\033[0m"
   echo -e "\033[1;33mForcing defaults!\033[0m"
-  pio_dir=$PIO_DIR
-  vendors_dir=$pio_dir/vendors
+  pio_dir=${PIO_DIR}
+  vendors_dir=${pio_dir}/vendors
 
-  spark_dir=$vendors_dir/spark-$SPARK_VERSION
-  elasticsearch_dir=$vendors_dir/elasticsearch-$ELASTICSEARCH_VERSION
-  hbase_dir=$vendors_dir/hbase-$HBASE_VERSION
-  zookeeper_dir=$vendors_dir/zookeeper
+  spark_dir=${vendors_dir}/spark-${SPARK_VERSION}
+  elasticsearch_dir=${vendors_dir}/elasticsearch-${ELASTICSEARCH_VERSION}
+  hbase_dir=${vendors_dir}/hbase-${HBASE_VERSION}
+  zookeeper_dir=${vendors_dir}/zookeeper
 
   echo "--------------------------------------------------------------------------------"
   echo -e "\033[1;32mOK, looks good!\033[0m"
@@ -114,6 +114,7 @@
   # Java Install
   echo -e "\033[1;36mStarting Java install...\033[0m"
 
+  # TODO: make Java installation platform independent
   sudo apt-get update
   sudo apt-get install openjdk-7-jdk libgfortran3 -y
 
@@ -137,21 +138,21 @@
         guess_email=$(git config --global user.email)
       fi
 
-      if [ -n "$guess_email" ]; then
-        read -e -p "Email ($guess_email): " email
+      if [ -n "${guess_email}" ]; then
+        read -e -p "Email (${guess_email}): " email
       else
         read -e -p "Enter email: " email
       fi
       email=${email:-$guess_email}
 
       url="http://direct.prediction.io/$PIO_VERSION/install.json/install/install/$email/"
-      curl --silent $url > /dev/null
+      curl --silent ${url} > /dev/null
     fi
 
-    spark_dir=$vendors_dir/spark-$SPARK_VERSION
-    elasticsearch_dir=$vendors_dir/elasticsearch-$ELASTICSEARCH_VERSION
-    hbase_dir=$vendors_dir/hbase-$HBASE_VERSION
-    zookeeper_dir=$vendors_dir/zookeeper
+    spark_dir=${vendors_dir}/spark-${SPARK_VERSION}
+    elasticsearch_dir=${vendors_dir}/elasticsearch-${ELASTICSEARCH_VERSION}
+    hbase_dir=${vendors_dir}/hbase-${HBASE_VERSION}
+    zookeeper_dir=${vendors_dir}/zookeeper
 
     echo "--------------------------------------------------------------------------------"
     echo -e "\033[1;32mOK, looks good!\033[0m"
@@ -168,10 +169,10 @@
   done
 
   # Java Install
-  if [[ $OS = "Linux" ]] && confirm "\033[1mWould you like to install Java?\033[0m"; then
+  if [[ ${OS} = "Linux" ]] && confirm "\033[1mWould you like to install Java?\033[0m"; then
     echo -e "\033[1mSelect your linux distribution:\033[0m"
     select distribution in "Debian/Ubuntu" "Other"; do
-      case $distribution in
+      case ${distribution} in
         "Debian/Ubuntu")
           echo -e "\033[1;36mStarting Java install...\033[0m"
 
@@ -220,120 +221,120 @@
 
 # PredictionIO
 echo -e "\033[1;36mStarting PredictionIO setup in:\033[0m $pio_dir"
-cd $TEMP_DIR
+cd ${TEMP_DIR}
 
 # delete existing tmp file before download again
-if [[ -e  $PIO_FILE ]]; then
+if [[ -e  ${PIO_FILE} ]]; then
   if confirm "Delete existing $PIO_FILE?"; then
-    rm $PIO_FILE
+    rm ${PIO_FILE}
   fi
 fi
 
-if [[ ! -e $PIO_FILE ]]; then
+if [[ ! -e ${PIO_FILE} ]]; then
   echo "Downloading PredictionIO..."
-  curl -O https://d8k1yxp8elc6b.cloudfront.net/$PIO_FILE
+  curl -O https://d8k1yxp8elc6b.cloudfront.net/${PIO_FILE}
 fi
-tar zxf $PIO_FILE
-rm -rf $pio_dir
-mv PredictionIO-$PIO_VERSION $pio_dir
+tar zxf ${PIO_FILE}
+rm -rf ${pio_dir}
+mv PredictionIO-${PIO_VERSION} ${pio_dir}
 
 if [[ $USER ]]; then
-  chown -R $USER $pio_dir
+  chown -R $USER ${pio_dir}
 fi
 
 echo "Updating ~/.profile to include: $pio_dir"
-PATH=$PATH:$pio_dir/bin
-echo "export PATH=\$PATH:$pio_dir/bin" >> $USER_PROFILE
+PATH=$PATH:${pio_dir}/bin
+echo "export PATH=\$PATH:$pio_dir/bin" >> ${USER_PROFILE}
 
 echo -e "\033[1;32mPredictionIO setup done!\033[0m"
 
-mkdir $vendors_dir
+mkdir ${vendors_dir}
 
 # Spark
 echo -e "\033[1;36mStarting Spark setup in:\033[0m $spark_dir"
-if [[ -e spark-$SPARK_VERSION-bin-hadoop2.4.tgz ]]; then
+if [[ -e spark-${SPARK_VERSION}-bin-hadoop2.4.tgz ]]; then
   if confirm "Delete existing spark-$SPARK_VERSION-bin-hadoop2.4.tgz?"; then
-    rm spark-$SPARK_VERSION-bin-hadoop2.4.tgz
+    rm spark-${SPARK_VERSION}-bin-hadoop2.4.tgz
   fi
 fi
-if [[ ! -e spark-$SPARK_VERSION-bin-hadoop2.4.tgz ]]; then
+if [[ ! -e spark-${SPARK_VERSION}-bin-hadoop2.4.tgz ]]; then
   echo "Downloading Spark..."
-  curl -O http://d3kbcqa49mib13.cloudfront.net/spark-$SPARK_VERSION-bin-hadoop2.4.tgz
+  curl -O http://d3kbcqa49mib13.cloudfront.net/spark-${SPARK_VERSION}-bin-hadoop2.4.tgz
 fi
-tar xf spark-$SPARK_VERSION-bin-hadoop2.4.tgz
-rm -rf $spark_dir
-mv spark-$SPARK_VERSION-bin-hadoop2.4 $spark_dir
+tar xf spark-${SPARK_VERSION}-bin-hadoop2.4.tgz
+rm -rf ${spark_dir}
+mv spark-${SPARK_VERSION}-bin-hadoop2.4 ${spark_dir}
 
 echo "Updating: $pio_dir/conf/pio-env.sh"
-$SED_CMD "s|SPARK_HOME=/path_to_apache_spark|SPARK_HOME=$spark_dir|g" $pio_dir/conf/pio-env.sh
+${SED_CMD} "s|SPARK_HOME=.*|SPARK_HOME=$spark_dir|g" ${pio_dir}/conf/pio-env.sh
 
 echo -e "\033[1;32mSpark setup done!\033[0m"
 
 # Elasticsearch
 echo -e "\033[1;36mStarting Elasticsearch setup in:\033[0m $elasticsearch_dir"
-if [[ -e elasticsearch-$ELASTICSEARCH_VERSION.tar.gz ]]; then
+if [[ -e elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz ]]; then
   if confirm "Delete existing elasticsearch-$ELASTICSEARCH_VERSION.tar.gz?"; then
-    rm elasticsearch-$ELASTICSEARCH_VERSION.tar.gz
+    rm elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
   fi
 fi
-if [[ ! -e elasticsearch-$ELASTICSEARCH_VERSION.tar.gz ]]; then
+if [[ ! -e elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz ]]; then
   echo "Downloading Elasticsearch..."
-  curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-$ELASTICSEARCH_VERSION.tar.gz
+  curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
 fi
-tar zxf elasticsearch-$ELASTICSEARCH_VERSION.tar.gz
-rm -rf $elasticsearch_dir
-mv elasticsearch-$ELASTICSEARCH_VERSION $elasticsearch_dir
+tar zxf elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
+rm -rf ${elasticsearch_dir}
+mv elasticsearch-${ELASTICSEARCH_VERSION} ${elasticsearch_dir}
 
 echo "Updating: $elasticsearch_dir/config/elasticsearch.yml"
-echo 'network.host: 127.0.0.1' >> $elasticsearch_dir/config/elasticsearch.yml
+echo 'network.host: 127.0.0.1' >> ${elasticsearch_dir}/config/elasticsearch.yml
 
 echo "Updating: $pio_dir/conf/pio-env.sh"
-echo "PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$elasticsearch_dir" >> $pio_dir/conf/pio-env.sh
+echo "PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$elasticsearch_dir" >> ${pio_dir}/conf/pio-env.sh
 
 echo -e "\033[1;32mElasticsearch setup done!\033[0m"
 
 # HBase
 echo -e "\033[1;36mStarting HBase setup in:\033[0m $hbase_dir"
-if [[ -e hbase-$HBASE_VERSION-hadoop2-bin.tar.gz ]]; then
-  if confirm "Delete existing hbase-$HBASE_VERSION-hadoop2-bin.tar.gz?"; then
-    rm hbase-$HBASE_VERSION-hadoop2-bin.tar.gz
+if [[ -e hbase-${HBASE_VERSION}-bin.tar.gz ]]; then
+  if confirm "Delete existing hbase-$HBASE_VERSION-bin.tar.gz?"; then
+    rm hbase-${HBASE_VERSION}-bin.tar.gz
   fi
 fi
-if [[ ! -e hbase-$HBASE_VERSION-hadoop2-bin.tar.gz ]]; then
+if [[ ! -e hbase-${HBASE_VERSION}-bin.tar.gz ]]; then
   echo "Downloading HBase..."
-  curl -O http://archive.apache.org/dist/hbase/hbase-$HBASE_VERSION/hbase-$HBASE_VERSION-hadoop2-bin.tar.gz
+  curl -O http://archive.apache.org/dist/hbase/hbase-${HBASE_VERSION}/hbase-${HBASE_VERSION}-bin.tar.gz
 fi
-tar zxf hbase-$HBASE_VERSION-hadoop2-bin.tar.gz
-rm -rf $hbase_dir
-mv hbase-$HBASE_VERSION-hadoop2 $hbase_dir
+tar zxf hbase-${HBASE_VERSION}-bin.tar.gz
+rm -rf ${hbase_dir}
+mv hbase-${HBASE_VERSION} ${hbase_dir}
 
 echo "Creating default site in: $hbase_dir/conf/hbase-site.xml"
-cat <<EOT > $hbase_dir/conf/hbase-site.xml
+cat <<EOT > ${hbase_dir}/conf/hbase-site.xml
 <configuration>
   <property>
     <name>hbase.rootdir</name>
-    <value>file://$hbase_dir/data</value>
+    <value>file://${hbase_dir}/data</value>
   </property>
   <property>
     <name>hbase.zookeeper.property.dataDir</name>
-    <value>$zookeeper_dir</value>
+    <value>${zookeeper_dir}</value>
   </property>
 </configuration>
 EOT
 
 echo "Updating: $hbase_dir/conf/hbase-env.sh to include $JAVA_HOME"
-$SED_CMD "s|# export JAVA_HOME=/usr/java/jdk1.6.0/|export JAVA_HOME=$JAVA_HOME|" $hbase_dir/conf/hbase-env.sh
+${SED_CMD} "s|# export JAVA_HOME=/usr/java/jdk1.6.0/|export JAVA_HOME=$JAVA_HOME|" ${hbase_dir}/conf/hbase-env.sh
 
 echo "Updating: $pio_dir/conf/pio-env.sh"
-echo "PIO_STORAGE_SOURCES_HBASE_HOME=$hbase_dir" >> $pio_dir/conf/pio-env.sh
-$SED_CMD "s|HBASE_CONF_DIR=\$PIO_HOME/conf|HBASE_CONF_DIR=$hbase_dir/conf|" $pio_dir/conf/pio-env.sh
+echo "PIO_STORAGE_SOURCES_HBASE_HOME=$hbase_dir" >> ${pio_dir}/conf/pio-env.sh
+${SED_CMD} "s|HBASE_CONF_DIR=\$PIO_HOME/conf|HBASE_CONF_DIR=$hbase_dir/conf|" ${pio_dir}/conf/pio-env.sh
 
 echo -e "\033[1;32mHBase setup done!\033[0m"
 
 echo "Updating permissions on: $vendors_dir"
 
 if [[ $USER ]]; then
-  chown -R $USER $vendors_dir
+  chown -R $USER ${vendors_dir}
 fi
 
 echo -e "\033[1;32mInstallation done!\033[0m"
diff --git a/bin/load-pio-env.sh b/bin/load-pio-env.sh
index 3842cac..79187b0 100755
--- a/bin/load-pio-env.sh
+++ b/bin/load-pio-env.sh
@@ -24,7 +24,7 @@
   # Returns the parent of the directory this script lives in.
   parent_dir="$(cd `dirname $0`/..; pwd)"
 
-  use_conf_dir=${PIO_CONF_DIR:-"$parent_dir/conf"}
+  use_conf_dir=${PIO_CONF_DIR:-"${parent_dir}/conf"}
 
   if [ -f "${use_conf_dir}/pio-env.sh" ]; then
     # Promote all variable declarations to environment (exported) variables
@@ -32,7 +32,6 @@
     . "${use_conf_dir}/pio-env.sh"
     set +a
   else
-    echo "Warning: pio-env.sh was not found in ${use_conf_dir}. Using system environment variables instead."
-    echo ""
+    echo -e "\033[0;35mWarning: pio-env.sh was not found in ${use_conf_dir}. Using system environment variables instead.\033[0m\n"
   fi
 fi
diff --git a/bin/pio b/bin/pio
index 7ff4566..f2ca006 100755
--- a/bin/pio
+++ b/bin/pio
@@ -19,19 +19,19 @@
   local needle=$1;
   shift
   for str in $@; do
-    if [ "$str" = "$needle" ]; then
-      echo $i
+    if [ "${str}" = "$needle" ]; then
+      echo ${i}
       return
     else
       ((i++))
     fi
   done
-  echo $i
+  echo ${i}
 }
 
 export PIO_HOME="$(cd `dirname $0`/..; pwd)"
 
-export PIO_CONF_DIR="$PIO_HOME/conf"
+export PIO_CONF_DIR="${PIO_HOME}/conf"
 
 FIRST_SEP=$(search "--" $@)
 
@@ -39,4 +39,4 @@
 
 SECOND_HALF="${@:$FIRST_SEP+1}"
 
-exec $PIO_HOME/bin/pio-class io.prediction.tools.console.Console $FIRST_HALF --pio-home $PIO_HOME $SECOND_HALF
+exec ${PIO_HOME}/bin/pio-class io.prediction.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF}
diff --git a/bin/pio-class b/bin/pio-class
index 4a27a04..181dbb5 100755
--- a/bin/pio-class
+++ b/bin/pio-class
@@ -25,11 +25,11 @@
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
 # Export this as PIO_HOME
-export PIO_HOME="$FWDIR"
+export PIO_HOME="${FWDIR}"
 
-. $FWDIR/bin/load-pio-env.sh
+. ${FWDIR}/bin/load-pio-env.sh
 
-. $FWDIR/bin/semver.sh
+. ${FWDIR}/bin/semver.sh
 
 if [ -z "$1" ]; then
   echo "Usage: pio-class <class> [<args>]" 1>&2
@@ -38,17 +38,17 @@
 
 # Warn if log4j.properties is not present
 if [ ! -f "$FWDIR/conf/log4j.properties" ]; then
-  echo "Warning: log4j.properties is missing from $FWDIR/conf"
+  echo -e "Warning: log4j.properties is missing from $FWDIR/conf\033[0m"
 fi
 
 # Make sure the Apache Spark version meets the prerequisite if it is a binary
 # distribution
-MIN_SPARK_VERSION="1.2.0"
+MIN_SPARK_VERSION="1.3.0"
 if [ -r "$SPARK_HOME/RELEASE" ]; then
-  SPARK_VERSION=`awk '{print $2}' $SPARK_HOME/RELEASE`
-  if semverLT $SPARK_VERSION $MIN_SPARK_VERSION; then
-    echo "You have Apache Spark $SPARK_VERSION at $SPARK_HOME which does not meet the minimum version requirement of $MIN_SPARK_VERSION."
-    echo "Aborting."
+  SPARK_VERSION=`head -n 1 $SPARK_HOME/RELEASE | awk '{print $2}'`
+  if semverLT ${SPARK_VERSION} ${MIN_SPARK_VERSION}; then
+    echo -e "\033[0;31mYou have Apache Spark $SPARK_VERSION at $SPARK_HOME which does not meet the minimum version requirement of $MIN_SPARK_VERSION.\033[0m"
+    echo -e "\033[0;31mAborting.\033[0m"
     exit 1
   fi
 else
@@ -62,18 +62,18 @@
   if [ `command -v java` ]; then
     RUNNER="java"
   else
-    echo "JAVA_HOME is not set" >&2
+    echo -e "\033[0;31mJAVA_HOME is not set\033[0m" >&2
     exit 1
   fi
 fi
 
 # Compute classpath using external script
-classpath_output=$($FWDIR/bin/compute-classpath.sh)
+classpath_output=$(${FWDIR}/bin/compute-classpath.sh)
 if [[ "$?" != "0" ]]; then
   echo "$classpath_output"
   exit 1
 else
-  CLASSPATH=$classpath_output
+  CLASSPATH=${classpath_output}
 fi
 
 export CLASSPATH
diff --git a/bin/pio-daemon b/bin/pio-daemon
index 82543a9..b63ddb6 100755
--- a/bin/pio-daemon
+++ b/bin/pio-daemon
@@ -20,13 +20,13 @@
   shift
   for str in $@; do
     if [ "$str" = "$needle" ]; then
-      echo $i
+      echo ${i}
       return
     else
       ((i++))
     fi
   done
-  echo $i
+  echo ${i}
 }
 
 export PIO_HOME="$(cd `dirname $0`/..; pwd)"
@@ -43,6 +43,6 @@
 
 SECOND_HALF="${@:$FIRST_SEP+1}"
 
-exec nohup $PIO_HOME/bin/pio-class io.prediction.tools.console.Console $FIRST_HALF --pio-home $PIO_HOME $SECOND_HALF <&- > /dev/null 2>&1 &
+exec nohup ${PIO_HOME}/bin/pio-class io.prediction.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF} <&- > /dev/null 2>&1 &
 
-echo $! > $PIDFILE
+echo $! > ${PIDFILE}
diff --git a/bin/pio-shell b/bin/pio-shell
new file mode 100755
index 0000000..226f2e0
--- /dev/null
+++ b/bin/pio-shell
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Copyright 2015 TappingStone, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+export PIO_HOME="$(cd `dirname $0`/..; pwd)"
+. ${PIO_HOME}/bin/load-pio-env.sh
+
+if [[ "$1" == "--with-spark" ]]
+then
+  echo "Starting the PIO shell with the Apache Spark Shell."
+  # compute the $ASSEMBLY_JAR, the location of the assembly jar, with
+  # bin/compute-classpath.sh
+  . ${PIO_HOME}/bin/compute-classpath.sh
+  shift
+  $SPARK_HOME/bin/spark-shell --jars $ASSEMBLY_JAR $@
+else
+  echo -e "\033[0;33mStarting the PIO shell without Apache Spark.\033[0m"
+  echo -e "\033[0;33mIf you need the Apache Spark library, run 'pio-shell --with-spark [spark-submit arguments...]'.\033[0m"
+  cd ${PIO_HOME}
+  ./sbt/sbt console
+fi
+
diff --git a/bin/pio-start-all b/bin/pio-start-all
index 7072fac..e999b21 100755
--- a/bin/pio-start-all
+++ b/bin/pio-start-all
@@ -20,23 +20,23 @@
 # Figure out where PredictionIO is installed
 export PIO_HOME="$(cd `dirname $0`/..; pwd)"
 
-. $PIO_HOME/bin/load-pio-env.sh
+. ${PIO_HOME}/bin/load-pio-env.sh
 
 # Elasticsearch
 echo "Starting Elasticsearch..."
 if [ -n "$PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME" ]; then
   JPS=`jps`
-  if [[ $JPS =~ "Elasticsearch" ]]; then
-    echo "Elasticsearch is already running. Please use pio-stop-all to try stopping it first."
-    echo "Note: If you started Elasticsearch manually, you will need to kill it manually."
-    echo "Aborting..."
+  if [[ ${JPS} =~ "Elasticsearch" ]]; then
+    echo -e "\033[0;31mElasticsearch is already running. Please use pio-stop-all to try stopping it first.\033[0m"
+    echo -e "\033[0;31mNote: If you started Elasticsearch manually, you will need to kill it manually.\033[0m"
+    echo -e "\033[0;31mAborting...\033[0m"
     exit 1
   else
     $PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME/bin/elasticsearch -d -p $PIO_HOME/es.pid
   fi
 else
-  echo "Please set PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME in conf/pio-env.sh, or in your environment."
-  echo "Cannot start Elasticsearch. Aborting..."
+  echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME in conf/pio-env.sh, or in your environment.\033[0m"
+  echo -e "\033[0;31mCannot start Elasticsearch. Aborting...\033[0m"
   exit 1
 fi
 
@@ -45,11 +45,11 @@
 if [ -n "$PIO_STORAGE_SOURCES_HBASE_HOME" ]; then
   $PIO_STORAGE_SOURCES_HBASE_HOME/bin/start-hbase.sh
 else
-  echo "Please set PIO_STORAGE_SOURCES_HBASE_HOME in conf/pio-env.sh, or in your environment."
+  echo -e "\033[0;31mPlease set PIO_STORAGE_SOURCES_HBASE_HOME in conf/pio-env.sh, or in your environment.\033[0m"
   # Kill everything for cleanliness
-  echo "Cannot start HBase. Aborting..."
+  echo -e "\033[0;31mCannot start HBase. Aborting...\033[0m"
   sleep 3
-  $PIO_HOME/bin/pio-stop-all
+  ${PIO_HOME}/bin/pio-stop-all
   exit 1
 fi
 
@@ -57,4 +57,4 @@
 echo "Waiting 10 seconds for HBase to fully initialize..."
 sleep 10
 echo "Starting PredictionIO Event Server..."
-$PIO_HOME/bin/pio-daemon $PIO_HOME/eventserver.pid eventserver --ip 0.0.0.0
+${PIO_HOME}/bin/pio-daemon ${PIO_HOME}/eventserver.pid eventserver --ip 0.0.0.0
diff --git a/bin/pio-stop-all b/bin/pio-stop-all
index 2f071c2..326461b 100755
--- a/bin/pio-stop-all
+++ b/bin/pio-stop-all
@@ -20,14 +20,14 @@
 # Figure out where PredictionIO is installed
 export PIO_HOME="$(cd `dirname $0`/..; pwd)"
 
-. $PIO_HOME/bin/load-pio-env.sh
+. ${PIO_HOME}/bin/load-pio-env.sh
 
 # PredictionIO Event Server
 echo "Stopping PredictionIO Event Server..."
-PIDFILE=$PIO_HOME/eventserver.pid
-if [ -e $PIDFILE ]; then
-  cat $PIDFILE | xargs kill
-  rm $PIDFILE
+PIDFILE=${PIO_HOME}/eventserver.pid
+if [ -e ${PIDFILE} ]; then
+  cat ${PIDFILE} | xargs kill
+  rm ${PIDFILE}
 fi
 
 # HBase
@@ -38,8 +38,8 @@
 
 # Elasticsearch
 echo "Stopping Elasticsearch..."
-PIDFILE=$PIO_HOME/es.pid
-if [ -e $PIDFILE ]; then
-  cat $PIDFILE | xargs kill
-  rm $PIDFILE
+PIDFILE=${PIO_HOME}/es.pid
+if [ -e ${PIDFILE} ]; then
+  cat ${PIDFILE} | xargs kill
+  rm ${PIDFILE}
 fi
diff --git a/build.sbt b/build.sbt
index e4a0fbf..d5d0649 100644
--- a/build.sbt
+++ b/build.sbt
@@ -18,7 +18,7 @@
 
 name := "pio"
 
-version in ThisBuild := "0.9.2-SNAPSHOT"
+version in ThisBuild := "0.9.2"
 
 organization in ThisBuild := "io.prediction"
 
@@ -37,7 +37,7 @@
 
 json4sVersion in ThisBuild := "3.2.10"
 
-sparkVersion in ThisBuild := "1.2.0"
+sparkVersion in ThisBuild := "1.3.0"
 
 lazy val pioBuildInfoSettings = buildInfoSettings ++ Seq(
   sourceGenerators in Compile <+= buildInfo,
@@ -104,6 +104,7 @@
     "breeze",
     "html",
     "io.prediction.annotation",
+    "io.prediction.controller.html",
     "io.prediction.controller.java",
     "io.prediction.core",
     "io.prediction.data.api",
diff --git a/conf/hbase-site.xml b/conf/hbase-site.xml
deleted file mode 100644
index 1ccc855..0000000
--- a/conf/hbase-site.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<configuration>
-  <property>
-    <name>hbase.zookeeper.quorum</name>
-    <value>localhost</value>
-  </property>
-</configuration>
diff --git a/conf/log4j.properties b/conf/log4j.properties
index 791f21b..067a1a6 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -16,7 +16,9 @@
 log4j.logger.org.elasticsearch=WARN
 log4j.logger.org.apache.hadoop=WARN
 log4j.logger.org.apache.hadoop.hbase.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
 log4j.logger.org.apache.spark=WARN
 log4j.logger.org.apache.zookeeper=ERROR
 log4j.logger.org.eclipse.jetty=WARN
+log4j.logger.org.spark-project.jetty=WARN
 log4j.logger.akka=WARN
diff --git a/conf/pio-env.sh.template b/conf/pio-env.sh.template
index ca10957..8200ab9 100644
--- a/conf/pio-env.sh.template
+++ b/conf/pio-env.sh.template
@@ -8,7 +8,7 @@
 # you need to change these to fit your site.
 
 # SPARK_HOME: Apache Spark is a hard dependency and must be configured.
-SPARK_HOME=/path_to_apache_spark
+SPARK_HOME=$PIO_HOME/vendors/spark-1.3.0-bin-hadoop2.4
 
 # ES_CONF_DIR: You must configure this if you have advanced configuration for
 #              your Elasticsearch setup.
@@ -20,7 +20,7 @@
 
 # HBASE_CONF_DIR: You must configure this if you intend to run PredictionIO
 #                 with HBase on a remote cluster.
-HBASE_CONF_DIR=$PIO_HOME/conf
+HBASE_CONF_DIR=$PIO_HOME/vendors/hbase-1.0.0/conf
 
 # Filesystem paths where PredictionIO uses as block storage.
 PIO_FS_BASEDIR=$HOME/.pio_store
@@ -46,6 +46,7 @@
 PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
 PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
 PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
+PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$PIO_HOME/vendors/elasticsearch-1.4.4
 
 PIO_STORAGE_SOURCES_LOCALFS_TYPE=localfs
 PIO_STORAGE_SOURCES_LOCALFS_HOSTS=$PIO_FS_BASEDIR/models
@@ -54,3 +55,4 @@
 PIO_STORAGE_SOURCES_HBASE_TYPE=hbase
 PIO_STORAGE_SOURCES_HBASE_HOSTS=0
 PIO_STORAGE_SOURCES_HBASE_PORTS=0
+PIO_STORAGE_SOURCES_HBASE_HOME=$PIO_HOME/vendors/hbase-1.0.0
diff --git a/core/build.sbt b/core/build.sbt
index 6450581..53b98a6 100644
--- a/core/build.sbt
+++ b/core/build.sbt
@@ -15,27 +15,26 @@
 name := "core"
 
 libraryDependencies ++= Seq(
-  "com.github.scopt"       %% "scopt"           % "3.2.0",
-  "com.google.code.gson"    % "gson"            % "2.2.4",
-  "com.google.guava"        % "guava"           % "18.0",
-  "com.twitter"            %% "chill"           % "0.5.0"
+  "com.github.scopt"       %% "scopt"            % "3.2.0",
+  "com.google.code.gson"    % "gson"             % "2.2.4",
+  "com.google.guava"        % "guava"            % "18.0",
+  "com.twitter"            %% "chill"            % "0.5.0"
     exclude("com.esotericsoftware.minlog", "minlog"),
-  "com.twitter"            %% "chill-bijection" % "0.5.0",
-  "commons-io"              % "commons-io"      % "2.4",
-  "io.spray"               %% "spray-can"       % "1.3.2",
-  "io.spray"               %% "spray-routing"   % "1.3.2",
-  "net.jodah"               % "typetools"       % "0.3.1",
-  "org.apache.spark"       %% "spark-core"      % sparkVersion.value % "provided",
-  "org.clapper"            %% "grizzled-slf4j"  % "1.0.2",
-  "org.elasticsearch"       % "elasticsearch"   % elasticsearchVersion.value,
-  "org.json4s"             %% "json4s-native"   % json4sVersion.value,
-  "org.json4s"             %% "json4s-ext"      % json4sVersion.value,
-  "org.scalaj"             %% "scalaj-http"     % "1.1.0",
-  "org.scalatest"          %% "scalatest"       % "2.1.6" % "test",
-  "org.slf4j"               % "slf4j-log4j12"   % "1.7.7",
-  "org.specs2"             %% "specs2"          % "2.3.13" % "test")
-
-net.virtualvoid.sbt.graph.Plugin.graphSettings
+  "com.twitter"            %% "chill-bijection"  % "0.5.0",
+  "de.javakaffee"           % "kryo-serializers" % "0.28",
+  "commons-io"              % "commons-io"       % "2.4",
+  "io.spray"               %% "spray-can"        % "1.3.2",
+  "io.spray"               %% "spray-routing"    % "1.3.2",
+  "net.jodah"               % "typetools"        % "0.3.1",
+  "org.apache.spark"       %% "spark-core"       % sparkVersion.value % "provided",
+  "org.clapper"            %% "grizzled-slf4j"   % "1.0.2",
+  "org.elasticsearch"       % "elasticsearch"    % elasticsearchVersion.value,
+  "org.json4s"             %% "json4s-native"    % json4sVersion.value,
+  "org.json4s"             %% "json4s-ext"       % json4sVersion.value,
+  "org.scalaj"             %% "scalaj-http"      % "1.1.0",
+  "org.scalatest"          %% "scalatest"        % "2.1.6" % "test",
+  "org.slf4j"               % "slf4j-log4j12"    % "1.7.7",
+  "org.specs2"             %% "specs2"           % "2.3.13" % "test")
 
 //testOptions := Seq(Tests.Filter(s => Seq("Dev").exists(s.contains(_))))
 
diff --git a/core/src/main/java/io/prediction/controller/java/JavaEngineBuilder.java b/core/src/main/java/io/prediction/controller/java/JavaEngineBuilder.java
deleted file mode 100644
index 21f66c7..0000000
--- a/core/src/main/java/io/prediction/controller/java/JavaEngineBuilder.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.controller.java;
-
-import io.prediction.controller.Params;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * A convenient builder class for linking up Data Source, Preparator, Algorithm,
- * and Serving classes as an Engine.
- *
- * @param <TD> Training Data
- * @param <EI> Evaluation Info
- * @param <PD> Prepared Data
- * @param <Q> Input Query
- * @param <P> Output Prediction
- * @param <A> Actual Value
- */
-public class JavaEngineBuilder<TD, EI, PD, Q, P, A> {
-  /** Data Source class. Default to null. */
-  protected Class<? extends LJavaDataSource<TD, EI, Q, A>> dataSourceClass = null;
-  /** Preparator class. Default to null. */
-  protected Class<? extends LJavaPreparator<TD, PD>> preparatorClass = null;
-  /** Map of Algorithm names to respective classes. Default to empty Map. */
-  protected Map<String, Class<? extends LJavaAlgorithm<PD, ?, Q, P>>>
-    algorithmClassMap = new HashMap <> ();
-  /** Serving class. Default to null. */
-  protected Class<? extends LJavaServing<Q, P>> servingClass = null;
-
-  /**
-   * Instantiate an empty Java-based Engine builder.
-   */
-  public JavaEngineBuilder() {}
-
-  /**
-   * Set the Data Source class of this Engine.
-   */
-  public JavaEngineBuilder<TD, EI, PD, Q, P, A> dataSourceClass(
-      Class<? extends LJavaDataSource<TD, EI, Q, A>> cls) {
-    dataSourceClass = cls;
-    return this;
-  }
-
-  /**
-   * Set the Preparator class of this Engine.
-   */
-  public JavaEngineBuilder<TD, EI, PD, Q, P, A> preparatorClass(
-      Class<? extends LJavaPreparator<TD, PD>> cls) {
-    preparatorClass = cls;
-    return this;
-  }
-
-  /**
-   * Add an Algorithm class to this Engine.
-   */
-  public JavaEngineBuilder<TD, EI, PD, Q, P, A> addAlgorithmClass(
-      String name, Class<? extends LJavaAlgorithm<PD, ?, Q, P>> cls) {
-    algorithmClassMap.put(name, cls);
-    return this;
-  }
-
-  /**
-   * Set the Serving class of this Engine.
-   */
-  public JavaEngineBuilder<TD, EI, PD, Q, P, A> servingClass(
-      Class<? extends LJavaServing<Q, P>> cls) {
-    servingClass = cls;
-    return this;
-  }
-
-  /**
-   * Build and return an Engine instance.
-   */
-  public JavaEngine<TD, EI, PD, Q, P, A> build() {
-    return new JavaEngine<> (dataSourceClass, preparatorClass, algorithmClassMap, servingClass);
-  }
-
-  @Override public String toString() {
-    return "JavaEngineBuilder ds=" + dataSourceClass + " p=" + preparatorClass + " algo=" +
-      algorithmClassMap + " s=" + servingClass;
-  }
-
-}
diff --git a/core/src/main/java/io/prediction/controller/java/JavaEngineParamsBuilder.java b/core/src/main/java/io/prediction/controller/java/JavaEngineParamsBuilder.java
deleted file mode 100644
index 6e7ad23..0000000
--- a/core/src/main/java/io/prediction/controller/java/JavaEngineParamsBuilder.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.controller.java;
-
-import io.prediction.controller.Params;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import scala.Tuple2;
-
-/**
- * Convenient builder class for assembling different types of parameters into a
- * single container of EngineParams.
- */
-public class JavaEngineParamsBuilder {
-  private Params dataSourceParams = new EmptyParams();
-  private Params preparatorParams = new EmptyParams();
-  private List<Tuple2<String, Params>> algorithmParamsList =
-    new ArrayList<Tuple2<String, Params>>();
-  private Params servingParams = new EmptyParams();
-
-  /**
-   * Instantiates an empty EngineParams builder.
-   */
-  public JavaEngineParamsBuilder() {}
-
-  /**
-   * Set the Data Source parameters of the Engine.
-   */
-  public JavaEngineParamsBuilder dataSourceParams(Params p) {
-    dataSourceParams = p;
-    return this;
-  }
-
-  /**
-   * Set the Preparator parameters of the Engine.
-   */
-  public JavaEngineParamsBuilder preparatorParams(Params p) {
-    preparatorParams = p;
-    return this;
-  }
-
-  /**
-   * Add an Algorithm parameter to the Engine.
-   */
-  public JavaEngineParamsBuilder addAlgorithmParams(String algorithmName, Params p) {
-    algorithmParamsList.add(new Tuple2<String, Params>(algorithmName, p));
-    return this;
-  }
-
-  /**
-   * Set the Serving parameters of the Engine.
-   */
-  public JavaEngineParamsBuilder servingParams(Params p) {
-    servingParams = p;
-    return this;
-  }
-
-  /**
-   * Build and return an instance of EngineParams.
-   */
-  public JavaEngineParams build() {
-    return new JavaEngineParams(
-        dataSourceParams, preparatorParams, algorithmParamsList, servingParams);
-  }
-}
diff --git a/core/src/main/java/io/prediction/controller/java/JavaSimpleEngineBuilder.java b/core/src/main/java/io/prediction/controller/java/JavaSimpleEngineBuilder.java
deleted file mode 100644
index fae72e6..0000000
--- a/core/src/main/java/io/prediction/controller/java/JavaSimpleEngineBuilder.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.controller.java;
-
-import io.prediction.controller.Params;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * A convenient builder class for linking up Data Source, Preparator, Algorithm,
- * and Serving classes as an Engine. This builder further simplify the process
- * by supplying default identity data preparator and first serving classes.
- *
- * @param <TD> Training Data
- * @param <EI> Evaluation Info
- * @param <Q> Input Query
- * @param <P> Output Prediction
- * @param <A> Actual Value (for evaluation)
- */
-public class JavaSimpleEngineBuilder<TD, EI, Q, P, A>
-  extends JavaEngineBuilder<TD, EI, TD, Q, P, A> {
-
-  /**
-   * Set the Data Source class of this Engine.
-   */
-  @Override
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> dataSourceClass(
-      Class<? extends LJavaDataSource<TD, EI, Q, A>> cls) {
-    super.dataSourceClass = cls;
-    return this;
-  }
-
-  /**
-   * Set the Preparator class of this Engine.
-   */
-  @Override
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> preparatorClass(
-      Class<? extends LJavaPreparator<TD, TD>> cls) {
-    super.preparatorClass = cls;
-    return this;
-  }
-
-  /**
-   * Set the Preparator class of this Engine as IdentityPreparator
-   */
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> preparatorClass() {
-    super.preparatorClass = LJavaIdentityPreparator.apply(this);
-    return this;
-  }
-
-  /**
-   * Add an Algorithm class to this Engine. If the engine does not already have
-   * a preparator, it will add an identity preparator to the engine.
-   */
-  @Override
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> addAlgorithmClass(
-      String name, Class<? extends LJavaAlgorithm<TD, ?, Q, P>> cls) {
-    super.algorithmClassMap.put(name, cls);
-    return this;
-  }
-
-  /**
-   * Set the Serving class of this Engine.
-   */
-  @Override
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> servingClass(
-      Class<? extends LJavaServing<Q, P>> cls) {
-    super.servingClass = cls;
-    return this;
-  }
-
-  /**
-   * Set the Serving class of this Engine as FirstServing.
-   */
-  public JavaSimpleEngineBuilder<TD, EI, Q, P, A> servingClass() {
-    super.servingClass = LJavaFirstServing.apply(this);
-    return this;
-  }
-
-  /**
-   * Build and return an Engine instance.
-   */
-  @Override
-  public JavaSimpleEngine<TD, EI, Q, P, A> build() {
-    return new JavaSimpleEngine<TD, EI, Q, P, A> (
-      super.dataSourceClass, super.preparatorClass, super.algorithmClassMap, super.servingClass);
-  }
-
-}
diff --git a/core/src/main/java/io/prediction/controller/java/PJavaEngineBuilder.java b/core/src/main/java/io/prediction/controller/java/PJavaEngineBuilder.java
deleted file mode 100644
index 18223e1..0000000
--- a/core/src/main/java/io/prediction/controller/java/PJavaEngineBuilder.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package io.prediction.controller.java;
-
-import io.prediction.controller.Params;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * A convenient builder class for linking up Data Source, Preparator, Algorithm,
- * and Serving classes as an Engine.
- *
- * @param <TD> Training Data
- * @param <EI> Evaluation Info
- * @param <PD> Prepared Data
- * @param <Q> Input Query
- * @param <P> Output Prediction
- * @param <A> Actual Value
- */
-public class PJavaEngineBuilder<TD, EI, PD, Q, P, A> {
-  /** Data Source class. Default to null. */
-  protected Class<? extends PJavaDataSource<TD, EI, Q, A>>
-      dataSourceClass = null;
-  /** Preparator class. Default to null. */
-  protected Class<? extends PJavaPreparator<TD, PD>>
-      preparatorClass = null;
-  /** Map of Algorithm names to respective classes. Default to empty Map. */
-  protected Map<String, Class<? extends PJavaAlgorithm<PD, ?, Q, P>>>
-      algorithmClassMap = new HashMap <> ();
-  /** Serving class. Default to null. */
-  protected Class<? extends LJavaServing<Q, P>>
-      servingClass = null;
-
-  /**
-   * Instantiate an empty Java-based Engine builder.
-   */
-  public PJavaEngineBuilder() {}
-
-  /**
-   * Set the Data Source class of this Engine.
-   */
-  public PJavaEngineBuilder<TD, EI, PD, Q, P, A> dataSourceClass(
-      Class<? extends PJavaDataSource<TD, EI, Q, A>> cls) {
-    dataSourceClass = cls;
-    return this;
-  }
-
-  /**
-   * Set the Preparator class of this Engine.
-   */
-  public PJavaEngineBuilder<TD, EI, PD, Q, P, A> preparatorClass(
-      Class<? extends PJavaPreparator<TD, PD>> cls) {
-    preparatorClass = cls;
-    return this;
-  }
-
-  /**
-   * Add an Algorithm class to this Engine.
-   */
-  public PJavaEngineBuilder<TD, EI, PD, Q, P, A> addAlgorithmClass(
-      String name, Class<? extends PJavaAlgorithm<PD, ?, Q, P>> cls) {
-    algorithmClassMap.put(name, cls);
-    return this;
-  }
-
-  /**
-   * Set the Serving class of this Engine.
-   */
-  public PJavaEngineBuilder<TD, EI, PD, Q, P, A> servingClass(
-      Class<? extends LJavaServing<Q, P>> cls) {
-    servingClass = cls;
-    return this;
-  }
-
-  /**
-   * Build and return an Engine instance.
-   */
-  public PJavaEngine<TD, EI, PD, Q, P, A> build() {
-    return new PJavaEngine<> (dataSourceClass, preparatorClass, algorithmClassMap, servingClass);
-  }
-
-  @Override public String toString() {
-    return "PJavaEngineBuilder ds=" + dataSourceClass + " p=" + preparatorClass + " algo=" +
-      algorithmClassMap + " s=" + servingClass;
-  }
-
-}
diff --git a/core/src/main/java/io/prediction/controller/java/WorkflowParamsBuilder.java b/core/src/main/java/io/prediction/controller/java/WorkflowParamsBuilder.java
deleted file mode 100644
index f025bd9..0000000
--- a/core/src/main/java/io/prediction/controller/java/WorkflowParamsBuilder.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.controller.java;
-
-import io.prediction.controller.WorkflowParams;
-
-/**
- * Builder for WorkflowParams.
- */
-public class WorkflowParamsBuilder {
-  private String batch = "";
-  private int verbose = 2;
-  private boolean saveModel = false;
-
-  /**
-   * Initializes a builder with default values.
-   */
-  public WorkflowParamsBuilder() {}
-
-  /**
-   * Sets the batch label.
-   */
-  public WorkflowParamsBuilder batch(String batch) {
-    this.batch = batch;
-    return this;
-  }
-
-  /**
-   * Sets the verbosity level.
-   */
-  public WorkflowParamsBuilder verbose(int verbose) {
-    this.verbose = verbose;
-    return this;
-  }
-
-  /**
-   * Sets whether trained models are persisted.
-   */
-  public WorkflowParamsBuilder saveModel(boolean saveModel) {
-    this.saveModel = saveModel;
-    return this;
-  }
-
-  /**
-   * Build a WorkflowParams object.
-   */
-  public WorkflowParams build() {
-    return new WorkflowParams(batch, verbose, saveModel);
-  }
-}
diff --git a/core/src/main/java/io/prediction/controller/java/package-info.java b/core/src/main/java/io/prediction/controller/java/package-info.java
deleted file mode 100644
index 9756e4e..0000000
--- a/core/src/main/java/io/prediction/controller/java/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Provides building blocks for writing a complete prediction engine consisting
- * of DataSource, Preparator, Algorithm, Serving, and Evaluator.
- */
-package io.prediction.controller.java;
diff --git a/core/src/main/java/io/prediction/controller/package-info.java b/core/src/main/java/io/prediction/controller/package-info.java
deleted file mode 100644
index 2ae2146..0000000
--- a/core/src/main/java/io/prediction/controller/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Provides the backend of the Java controller API. Some interfaces can be used
- * directly by Java.
- */
-package io.prediction.controller;
diff --git a/core/src/main/scala/io/prediction/controller/Algorithm.scala b/core/src/main/scala/io/prediction/controller/Algorithm.scala
deleted file mode 100644
index 20489e2..0000000
--- a/core/src/main/scala/io/prediction/controller/Algorithm.scala
+++ /dev/null
@@ -1,265 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseAlgorithm
-// import io.prediction.core.LModelAlgorithm
-import io.prediction.core.WithBaseQuerySerializer
-import io.prediction.workflow.PersistentModelManifest
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-import scala.reflect.runtime.universe._
-
-/** Base class of a parallel algorithm.
-  *
-  * A parallel algorithm can be run in parallel on a cluster and produces a
-  * model that can also be distributed across a cluster.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class PAlgorithm[PD, M, Q : Manifest, P]
-  extends BaseAlgorithm[PD, M, Q, P] {
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to train a model.
-    */
-  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(sc: SparkContext, pd: PD): M
-  
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)]) 
-  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
-
-  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
-    throw new NotImplementedError("batchPredict not implemented")
-    qs.context.emptyRDD[(Long, P)]
-  }
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to perform prediction.
-    */
-  def predictBase(baseModel: Any, query: Q): P = {
-    predict(baseModel.asInstanceOf[M], query)
-  }
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(model: M, query: Q): P
- 
-  override
-  def makePersistentModel(sc: SparkContext, modelId: String, 
-    algoParams: Params, bm: Any)
-  : Any = {
-    // In general, a parallel model may contain multiple RDDs. It is not easy to
-    // infer and persist them programmatically since these RDDs may be
-    // potentially huge.
-
-    // Persist is successful only if the model is an instance of
-    // IPersistentModel and save is successful.
-    val m = bm.asInstanceOf[M]
-    if (m.isInstanceOf[IPersistentModel[_]]) {
-      if (m.asInstanceOf[IPersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      Unit
-    }
-  }
-  
-  def isJava: Boolean = false
-  // def isParallel = true
-}
-
-/** Base class of a local algorithm.
-  *
-  * A local algorithm runs locally within a single machine and produces a model
-  * that can fit within a single machine.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class LAlgorithm[PD, M : ClassTag, Q : Manifest, P]
-  extends BaseAlgorithm[RDD[PD], RDD[M], Q, P] {
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to train a model.
-    */
-  def trainBase(sc: SparkContext, pd: RDD[PD]): RDD[M] = pd.map(train)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(pd: PD): M
-
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
-  : RDD[(Long, P)] = {
-    val mRDD = bm.asInstanceOf[RDD[M]]
-    batchPredict(mRDD, qs)
-  }
-
-  def batchPredict(mRDD: RDD[M], qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
-    val glomQs: RDD[Array[(Long, Q)]] = qs.glom()
-    val cartesian: RDD[(M, Array[(Long, Q)])] = mRDD.cartesian(glomQs)
-    cartesian.flatMap { case (m, qArray) => {
-      qArray.map { case (qx, q) => (qx, predict(m, q)) }
-    }}
-  }
-
-  def predictBase(localBaseModel: Any, q: Q): P = {
-    predict(localBaseModel.asInstanceOf[M], q)
-  }
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(m: M, q: Q): P
-  
-  override
-  def makePersistentModel(sc: SparkContext, modelId: String, 
-    algoParams: Params, bm: Any)
-  : Any = {
-    // LAlgo has local model. By default, the model is serialized into our
-    // storage automatically. User can override this by implementing the
-    // IPersistentModel trait, then we call the save method, upon successful, we
-    // return the Manifest, otherwise, Unit. 
-
-    // Check RDD[M].count == 1
-    val m = bm.asInstanceOf[RDD[M]].first
-    if (m.isInstanceOf[IPersistentModel[_]]) {
-      if (m.asInstanceOf[IPersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      m
-    }
-  }
-  
-  def isJava: Boolean = false
-  // def isParallel = true
-}
-
-/** Base class of a parallel-to-local algorithm.
-  *
-  * A parallel-to-local algorithm can be run in parallel on a cluster and
-  * produces a model that can fit within a single machine.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class P2LAlgorithm[PD, M : ClassTag, Q : Manifest, P]
-  extends BaseAlgorithm[PD, M, Q, P] {
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to train a model.
-    */
-  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(sc: SparkContext, pd: PD): M
-  
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)]) 
-  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
-
-  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
-    qs.mapValues { q => predict(m, q) }
-  }
-
-  def predictBase(bm: Any, q: Q): P = predict(bm.asInstanceOf[M], q)
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(model: M, query: Q): P
-  
-  override
-  def makePersistentModel(sc: SparkContext, modelId: String, 
-    algoParams: Params, bm: Any)
-  : Any = {
-    // P2LAlgo has local model. By default, the model is serialized into our
-    // storage automatically. User can override this by implementing the
-    // IPersistentModel trait, then we call the save method, upon successful, we
-    // return the Manifest, otherwise, Unit. 
-
-    val m = bm.asInstanceOf[M]
-    if (m.isInstanceOf[IPersistentModel[_]]) {
-      if (m.asInstanceOf[IPersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      m
-    }
-  }
-  
-  
-  def isJava: Boolean = false
-  // def isParallel = true
-}
-
-/** Implement in this trait to enable custom json4s serializer.
-  * This is useful when your query requires a custom serializer. The algorithm
-  * classes using this query only need to mix in the trait to enable the custom
-  * serializer.
-  *
-  * @group General
-  */
-trait WithQuerySerializer extends WithBaseQuerySerializer
diff --git a/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala b/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala
new file mode 100644
index 0000000..fc85879
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala
@@ -0,0 +1,36 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseQuerySerializer
+
+/** If your query class cannot be automatically serialized/deserialized to/from
+  * JSON, implement a trait by extending this trait, and overriding the
+  * `querySerializer` member with your
+  * [[https://github.com/json4s/json4s#serializing-non-supported-types custom JSON4S serializer]].
+  * Algorithm and serving classes using your query class would only need to mix
+  * in the trait to enable the custom serializer.
+  *
+  * @group Helper
+  */
+trait CustomQuerySerializer extends BaseQuerySerializer
+
+/** DEPRECATED. Use [[CustomQuerySerializer]] instead.
+  *
+  * @group Helper
+  */
+@deprecated("Use CustomQuerySerializer instead.", "0.9.2")
+trait WithQuerySerializer extends CustomQuerySerializer
\ No newline at end of file
diff --git a/core/src/main/scala/io/prediction/controller/DataSource.scala b/core/src/main/scala/io/prediction/controller/DataSource.scala
deleted file mode 100644
index 7378feb..0000000
--- a/core/src/main/scala/io/prediction/controller/DataSource.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/** Base class of a parallel data source.
-  *
-  * A parallel data source runs locally within a single machine, or in parallel
-  * on a cluster, to return data that is distributed across a cluster.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-
-abstract class PDataSource[TD, EI, Q, A]
-  extends BaseDataSource[TD, EI, Q, A] {
-
-  def readTrainingBase(sc: SparkContext): TD = readTraining(sc)
-
-  /** Implement this method to only return training data from a data source.
-    */
-  def readTraining(sc: SparkContext): TD
-  
-  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
-
-  /** Implement this method to return data from a data source. Returned data
-    * can optionally include a sequence of query and actual value pairs for
-    * evaluation purpose.
-    */
-  def readEval(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = {
-    Seq[(TD, EI, RDD[(Q, A)])]()
-  }
-
-  /* Deprecated, use readEval. */
-  def read(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
-}
-
-/** Base class of a local data source.
-  *
-  * A local data source runs locally within a single machine and return data
-  * that can fit within a single machine.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-abstract class LDataSource[
-    TD : ClassTag,
-    EI : ClassTag,
-    Q,
-    A]
-  extends BaseDataSource[RDD[TD], EI, Q, A] {
-  
-  def readTrainingBase(sc: SparkContext): RDD[TD] = {
-    sc.parallelize(Seq(None)).map(_ => readTraining())
-  }
-
-  /** Implement this method to only return training data from a data source.
-    */
-  def readTraining(): TD
-  
-  /** Implement this method to return data from a data source. Returned data
-    * can optionally include a sequence of query and actual value pairs for
-    * evaluation purpose.
-    */
-  def readEvalBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
-    val localEvalData: Seq[(TD, EI, Seq[(Q, A)])] = readEval()
-
-    localEvalData.map { case (td, ei, qaSeq) => {
-      val tdRDD = sc.parallelize(Seq(None)).map(_ => td)
-      val qaRDD = sc.parallelize(qaSeq)
-      (tdRDD, ei, qaRDD)
-    }}
-  }
-
-  def readEval(): Seq[(TD, EI, Seq[(Q, A)])] = Seq[(TD, EI, Seq[(Q, A)])]()
-  
-  /* Deprecated, use readEval. */
-  def read(): Seq[(TD, EI, Seq[(Q, A)])] = readEval()
-}
diff --git a/core/src/main/scala/io/prediction/controller/Deployment.scala b/core/src/main/scala/io/prediction/controller/Deployment.scala
index e3ee7f9..57ac40d 100644
--- a/core/src/main/scala/io/prediction/controller/Deployment.scala
+++ b/core/src/main/scala/io/prediction/controller/Deployment.scala
@@ -15,42 +15,15 @@
 
 package io.prediction.controller
 
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
 import io.prediction.core.BaseEngine
-// import io.prediction.workflow.EngineWorkflow
-import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.WorkflowUtils
-import io.prediction.workflow.EngineLanguage
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.data.storage.EngineInstance
-import _root_.java.util.NoSuchElementException
-import io.prediction.data.storage.StorageClientException
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
 
 import scala.language.implicitConversions
 
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-
-import io.prediction.workflow.NameParamsSerializer
-import grizzled.slf4j.Logger
-
 /** Defines a deployment that contains an engine.
   *
   * @group Engine
   */
-trait Deployment extends IEngineFactory {
+trait Deployment extends EngineFactory {
   protected[this] var _engine: BaseEngine[_, _, _, _] = _
   protected[this] var engineSet: Boolean = false
 
@@ -74,5 +47,3 @@
     engineSet = true
   }
 }
-
-
diff --git a/core/src/main/scala/io/prediction/controller/Engine.scala b/core/src/main/scala/io/prediction/controller/Engine.scala
index 72b1189..0df9ab7 100644
--- a/core/src/main/scala/io/prediction/controller/Engine.scala
+++ b/core/src/main/scala/io/prediction/controller/Engine.scala
@@ -15,43 +15,40 @@
 
 package io.prediction.controller
 
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
+import grizzled.slf4j.Logger
 import io.prediction.core.BaseAlgorithm
+import io.prediction.core.BaseDataSource
+import io.prediction.core.BaseEngine
+import io.prediction.core.BasePreparator
 import io.prediction.core.BaseServing
 import io.prediction.core.Doer
-import io.prediction.core.BaseEngine
+import io.prediction.data.storage.EngineInstance
+import io.prediction.data.storage.StorageClientException
 import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.WorkflowUtils
 import io.prediction.workflow.EngineLanguage
+import io.prediction.workflow.NameParamsSerializer
 import io.prediction.workflow.PersistentModelManifest
 import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterReadInterruption
 import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.data.storage.EngineInstance
-import _root_.java.util.NoSuchElementException
-import io.prediction.data.storage.StorageClientException
-
+import io.prediction.workflow.StopAfterReadInterruption
+import io.prediction.workflow.WorkflowParams
+import io.prediction.workflow.WorkflowUtils
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
-
-import scala.language.implicitConversions
-
 import org.json4s._
 import org.json4s.native.JsonMethods._
 import org.json4s.native.Serialization.read
 
-import io.prediction.workflow.NameParamsSerializer
-import grizzled.slf4j.Logger
+import scala.language.implicitConversions
 
 /** This class chains up the entire data process. PredictionIO uses this
   * information to create workflows and deployments. In Scala, you should
-  * implement an object that extends the `IEngineFactory` trait similar to the
+  * implement an object that extends the [[EngineFactory]] trait similar to the
   * following example.
   *
   * {{{
-  * object ItemRankEngine extends IEngineFactory {
+  * object ItemRankEngine extends EngineFactory {
   *   def apply() = {
   *     new Engine(
   *       classOf[ItemRankDataSource],
@@ -65,17 +62,17 @@
   * }
   * }}}
   *
-  * @see [[IEngineFactory]]
+  * @see [[EngineFactory]]
   * @tparam TD Training data class.
   * @tparam EI Evaluation info class.
   * @tparam PD Prepared data class.
   * @tparam Q Input query class.
   * @tparam P Output prediction class.
   * @tparam A Actual value class.
-  * @param dataSourceClassMap Map of data source class.
-  * @param preparatorClassMap Map of preparator class.
+  * @param dataSourceClassMap Map of data source names to classes.
+  * @param preparatorClassMap Map of preparator names to classes.
   * @param algorithmClassMap Map of algorithm names to classes.
-  * @param servingClassMap Map of serving class.
+  * @param servingClassMap Map of serving names to classes.
   * @group Engine
   */
 class Engine[TD, EI, PD, Q, P, A](
@@ -86,12 +83,14 @@
     val servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
   extends BaseEngine[EI, Q, P, A] {
 
+  private[prediction]
   implicit lazy val formats = Utils.json4sDefaultFormats +
     new NameParamsSerializer
 
-  @transient lazy val logger = Logger[this.type]
+  @transient lazy protected val logger = Logger[this.type]
 
-  /**
+  /** This auxiliary constructor is provided for backward compatibility.
+    *
     * @param dataSourceClass Data source class.
     * @param preparatorClass Preparator class.
     * @param algorithmClassMap Map of algorithm names to classes.
@@ -108,7 +107,7 @@
       Map("" -> servingClass)
     )
 
-  /** Returns a new Engine instance. Mimic case class's copy method behavior.
+  /** Returns a new Engine instance, mimicking case class's copy method behavior.
     */
   def copy(
     dataSourceClassMap: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]
@@ -126,7 +125,13 @@
       servingClassMap)
   }
 
-  /** Return persistentable models from trained model. */
+  /** Trains this engine and returns a list of models.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single training.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of models.
+    */
   def train(
       sc: SparkContext,
       engineParams: EngineParams,
@@ -163,8 +168,9 @@
 
   /** Algorithm models can be persisted before deploy. However, it is also
     * possible that models are not persisted. This method retrains non-persisted
-    * models and return a list of model that can be used directly in deploy.
+    * models and returns a list of models that can be used directly in deploy.
     */
+  private[prediction]
   def prepareDeploy(
     sc: SparkContext,
     engineParams: EngineParams,
@@ -238,11 +244,11 @@
 
   /** Extract model for persistent layer.
     *
-    * PredictionIO presist models for future use.  It allows custom
+    * PredictionIO persists models for future use. It allows custom
     * implementation for persisting models. You need to implement the
-    * [[io.prediction.controller.IPersistentModel]] interface. This method
+    * [[io.prediction.controller.PersistentModel]] interface. This method
     * traverses all models in the workflow. If the model is a
-    * [[io.prediction.controller.IPersistentModel]], it calls the save method
+    * [[io.prediction.controller.PersistentModel]], it calls the save method
     * for custom persistence logic.
     *
     * For models that do not support custom logic, PredictionIO serializes the whole
@@ -251,7 +257,7 @@
     * method returns Unit, in which case PredictionIO will retrain the whole
     * model from scratch next time it is used.
     */
-  def makeSerializableModels(
+  private def makeSerializableModels(
     sc: SparkContext,
     engineInstanceId: String,
     // AlgoName, Algo, Model
@@ -271,6 +277,15 @@
     }
   }
 
+  /** This is implemented such that [[io.prediction.controller.Evaluation]] can
+    * use this method to generate inputs for [[io.prediction.controller.Metric]].
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of evaluation information, each paired with an RDD of
+    *         (query, predicted result, actual result) tuples.
+    */
   def eval(
     sc: SparkContext, 
     engineParams: EngineParams,
@@ -310,8 +325,7 @@
     Engine.eval(sc, dataSource, preparator, algorithms, serving)
   }
 
-  override
-  def jValueToEngineParams(variantJson: JValue): EngineParams = {
+  override def jValueToEngineParams(variantJson: JValue): EngineParams = {
     val engineLanguage = EngineLanguage.Scala
     // Extract EngineParams
     logger.info(s"Extracting datasource params...")
@@ -369,8 +383,8 @@
       servingParams = servingParams)
   }
 
-  def engineInstanceToEngineParams(engineInstance: EngineInstance)
-  : EngineParams = {
+  private[prediction]
+  def engineInstanceToEngineParams(engineInstance: EngineInstance): EngineParams = {
     implicit val formats = DefaultFormats
     val engineLanguage = EngineLanguage.Scala
 
@@ -436,18 +450,35 @@
   }
 }
 
+/** This object contains concrete implementation for some methods of the
+  * [[Engine]] class.
+  *
+  * @group Engine
+  */
 object Engine {
-  type EX = Int
-  type AX = Int
-  type QX = Long
+  private type EX = Int
+  private type AX = Int
+  private type QX = Long
 
-  @transient lazy val logger = Logger[this.type]
+  @transient lazy private val logger = Logger[this.type]
 
+  /** Helper class to accept either a single data source, or a map of data
+    * sources, with a companion object providing implicit conversions, so
+    * using this class directly is not necessary.
+    *
+    * @tparam TD Training data class
+    * @tparam EI Evaluation information class
+    * @tparam Q Input query class
+    * @tparam A Actual result class
+    */
   class DataSourceMap[TD, EI, Q, A](
     val m: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]) {
     def this(c: Class[_ <: BaseDataSource[TD, EI, Q, A]]) = this(Map("" -> c))
   }
 
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
   object DataSourceMap {
     implicit def cToMap[TD, EI, Q, A](
       c: Class[_ <: BaseDataSource[TD, EI, Q, A]]):
@@ -457,11 +488,21 @@
       DataSourceMap[TD, EI, Q, A] = new DataSourceMap(m)
   }
 
+  /** Helper class to accept either a single preparator, or a map of
+    * preparators, with a companion object providing implicit conversions, so
+    * using this class directly is not necessary.
+    *
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    */
   class PreparatorMap[TD, PD](
     val m: Map[String, Class[_ <: BasePreparator[TD, PD]]]) {
     def this(c: Class[_ <: BasePreparator[TD, PD]]) = this(Map("" -> c))
   }
 
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
   object PreparatorMap {
     implicit def cToMap[TD, PD](
       c: Class[_ <: BasePreparator[TD, PD]]):
@@ -471,11 +512,21 @@
       PreparatorMap[TD, PD] = new PreparatorMap(m)
   }
 
+  /** Helper class to accept either a single serving, or a map of servings, with
+    * a companion object providing implicit conversions, so using this class
+    * directly is not necessary.
+    *
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    */
   class ServingMap[Q, P](
     val m: Map[String, Class[_ <: BaseServing[Q, P]]]) {
     def this(c: Class[_ <: BaseServing[Q, P]]) = this(Map("" -> c))
   }
 
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
   object ServingMap {
     implicit def cToMap[Q, P](
       c: Class[_ <: BaseServing[Q, P]]): ServingMap[Q, P] =
@@ -485,6 +536,26 @@
         new ServingMap(m)
   }
 
+  /** Convenience method for returning an instance of [[Engine]].
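+    *
+    * A hedged usage sketch (the concrete component classes below are
+    * hypothetical; single classes are implicitly converted to the respective
+    * map types):
+    *
+    * {{{
+    * val engine = Engine(
+    *   classOf[MyDataSource],
+    *   classOf[MyPreparator],
+    *   Map("algo" -> classOf[MyAlgorithm]),
+    *   classOf[MyServing])
+    * }}}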
+    *
+    * @param dataSourceMap Accepts either an instance of Class of the data
+    *                      source, or a Map of data source classes (implicitly
+    *                      converted to [[DataSourceMap]]).
+    * @param preparatorMap Accepts either an instance of Class of the
+    *                      preparator, or a Map of preparator classes
+    *                      (implicitly converted to [[PreparatorMap]]).
+    * @param algorithmClassMap Accepts a Map of algorithm classes.
+    * @param servingMap Accepts either an instance of Class of the serving, or
+    *                   a Map of serving classes (implicitly converted to
+    *                   [[ServingMap]]).
+    * @tparam TD Training data class
+    * @tparam EI Evaluation information class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    * @return An instance of [[Engine]]
+    */
   def apply[TD, EI, PD, Q, P, A](
     dataSourceMap: DataSourceMap[TD, EI, Q, A],
     preparatorMap: PreparatorMap[TD, PD],
@@ -496,6 +567,19 @@
       servingMap.m
     )
 
+  /** Provides concrete implementation of training for [[Engine]].
+    *
+    * @param sc An instance of SparkContext
+    * @param dataSource An instance of data source
+    * @param preparator An instance of preparator
+    * @param algorithmList A list of algorithm instances
+    * @param params An instance of [[WorkflowParams]] that controls the training
+    *               process.
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @return A list of trained models
+    */
   def train[TD, PD, Q](
       sc: SparkContext,
       dataSource: BaseDataSource[TD, _, Q, _],
@@ -585,6 +669,22 @@
     models
   }
 
+  /** Provides concrete implementation of evaluation for [[Engine]].
+    *
+    * @param sc An instance of SparkContext
+    * @param dataSource An instance of data source
+    * @param preparator An instance of preparator
+    * @param algorithmList A list of algorithm instances
+    * @param serving An instance of serving
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    * @tparam EI Evaluation information class
+    * @return A list of evaluation information, each paired with an RDD of
+    *         (query, predicted result, actual result) tuples.
+    */
   def eval[TD, PD, Q, P, A, EI](
       sc: SparkContext,
       dataSource: BaseDataSource[TD, EI, Q, A],
@@ -672,54 +772,13 @@
   }
 }
 
-
-/** If you intend to let PredictionIO create workflow and deploy serving
-  * automatically, you will need to implement an object that extends this trait
-  * and return an [[Engine]].
-  *
-  * @group Engine
-  */
-trait IEngineFactory {
-  /** Creates an instance of an [[Engine]]. */
-  def apply(): BaseEngine[_, _, _, _]
-
-  /** Override this method to programmatically return engine parameters. */
-  def engineParams(key: String): EngineParams = EngineParams()
-}
-
-
-/** Defines an engine parameters generator.
-  *
-  * Implementations of this trait can be supplied to "pio eval" as the second
-  * argument.
-  *
-  * @group Evaluation
-  */
-trait EngineParamsGenerator {
-  protected[this] var epList: Seq[EngineParams] = _
-  protected[this] var epListSet: Boolean = false
-
-  /** Returns the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
-  def engineParamsList: Seq[EngineParams] = {
-    assert(epListSet, "EngineParamsList not set")
-    epList
-  }
-
-  /** Sets the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
-  def engineParamsList_=(l: Seq[EngineParams]) {
-    assert(!epListSet, "EngineParamsList can bet set at most once")
-    epList = Seq(l:_*)
-    epListSet = true
-  }
-}
-
-
 /** Mix in this trait for queries that contain prId (PredictedResultId).
   * This is useful when your engine expects queries to also be associated with
   * prId keys when feedback loop is enabled.
   *
-  * @group General
+  * @group Helper
   */
+@deprecated("To be removed in future releases.", "0.9.2")
 trait WithPrId {
   val prId: String = ""
 }
diff --git a/core/src/main/scala/io/prediction/controller/EngineFactory.scala b/core/src/main/scala/io/prediction/controller/EngineFactory.scala
new file mode 100644
index 0000000..5038b2c
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/EngineFactory.scala
@@ -0,0 +1,41 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseEngine
+
+import scala.language.implicitConversions
+
+/** If you intend to let PredictionIO create the workflow and deploy serving
+  * automatically, you will need to implement an object that extends this trait
+  * and returns an [[Engine]].
+  *
+  * @group Engine
+  */
+trait EngineFactory {
+  /** Creates an instance of an [[Engine]]. */
+  def apply(): BaseEngine[_, _, _, _]
+
+  /** Override this method to programmatically return engine parameters. */
+  def engineParams(key: String): EngineParams = EngineParams()
+}
+
+/** DEPRECATED. Use [[EngineFactory]] instead.
+  *
+  * @group Engine
+  */
+@deprecated("Use EngineFactory instead.", "0.9.2")
+trait IEngineFactory extends EngineFactory
diff --git a/core/src/main/scala/io/prediction/controller/EngineParams.scala b/core/src/main/scala/io/prediction/controller/EngineParams.scala
index 9040ce3..af04652 100644
--- a/core/src/main/scala/io/prediction/controller/EngineParams.scala
+++ b/core/src/main/scala/io/prediction/controller/EngineParams.scala
@@ -16,35 +16,10 @@
 package io.prediction.controller
 
 import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
 import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.core.BaseEngine
-import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.WorkflowUtils
-import io.prediction.workflow.EngineLanguage
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.data.storage.EngineInstance
-import _root_.java.util.NoSuchElementException
-import io.prediction.data.storage.StorageClientException
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
 
 import scala.language.implicitConversions
 
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-
-import io.prediction.workflow.NameParamsSerializer
-import grizzled.slf4j.Logger
-
 /** This class serves as a logical grouping of all required engine's parameters.
   *
   * @param dataSourceParams Data Source name-parameters tuple.
diff --git a/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala b/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
new file mode 100644
index 0000000..a9bf3eb
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
@@ -0,0 +1,43 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import scala.language.implicitConversions
+
+/** Defines an engine parameters generator.
+  *
+  * Implementations of this trait can be supplied to "pio eval" as the second
+  * command line argument.
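+  *
+  * A minimal sketch is shown below; a real generator would typically vary
+  * algorithm parameters between the entries:
+  *
+  * {{{
+  * object MyEngineParamsList extends EngineParamsGenerator {
+  *   engineParamsList = Seq(EngineParams(), EngineParams())
+  * }
+  * }}}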
+  *
+  * @group Evaluation
+  */
+trait EngineParamsGenerator {
+  protected[this] var epList: Seq[EngineParams] = _
+  protected[this] var epListSet: Boolean = false
+
+  /** Returns the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
+  def engineParamsList: Seq[EngineParams] = {
+    assert(epListSet, "EngineParamsList not set")
+    epList
+  }
+
+  /** Sets the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
+  def engineParamsList_=(l: Seq[EngineParams]) {
+    assert(!epListSet, "EngineParamsList can be set at most once")
+    epList = Seq(l:_*)
+    epListSet = true
+  }
+}
diff --git a/core/src/main/scala/io/prediction/controller/Evaluation.scala b/core/src/main/scala/io/prediction/controller/Evaluation.scala
index 41e1f41..51ac005 100644
--- a/core/src/main/scala/io/prediction/controller/Evaluation.scala
+++ b/core/src/main/scala/io/prediction/controller/Evaluation.scala
@@ -15,40 +15,13 @@
 
 package io.prediction.controller
 
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
 import io.prediction.core.BaseEvaluator
 import io.prediction.core.BaseEvaluatorResult
-import io.prediction.core.Doer
 import io.prediction.core.BaseEngine
 // import io.prediction.workflow.EngineWorkflow
-import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.WorkflowUtils
-import io.prediction.workflow.EngineLanguage
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.data.storage.EngineInstance
-import _root_.java.util.NoSuchElementException
-import io.prediction.data.storage.StorageClientException
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
 
 import scala.language.implicitConversions
 
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.writePretty
-
-import io.prediction.workflow.NameParamsSerializer
-import grizzled.slf4j.Logger
-
 /** Defines an evaluation that contains an engine and a metric.
   *
   * Implementations of this trait can be supplied to "pio eval" as the first
diff --git a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
index 4928393..831f438 100644
--- a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
+++ b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
@@ -20,31 +20,17 @@
 import io.prediction.core.BaseAlgorithm
 import io.prediction.core.BaseServing
 import io.prediction.core.Doer
-import io.prediction.core.BaseEngine
-import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.WorkflowUtils
-import io.prediction.workflow.EngineLanguage
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.data.storage.EngineInstance
 import io.prediction.annotation.Experimental
-import _root_.java.util.NoSuchElementException
-import io.prediction.data.storage.StorageClientException
 
+import grizzled.slf4j.Logger
+import io.prediction.workflow.WorkflowParams
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 import scala.language.implicitConversions
 
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-
-import io.prediction.workflow.NameParamsSerializer
-import grizzled.slf4j.Logger
+import _root_.java.util.NoSuchElementException
 
 import scala.collection.mutable.{ HashMap => MutableHashMap }
 
@@ -63,27 +49,27 @@
   }
 
   case class PreparatorPrefix(
-    val dataSourceParams: (String, Params),
-    val preparatorParams: (String, Params)) {
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params)) {
     def this(ap: AlgorithmsPrefix) = {
       this(ap.dataSourceParams, ap.preparatorParams)
     }
   }
   
   case class AlgorithmsPrefix(
-    val dataSourceParams: (String, Params),
-    val preparatorParams: (String, Params),
-    val algorithmParamsList: Seq[(String, Params)]) {
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params),
+    algorithmParamsList: Seq[(String, Params)]) {
     def this(sp: ServingPrefix) = {
       this(sp.dataSourceParams, sp.preparatorParams, sp.algorithmParamsList)
     }
   }
 
   case class ServingPrefix(
-    val dataSourceParams: (String, Params),
-    val preparatorParams: (String, Params),
-    val algorithmParamsList: Seq[(String, Params)],
-    val servingParams: (String, Params)) {
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params),
+    algorithmParamsList: Seq[(String, Params)],
+    servingParams: (String, Params)) {
     def this(ep: EngineParams) = this(
       ep.dataSourceParams,
       ep.preparatorParams,
diff --git a/core/src/main/scala/io/prediction/controller/IPersistentModel.scala b/core/src/main/scala/io/prediction/controller/IPersistentModel.scala
deleted file mode 100644
index b101529..0000000
--- a/core/src/main/scala/io/prediction/controller/IPersistentModel.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import org.apache.spark.SparkContext
-
-/** Mix in and implement this trait if your model cannot be persisted by
-  * PredictionIO automatically. A companion object extending
-  * IPersistentModelLoader is required for PredictionIO to load the persisted
-  * model automatically during deployment.
-  *
-  * {{{
-  * class MyModel extends IPersistentModel[MyParams] {
-  *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
-  *     ...
-  *   }
-  * }
-  *
-  * object MyModel extends IPersistentModelLoader[MyParams, MyModel] {
-  *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
-  *     ...
-  *   }
-  * }
-  * }}}
-  *
-  * In Java, all you need to do is to implement this interface, and add a static
-  * method with 3 arguments of type String, [[Params]], and SparkContext.
-  *
-  * {{{
-  * public class MyModel implements IPersistentModel<MyParams>, Serializable {
-  *   ...
-  *   public boolean save(String id, MyParams params, SparkContext sc) {
-  *     ...
-  *   }
-  *
-  *   public static MyModel load(String id, Params params, SparkContext sc) {
-  *     ...
-  *   }
-  *   ...
-  * }
-  * }}}
-  *
-  * @tparam AP Algorithm parameters class.
-  * @see [[IPersistentModelLoader]]
-  * @group Algorithm
-  */
-trait IPersistentModel[AP <: Params] {
-  /** Save the model to some persistent storage.
-    *
-    * This method should return true if the model has been saved successfully so
-    * that PredictionIO knows that it can be restored later during deployment.
-    * This method should return false if the model cannot be saved (or should
-    * not be saved due to configuration) so that PredictionIO will re-train the
-    * model during deployment. All arguments of this method are provided by
-    * automatically by PredictionIO.
-    *
-    * @param id ID of the run that trained this model.
-    * @param params Algorithm parameters that were used to train this model.
-    * @param sc An Apache Spark context.
-    */
-  def save(id: String, params: AP, sc: SparkContext): Boolean
-}
-
-/** Implement an object that extends this trait for PredictionIO to support
-  * loading a persisted model during serving deployment.
-  *
-  * @tparam AP Algorithm parameters class.
-  * @tparam M Model class.
-  * @see [[IPersistentModel]]
-  * @group Algorithm
-  */
-trait IPersistentModelLoader[AP <: Params, M] {
-  /** Implement this method to restore a persisted model that extends the
-    * [[IPersistentModel]] trait. All arguments of this method are provided
-    * automatically by PredictionIO.
-    *
-    * @param id ID of the run that trained this model.
-    * @param params Algorithm parameters that were used to train this model.
-    * @param sc An optional Apache Spark context. This will be injected if the
-    *           model was generated by a [[PAlgorithm]].
-    */
-  def apply(id: String, params: AP, sc: Option[SparkContext]): M
-}
-
-/** Mix in this trait if your model cannot be persisted to PredictionIO's
-  * metadata store for any reason and want to have it persisted to local
-  * filesystem instead. These traits contain concrete implementation and need
-  * not be implemented.
-  *
-  * {{{
-  * class MyModel extends IFSPersistentModel[MyParams] {
-  *   ...
-  * }
-  *
-  * object MyModel extends IFSPersistentModelLoader[MyParams, MyModel] {
-  *   ...
-  * }
-  * }}}
-  *
-  * @tparam AP Algorithm parameters class.
-  * @see [[IFSPersistentModelLoader]]
-  * @group Algorithm
-  */
-trait IFSPersistentModel[AP <: Params] extends IPersistentModel[AP] {
-  def save(id: String, params: AP, sc: SparkContext): Boolean = {
-    Utils.save(id, this)
-    true
-  }
-}
-
-/** Implement an object that extends this trait for PredictionIO to support
-  * loading a persisted model from local filesystem during serving deployment.
-  *
-  * @tparam AP Algorithm parameters class.
-  * @tparam M Model class.
-  * @see [[IFSPersistentModel]]
-  * @group Algorithm
-  */
-trait IFSPersistentModelLoader[AP <: Params, M]
-  extends IPersistentModelLoader[AP, M] {
-  def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
-    Utils.load(id).asInstanceOf[M]
-  }
-}
diff --git a/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala b/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala
new file mode 100644
index 0000000..2ad9e39
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala
@@ -0,0 +1,93 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseDataSource
+import io.prediction.core.BasePreparator
+import org.apache.spark.SparkContext
+
+import scala.reflect._
+
+/** A helper concrete implementation of [[io.prediction.core.BasePreparator]]
+  * that passes training data through without any special preparation. This can
+  * be used in place of both [[PPreparator]] and [[LPreparator]].
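+  *
+  * For example, when constructing an [[Engine]] (a hedged sketch; the data
+  * source, algorithm, and serving classes are hypothetical):
+  *
+  * {{{
+  * new Engine(
+  *   classOf[MyDataSource],
+  *   IdentityPreparator(classOf[MyDataSource]),
+  *   Map("algo" -> classOf[MyAlgorithm]),
+  *   classOf[MyServing])
+  * }}}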
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
+  private[prediction]
+  def prepareBase(sc: SparkContext, td: TD): TD = td
+}
+
+/** Companion object of [[IdentityPreparator]] that conveniently returns an
+  * instance of the class of [[IdentityPreparator]] for use with
+  * [[EngineFactory]].
+  *
+  * @group Preparator
+  */
+object IdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+class PIdentityPreparator[TD] extends IdentityPreparator[TD]
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+object PIdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+class LIdentityPreparator[TD] extends IdentityPreparator[TD]
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+object LIdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}
diff --git a/core/src/main/scala/io/prediction/controller/LAlgorithm.scala b/core/src/main/scala/io/prediction/controller/LAlgorithm.scala
new file mode 100644
index 0000000..7481a85
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LAlgorithm.scala
@@ -0,0 +1,112 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseAlgorithm
+import io.prediction.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local algorithm.
+  *
+  * A local algorithm runs locally within a single machine and produces a model
+  * that can fit within a single machine.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
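+  *
+  * A minimal sketch of a subclass (`MyPreparedData`, with a `labels` member,
+  * and `MyQuery` are hypothetical):
+  *
+  * {{{
+  * case class MeanModel(mean: Double)
+  *
+  * class MeanAlgorithm extends LAlgorithm[MyPreparedData, MeanModel, MyQuery, Double] {
+  *   def train(pd: MyPreparedData): MeanModel = MeanModel(pd.labels.sum / pd.labels.size)
+  *   def predict(m: MeanModel, q: MyQuery): Double = m.mean
+  * }
+  * }}}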
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class LAlgorithm[PD, M : ClassTag, Q : Manifest, P]
+  extends BaseAlgorithm[RDD[PD], RDD[M], Q, P] {
+
+  /** Do not use directly or override this method, as this is called by
+    * PredictionIO workflow to train a model.
+    */
+  private[prediction]
+  def trainBase(sc: SparkContext, pd: RDD[PD]): RDD[M] = pd.map(train)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(pd: PD): M
+
+  private[prediction]
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = {
+    val mRDD = bm.asInstanceOf[RDD[M]]
+    batchPredict(mRDD, qs)
+  }
+
+  private[prediction]
+  def batchPredict(mRDD: RDD[M], qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
+    val glomQs: RDD[Array[(Long, Q)]] = qs.glom()
+    val cartesian: RDD[(M, Array[(Long, Q)])] = mRDD.cartesian(glomQs)
+    cartesian.flatMap { case (m, qArray) => {
+      qArray.map { case (qx, q) => (qx, predict(m, q)) }
+    }}
+  }
+
+  private[prediction]
+  def predictBase(localBaseModel: Any, q: Q): P = {
+    predict(localBaseModel.asInstanceOf[M], q)
+  }
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param m Trained model produced by [[train]].
+    * @param q An input query.
+    * @return A prediction.
+    */
+  def predict(m: M, q: Q): P
+
+  private[prediction]
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    // LAlgorithm produces a local model. By default, the model is serialized
+    // into PredictionIO's storage automatically. Users can override this by
+    // implementing the PersistentModel trait; in that case we call its save
+    // method and, upon success, return the manifest, otherwise Unit.
+
+    // Check RDD[M].count == 1
+    val m = bm.asInstanceOf[RDD[M]].first
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      m
+    }
+  }
+}
diff --git a/core/src/main/scala/io/prediction/controller/LAverageServing.scala b/core/src/main/scala/io/prediction/controller/LAverageServing.scala
new file mode 100644
index 0000000..80981ab
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LAverageServing.scala
@@ -0,0 +1,41 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseAlgorithm
+
+/** A concrete implementation of [[LServing]] returning the average of all
+  * algorithms' predictions, which are all expected to be of type Double.
+  *
+  * @group Serving
+  */
+class LAverageServing[Q] extends LServing[Q, Double] {
+  /** Returns the average of all algorithms' predictions. */
+  def serve(query: Q, predictions: Seq[Double]): Double = {
+    predictions.sum / predictions.length
+  }
+}
+
+/** A concrete implementation of [[LServing]] returning the average of all
+  * algorithms' predictions, which are all expected to be of type Double.
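+  *
+  * For example, it can be supplied directly when constructing an [[Engine]]
+  * (a hedged sketch; the other component classes are hypothetical and
+  * `MyAlgorithm` is assumed to predict Double values):
+  *
+  * {{{
+  * new Engine(
+  *   classOf[MyDataSource],
+  *   classOf[MyPreparator],
+  *   Map("algo" -> classOf[MyAlgorithm]),
+  *   LAverageServing(classOf[MyAlgorithm]))
+  * }}}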
+  *
+  * @group Serving
+  */
+object LAverageServing {
+  /** Returns an instance of [[LAverageServing]]. */
+  def apply[Q](a: Class[_ <: BaseAlgorithm[_, _, Q, _]]): Class[LAverageServing[Q]] =
+    classOf[LAverageServing[Q]]
+}
diff --git a/core/src/main/scala/io/prediction/controller/LDataSource.scala b/core/src/main/scala/io/prediction/controller/LDataSource.scala
new file mode 100644
index 0000000..703bb20
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LDataSource.scala
@@ -0,0 +1,70 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseDataSource
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local data source.
+  *
+  * A local data source runs locally within a single machine and returns data
+  * that can fit within a single machine.
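+  *
+  * A minimal sketch (the training data, query, and actual result types are
+  * hypothetical; evaluation support is omitted):
+  *
+  * {{{
+  * case class MyTrainingData(labels: Seq[Double])
+  *
+  * class MyDataSource extends LDataSource[MyTrainingData, Unit, MyQuery, Double] {
+  *   def readTraining(): MyTrainingData = MyTrainingData(Seq(1.0, 2.0, 3.0))
+  * }
+  * }}}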
+  *
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+abstract class LDataSource[TD : ClassTag, EI : ClassTag, Q, A]
+  extends BaseDataSource[RDD[TD], EI, Q, A] {
+
+  private[prediction]
+  def readTrainingBase(sc: SparkContext): RDD[TD] = {
+    sc.parallelize(Seq(None)).map(_ => readTraining())
+  }
+
+  /** Implement this method to only return training data from a data source.
+    */
+  def readTraining(): TD
+
+  private[prediction]
+  def readEvalBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
+    val localEvalData: Seq[(TD, EI, Seq[(Q, A)])] = readEval()
+
+    localEvalData.map { case (td, ei, qaSeq) => {
+      val tdRDD = sc.parallelize(Seq(None)).map(_ => td)
+      val qaRDD = sc.parallelize(qaSeq)
+      (tdRDD, ei, qaRDD)
+    }}
+  }
+
+  /** To provide an evaluation feature for your engine, you must override this
+    * method to return data for evaluation from a data source. Returned data can
+    * optionally include a sequence of query and actual value pairs for
+    * evaluation purposes.
+    *
+    * The default implementation returns an empty sequence as a stub, so that
+    * an engine can be compiled without implementing evaluation.
+    */
+  def readEval(): Seq[(TD, EI, Seq[(Q, A)])] = Seq[(TD, EI, Seq[(Q, A)])]()
+
+  @deprecated("Use readEval() instead.", "0.9.0")
+  def read(): Seq[(TD, EI, Seq[(Q, A)])] = readEval()
+}
diff --git a/core/src/main/scala/io/prediction/controller/LFirstServing.scala b/core/src/main/scala/io/prediction/controller/LFirstServing.scala
new file mode 100644
index 0000000..970815e
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LFirstServing.scala
@@ -0,0 +1,39 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseAlgorithm
+
+/** A concrete implementation of [[LServing]] returning the first algorithm's
+  * prediction result directly without any modification.
+  *
+  * @group Serving
+  */
+class LFirstServing[Q, P] extends LServing[Q, P] {
+  /** Returns the first algorithm's prediction. */
+  def serve(query: Q, predictions: Seq[P]): P = predictions.head
+}
+
+/** A concrete implementation of [[LServing]] returning the first algorithm's
+  * prediction result directly without any modification.
+  *
+  * @group Serving
+  */
+object LFirstServing {
+  /** Returns an instance of [[LFirstServing]]. */
+  def apply[Q, P](a: Class[_ <: BaseAlgorithm[_, _, Q, P]]): Class[LFirstServing[Q, P]] =
+    classOf[LFirstServing[Q, P]]
+}
diff --git a/core/src/main/scala/io/prediction/controller/LPreparator.scala b/core/src/main/scala/io/prediction/controller/LPreparator.scala
new file mode 100644
index 0000000..926999f
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LPreparator.scala
@@ -0,0 +1,47 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BasePreparator
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local preparator.
+  *
+  * A local preparator runs locally within a single machine and produces
+  * prepared data that can fit within a single machine.
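+  *
+  * A minimal sketch (`MyTrainingData` and `MyPreparedData` are hypothetical
+  * case classes with a `labels` member):
+  *
+  * {{{
+  * class MyPreparator extends LPreparator[MyTrainingData, MyPreparedData] {
+  *   def prepare(trainingData: MyTrainingData): MyPreparedData =
+  *     MyPreparedData(trainingData.labels.filter(_ >= 0.0))
+  * }
+  * }}}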
+  *
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class LPreparator[TD, PD : ClassTag]
+  extends BasePreparator[RDD[TD], RDD[PD]] {
+
+  private[prediction]
+  def prepareBase(sc: SparkContext, rddTd: RDD[TD]): RDD[PD] = {
+    rddTd.map(prepare)
+  }
+
+  /** Implement this method to produce prepared data that is ready for model
+    * training.
+    *
+    * @param trainingData Training data to be prepared.
+    */
+  def prepare(trainingData: TD): PD
+}
diff --git a/core/src/main/scala/io/prediction/controller/LServing.scala b/core/src/main/scala/io/prediction/controller/LServing.scala
new file mode 100644
index 0000000..6496b99
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LServing.scala
@@ -0,0 +1,39 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseServing
+
+/** Base class of serving. 
+  *
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Serving
+  */
+abstract class LServing[Q, P] extends BaseServing[Q, P] {
+  private[prediction]
+  def serveBase(q: Q, ps: Seq[P]): P = {
+    serve(q, ps)
+  }
+
+  /** Implement this method to combine multiple algorithms' predictions to
+    * produce a single final prediction.
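+    *
+    * A minimal sketch that serves the largest prediction (assuming predictions
+    * are of type Double):
+    *
+    * {{{
+    * class MaxServing[Q] extends LServing[Q, Double] {
+    *   def serve(query: Q, predictions: Seq[Double]): Double = predictions.max
+    * }
+    * }}}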
+    *
+    * @param query Input query.
+    * @param predictions A list of algorithms' predictions.
+    */
+  def serve(query: Q, predictions: Seq[P]): P
+}
diff --git a/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala b/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala
new file mode 100644
index 0000000..e9f0592
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala
@@ -0,0 +1,74 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import org.apache.spark.SparkContext
+
+/** This trait is a convenience helper for persisting your model to the local
+  * filesystem. This trait and [[LocalFileSystemPersistentModelLoader]] contain
+  * concrete implementation and need not be implemented.
+  *
+  * The underlying implementation is [[Utils.save]].
+  *
+  * {{{
+  * class MyModel extends LocalFileSystemPersistentModel[MyParams] {
+  *   ...
+  * }
+  *
+  * object MyModel extends LocalFileSystemPersistentModelLoader[MyParams, MyModel] {
+  *   ...
+  * }
+  * }}}
+  *
+  * @tparam AP Algorithm parameters class.
+  * @see [[LocalFileSystemPersistentModelLoader]]
+  * @group Algorithm
+  */
+trait LocalFileSystemPersistentModel[AP <: Params] extends PersistentModel[AP] {
+  def save(id: String, params: AP, sc: SparkContext): Boolean = {
+    Utils.save(id, this)
+    true
+  }
+}
+
+/** Implement an object that extends this trait for PredictionIO to support
+  * loading a persisted model from local filesystem during serving deployment.
+  *
+  * The underlying implementation is [[Utils.load]].
+  *
+  * @tparam AP Algorithm parameters class.
+  * @tparam M Model class.
+  * @see [[LocalFileSystemPersistentModel]]
+  * @group Algorithm
+  */
+trait LocalFileSystemPersistentModelLoader[AP <: Params, M]
+  extends PersistentModelLoader[AP, M] {
+  def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
+    Utils.load(id).asInstanceOf[M]
+  }
+}
+
+/** DEPRECATED. Use [[LocalFileSystemPersistentModel]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use LocalFileSystemPersistentModel instead.", "0.9.2")
+trait IFSPersistentModel[AP <: Params] extends LocalFileSystemPersistentModel[AP]
+
+/** DEPRECATED. Use [[LocalFileSystemPersistentModelLoader]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use LocalFileSystemPersistentModelLoader instead.", "0.9.2")
+trait IFSPersistentModelLoader[AP <: Params, M] extends LocalFileSystemPersistentModelLoader[AP, M]
diff --git a/core/src/main/scala/io/prediction/controller/Metric.scala b/core/src/main/scala/io/prediction/controller/Metric.scala
index 97c8e6e..3e84e1c 100644
--- a/core/src/main/scala/io/prediction/controller/Metric.scala
+++ b/core/src/main/scala/io/prediction/controller/Metric.scala
@@ -20,8 +20,9 @@
 import org.apache.spark.rdd.RDD
 
 import scala.reflect._
-import scala.reflect.runtime.universe._
-import grizzled.slf4j.Logger
+
+import Numeric.Implicits._   
+import org.apache.spark.util.StatCounter
 
 /** Base class of a [[Metric]].
   *
@@ -35,7 +36,7 @@
 abstract class Metric[EI, Q, P, A, R](implicit rOrder: Ordering[R])
 extends Serializable {
   /** Class name of this [[Metric]]. */
-  def header: String = this.getClass.getName
+  def header: String = this.getClass.getSimpleName
 
   /** Calculates the result of this [[Metric]]. */
   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): R
@@ -44,6 +45,36 @@
   def compare(r0: R, r1: R): Int = rOrder.compare(r0, r1)
 }
 
+private [prediction] trait StatsMetricHelper[EI, Q, P, A] {
+  def calculate(q: Q, p: P, a: A): Double
+
+  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : StatCounter = {
+    val doubleRDD = sc.union(
+      evalDataSet.map { case (_, qpaRDD) => 
+        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+   
+    doubleRDD.stats()
+  }
+}
+
+private [prediction] trait StatsOptionMetricHelper[EI, Q, P, A] {
+  def calculate(q: Q, p: P, a: A): Option[Double]
+
+  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : StatCounter = {
+    val doubleRDD = sc.union(
+      evalDataSet.map { case (_, qpaRDD) => 
+        qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+   
+    doubleRDD.stats()
+  }
+}
+
 /** Returns the global average of the score returned by the calculate method.
   *
   * @tparam EI Evaluation information
@@ -54,7 +85,9 @@
   * @group Evaluation
   */
 abstract class AverageMetric[EI, Q, P, A]
-extends Metric[EI, Q, P, A, Double] {
+    extends Metric[EI, Q, P, A, Double]
+    with StatsMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Double] {
   /** Implement this method to return a score that will be used for averaging
     * across all QPA tuples.
     */
@@ -62,45 +95,7 @@
 
   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : Double = {
-    // TODO(yipjustin): Parallelize
-    val r: Seq[(Double, Long)] = evalDataSet
-    .map { case (_, qpaRDD) =>
-      val s = qpaRDD.map { case (q, p, a) => calculate(q, p, a) }.reduce(_ + _)
-      val c = qpaRDD.count
-      (s, c)
-    }
-
-    (r.map(_._1).sum / r.map(_._2).sum)
-  }
-}
-
-/** Returns the sum of the score returned by the calculate method.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-abstract class SumMetric[EI, Q, P, A]
-extends Metric[EI, Q, P, A, Double] {
-  /** Implement this method to return a score that will be used for summing
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): Double
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : Double = {
-    // TODO(yipjustin): Parallelize
-    val r: Seq[Double] = evalDataSet
-    .map { case (_, qpaRDD) => 
-      qpaRDD
-      .map { case (q, p, a) => calculate(q, p, a) }
-      .aggregate[Double](0.0)(_ + _, _ + _) 
-    }
-
-    r.sum
+    calculateStats(sc, evalDataSet).mean
   }
 }
 
@@ -115,7 +110,9 @@
   * @group Evaluation
   */
 abstract class OptionAverageMetric[EI, Q, P, A]
-extends Metric[EI, Q, P, A, Double] {
+    extends Metric[EI, Q, P, A, Double] 
+    with StatsOptionMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Option[Double]] {
   /** Implement this method to return a score that will be used for averaging
     * across all QPA tuples.
     */
@@ -123,26 +120,99 @@
 
   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : Double = {
-    val r: Seq[(Double, Long)] = evalDataSet
-    .par
-    .map { case (_, qpaRDD) =>
-      val scores: RDD[Double]  = qpaRDD
-      .map { case (q, p, a) => calculate(q, p, a) }
-      .filter(!_.isEmpty)
-      .map(_.get)
-
-      scores.aggregate((0.0, 0L))( 
-        (u, v) => (u._1 + v, u._2 + 1),
-        (u, v) => (u._1 + v._1, u._2 + v._2))
-    }
-    .seq
-
-    val c = r.map(_._2).sum
-    if (c > 0) {
-      (r.map(_._1).sum / r.map(_._2).sum)
-    } else {
-      Double.NegativeInfinity
-    }
+    calculateStats(sc, evalDataSet).mean
   }
 }
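
A sketch of the Option variant, again with hypothetical classes; tuples for which calculate returns None are excluded from the average, which is convenient for metrics such as precision that only consider a subset of predictions:

import io.prediction.controller.OptionAverageMetric

// Hypothetical engine classes used only for this sketch.
case class EvalInfo()
case class Query(user: String)
case class PredictedResult(score: Double)
case class ActualResult(relevant: Boolean)

// Precision at a score threshold: only tuples predicted positive contribute to the mean.
class Precision(threshold: Double)
    extends OptionAverageMetric[EvalInfo, Query, PredictedResult, ActualResult] {
  def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
    if (p.score >= threshold) {
      Some(if (a.relevant) 1.0 else 0.0)
    } else {
      None // not predicted positive, so the tuple does not affect precision
    }
  }
}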
 
+/** Returns the global stdev of the score returned by the calculate method.
+  *
+  * This metric uses [[org.apache.spark.util.StatCounter]], which computes the
+  * standard deviation in a single pass over the data.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class StdevMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Double] {
+  /** Implement this method to return a score that will be used for calculating
+    * the stdev across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Double
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).stdev
+  }
+}
+
+/** Returns the global stdev of the non-None scores returned by the calculate method.
+  *
+  * This metric uses [[org.apache.spark.util.StatCounter]], which computes the
+  * standard deviation in a single pass over the data.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class OptionStdevMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsOptionMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Option[Double]] {
+  /** Implement this method to return a score that will be used for calculating
+    * the stdev across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Option[Double]
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).stdev
+  }
+}
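+
A sketch of how the stdev metrics might be used, for example to report the spread of the absolute prediction error alongside its mean (all classes below are hypothetical):

import io.prediction.controller.StdevMetric

// Hypothetical engine classes used only for this sketch.
case class EvalInfo()
case class Query(user: String)
case class PredictedResult(rating: Double)
case class ActualResult(rating: Double)

// Standard deviation of the absolute error, computed in a single pass via StatCounter.
class AbsoluteErrorStdev
    extends StdevMetric[EvalInfo, Query, PredictedResult, ActualResult] {
  def calculate(q: Query, p: PredictedResult, a: ActualResult): Double =
    math.abs(p.rating - a.rating)
}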
+
+/** Returns the sum of the score returned by the calculate method. 
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  * @tparam R Result, output of the calculate method; must have a Numeric instance
+  *
+  * @group Evaluation
+  */
+abstract class SumMetric[EI, Q, P, A, R: ClassTag](implicit num: Numeric[R])
+    extends Metric[EI, Q, P, A, R]()(num)
+    with QPAMetric[Q, P, A, R] {
+  /** Implement this method to return a score that will be used for summing
+    * across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): R
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : R = {
+    val union: RDD[R] = sc.union(
+      evalDataSet.map { case (_, qpaRDD) => 
+        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+
+    union.aggregate[R](num.zero)(_ + _, _ + _)
+  }
+}
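+
A sketch of a SumMetric with a non-Double result type; any R with a Numeric instance and a ClassTag works, for example Long for counting (hypothetical classes):

import io.prediction.controller.SumMetric

// Hypothetical engine classes used only for this sketch.
case class EvalInfo()
case class Query(user: String)
case class PredictedResult(label: String)
case class ActualResult(label: String)

// Counts how many predictions match the actual label across the whole evaluation set.
class CorrectCount
    extends SumMetric[EvalInfo, Query, PredictedResult, ActualResult, Long] {
  def calculate(q: Query, p: PredictedResult, a: ActualResult): Long =
    if (p.label == a.label) 1L else 0L
}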
+
+
+/** Trait for a metric that returns a score based on Query, PredictedResult, and ActualResult.
+  */
+trait QPAMetric[Q, P, A, R] {
+  def calculate(q: Q, p: P, a: A): R
+}
diff --git a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
index 813f28b..cc9af35 100644
--- a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
+++ b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
@@ -15,55 +15,36 @@
 
 package io.prediction.controller
 
-import com.github.nscala_time.time.Imports.DateTime
-import com.twitter.chill.KryoInjection
-import grizzled.slf4j.{ Logger, Logging }
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BaseEngine
 import io.prediction.core.BaseEvaluator
 import io.prediction.core.BaseEvaluatorResult
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineInstances
-import io.prediction.data.storage.Model
 import io.prediction.data.storage.Storage
 import io.prediction.workflow.NameParamsSerializer
-import _root_.java.io.FileInputStream
-import _root_.java.io.FileOutputStream
-import _root_.java.io.ObjectInputStream
-import _root_.java.io.ObjectOutputStream
-import _root_.java.io.PrintWriter
-import _root_.java.io.File
-import _root_.java.lang.{ Iterable => JIterable }
-import _root_.java.util.{ HashMap => JHashMap, Map => JMap }
-import org.apache.spark.SparkConf
+
+import com.github.nscala_time.time.Imports.DateTime
+import grizzled.slf4j.Logger
+import io.prediction.workflow.WorkflowParams
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
-import org.json4s._
 import org.json4s.native.Serialization.write
 import org.json4s.native.Serialization.writePretty
-import scala.collection.JavaConversions._
+
 import scala.language.existentials
-import scala.reflect.ClassTag
-import scala.reflect.Manifest
-import scala.collection.mutable.StringBuilder
+
+import _root_.java.io.PrintWriter
+import _root_.java.io.File
 
 case class MetricScores[R](
-  val score: R, 
-  val otherScores: Seq[Any])
+  score: R,
+  otherScores: Seq[Any])
 
 case class MetricEvaluatorResult[R](
-  val bestScore: MetricScores[R],
-  val bestEngineParams: EngineParams,
-  val bestIdx: Int,
-  val metricHeader: String,
-  val otherMetricHeaders: Seq[String],
-  val engineParamsScores: Seq[(EngineParams, MetricScores[R])],
-  val outputPath: Option[String])
+  bestScore: MetricScores[R],
+  bestEngineParams: EngineParams,
+  bestIdx: Int,
+  metricHeader: String,
+  otherMetricHeaders: Seq[String],
+  engineParamsScores: Seq[(EngineParams, MetricScores[R])],
+  outputPath: Option[String])
 extends BaseEvaluatorResult {
 
   override def toOneLiner(): String = {
@@ -187,7 +168,7 @@
 
     implicit lazy val formats = Utils.json4sDefaultFormats
 
-    logger.info("Writing best variant params to disk ($outputPath)...")
+    logger.info(s"Writing best variant params to disk ($outputPath)...")
     val writer = new PrintWriter(new File(outputPath))
     writer.write(writePretty(variant))
     writer.close
@@ -209,7 +190,7 @@
       (engineParams, metricScores)
     }
     .seq
-
+    
     implicit lazy val formats = Utils.json4sDefaultFormats +
       new NameParamsSerializer
 
@@ -227,7 +208,7 @@
     }
 
     // save engine params if it is set.
-    outputPath.map { path => saveEngineJson(evaluation, bestEngineParams, path) }
+    outputPath.foreach { path => saveEngineJson(evaluation, bestEngineParams, path) }
 
     MetricEvaluatorResult(
       bestScore = bestScore,
diff --git a/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala b/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala
new file mode 100644
index 0000000..1b4b024
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala
@@ -0,0 +1,103 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseAlgorithm
+import io.prediction.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a parallel-to-local algorithm.
+  *
+  * A parallel-to-local algorithm can be run in parallel on a cluster and
+  * produces a model that can fit within a single machine.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class P2LAlgorithm[PD, M : ClassTag, Q : Manifest, P]
+  extends BaseAlgorithm[PD, M, Q, P] {
+
+  /** Do not use directly or override this method, as this is called by
+    * PredictionIO workflow to train a model.
+    */
+  private[prediction]
+  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(sc: SparkContext, pd: PD): M
+
+  private[prediction]
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
+
+  private[prediction]
+  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
+    qs.mapValues { q => predict(m, q) }
+  }
+
+  private[prediction]
+  def predictBase(bm: Any, q: Q): P = predict(bm.asInstanceOf[M], q)
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param model Trained model produced by [[train]].
+    * @param query An input query.
+    * @return A prediction.
+    */
+  def predict(model: M, query: Q): P
+
+  private[prediction]
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    // A P2LAlgorithm produces a local model. By default, the model is
+    // serialized into PredictionIO's storage automatically. Users can override
+    // this by implementing the PersistentModel trait; we then call its save
+    // method and, if it succeeds, return a PersistentModelManifest, otherwise Unit.
+
+    val m = bm.asInstanceOf[M]
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      m
+    }
+  }
+}
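
For illustration, a minimal P2LAlgorithm sketch: training runs on the cluster, but the resulting model fits in memory on a single machine. All classes and parameters below are hypothetical and not part of this patch:

import io.prediction.controller.{P2LAlgorithm, Params}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

// Hypothetical engine classes used only for this sketch.
case class MeanAlgoParams(weight: Double) extends Params
case class PreparedData(ratings: RDD[Double])
case class MeanModel(mean: Double)
case class Query(bias: Double)
case class PredictedResult(score: Double)

class MeanAlgorithm(ap: MeanAlgoParams)
  extends P2LAlgorithm[PreparedData, MeanModel, Query, PredictedResult] {

  // Runs on the cluster, but produces a small local model.
  def train(sc: SparkContext, pd: PreparedData): MeanModel =
    MeanModel(pd.ratings.mean()) // mean() comes from DoubleRDDFunctions

  // Serving only needs the local model and the query.
  def predict(model: MeanModel, query: Query): PredictedResult =
    PredictedResult(ap.weight * model.mean + query.bias)
}

Because MeanModel is a small serializable object, it is persisted automatically; a PersistentModel implementation is only needed for models that require custom storage.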
diff --git a/core/src/main/scala/io/prediction/controller/PAlgorithm.scala b/core/src/main/scala/io/prediction/controller/PAlgorithm.scala
new file mode 100644
index 0000000..a78acb5
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/PAlgorithm.scala
@@ -0,0 +1,121 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseAlgorithm
+import io.prediction.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a parallel algorithm.
+  *
+  * A parallel algorithm can be run in parallel on a cluster and produces a
+  * model that can also be distributed across a cluster.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
+  *
+  * To enable evaluation, one must override and implement the
+  * [[batchPredict]] method. Otherwise, an exception will be thrown when
+  * `pio eval` is used.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class PAlgorithm[PD, M, Q : Manifest, P]
+  extends BaseAlgorithm[PD, M, Q, P] {
+
+  /** Do not use directly or override this method, as this is called by
+    * PredictionIO workflow to train a model.
+    */
+  private[prediction]
+  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(sc: SparkContext, pd: PD): M
+
+  private[prediction]
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
+
+
+  /** To enable evaluation, one must override and implement this method to
+    * generate predictions in batch. Otherwise, an exception will be thrown
+    * when `pio eval` is used.
+    *
+    * The default implementation throws an exception.
+    *
+    * @param m Trained model produced by [[train]].
+    * @param qs An RDD of index-query tuples. The index is used to keep track of
+    *           predicted results with corresponding queries.
+    */
+  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] =
+    throw new NotImplementedError("batchPredict not implemented")
+
+  /** Do not use directly or override this method, as this is called by
+    * PredictionIO workflow to perform prediction.
+    */
+  private[prediction]
+  def predictBase(baseModel: Any, query: Q): P = {
+    predict(baseModel.asInstanceOf[M], query)
+  }
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param model Trained model produced by [[train]].
+    * @param query An input query.
+    * @return A prediction.
+    */
+  def predict(model: M, query: Q): P
+
+  private[prediction]
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    // In general, a parallel model may contain multiple RDDs. It is not easy
+    // to infer and persist them programmatically since these RDDs can be
+    // very large.
+
+    // Persisting succeeds only if the model is an instance of
+    // PersistentModel and its save method returns true.
+    val m = bm.asInstanceOf[M]
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      Unit
+    }
+  }
+}
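
A minimal PAlgorithm sketch where the trained model is itself an RDD; batchPredict is overridden so the engine can be evaluated with `pio eval`. All classes below are hypothetical and not part of this patch:

import io.prediction.controller.PAlgorithm
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

// Hypothetical engine classes used only for this sketch.
case class PreparedData(ratings: RDD[(String, Double)]) // (item, rating)
case class ItemModel(scores: RDD[(String, Double)])     // distributed model
case class Query(item: String)
case class PredictedResult(score: Double)

class ItemScoreAlgorithm
  extends PAlgorithm[PreparedData, ItemModel, Query, PredictedResult] {

  // The trained model stays distributed across the cluster.
  def train(sc: SparkContext, pd: PreparedData): ItemModel =
    ItemModel(pd.ratings.reduceByKey(_ + _).cache())

  // Required for evaluation: score many queries with a join instead of per-query lookups.
  override def batchPredict(m: ItemModel, qs: RDD[(Long, Query)]): RDD[(Long, PredictedResult)] =
    qs.map { case (index, q) => (q.item, index) }
      .leftOuterJoin(m.scores)
      .map { case (_, (index, score)) => (index, PredictedResult(score.getOrElse(0.0))) }

  // One-off prediction during deployment; a lookup is acceptable for a single query.
  def predict(model: ItemModel, query: Query): PredictedResult =
    PredictedResult(model.scores.lookup(query.item).headOption.getOrElse(0.0))
}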
diff --git a/core/src/main/scala/io/prediction/controller/PDataSource.scala b/core/src/main/scala/io/prediction/controller/PDataSource.scala
new file mode 100644
index 0000000..5467a98
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/PDataSource.scala
@@ -0,0 +1,60 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BaseDataSource
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** Base class of a parallel data source.
+  *
+  * A parallel data source runs locally within a single machine, or in parallel
+  * on a cluster, to return data that is distributed across a cluster.
+  *
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+
+abstract class PDataSource[TD, EI, Q, A]
+  extends BaseDataSource[TD, EI, Q, A] {
+
+  private[prediction]
+  def readTrainingBase(sc: SparkContext): TD = readTraining(sc)
+
+  /** Implement this method to only return training data from a data source.
+    */
+  def readTraining(sc: SparkContext): TD
+
+  private[prediction]
+  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
+
+  /** To enable evaluation for your engine, you must override this method to
+    * return data for evaluation from a data source. The returned data can
+    * optionally include a sequence of query and actual value pairs for
+    * evaluation purposes.
+    *
+    * The default implementation returns an empty sequence as a stub, so that
+    * an engine can be compiled without implementing evaluation.
+    */
+  def readEval(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] =
+    Seq[(TD, EI, RDD[(Q, A)])]()
+
+  @deprecated("Use readEval() instead.", "0.9.0")
+  def read(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
+}
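
A minimal PDataSource sketch; readEval is overridden so the same source can also drive evaluation. The input path and all classes below are hypothetical and not part of this patch:

import io.prediction.controller.PDataSource
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// Hypothetical engine classes used only for this sketch.
case class Rating(user: String, item: String, rating: Double)
case class TrainingData(ratings: RDD[Rating])
case class EvalInfo(name: String)
case class Query(user: String, item: String)
case class ActualResult(rating: Double)

class RatingDataSource
  extends PDataSource[TrainingData, EvalInfo, Query, ActualResult] {

  private def readRatings(sc: SparkContext): RDD[Rating] =
    sc.textFile("data/ratings.csv").map { line => // hypothetical input location
      val Array(u, i, r) = line.split(",")
      Rating(u, i, r.toDouble)
    }

  def readTraining(sc: SparkContext): TrainingData =
    TrainingData(readRatings(sc))

  // A single train/test split; additional folds could be returned as more tuples.
  override def readEval(sc: SparkContext): Seq[(TrainingData, EvalInfo, RDD[(Query, ActualResult)])] = {
    val Array(train, test) = readRatings(sc).randomSplit(Array(0.8, 0.2))
    val qa = test.map(r => (Query(r.user, r.item), ActualResult(r.rating)))
    Seq((TrainingData(train), EvalInfo("80/20 split"), qa))
  }
}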
diff --git a/core/src/main/scala/io/prediction/controller/PPreparator.scala b/core/src/main/scala/io/prediction/controller/PPreparator.scala
new file mode 100644
index 0000000..1f31eab
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/PPreparator.scala
@@ -0,0 +1,45 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.core.BasePreparator
+import org.apache.spark.SparkContext
+
+/** Base class of a parallel preparator.
+  *
+  * A parallel preparator can be run in parallel on a cluster and produces
+  * prepared data that is distributed across a cluster.
+  *
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class PPreparator[TD, PD]
+  extends BasePreparator[TD, PD] {
+
+  private[prediction]
+  def prepareBase(sc: SparkContext, td: TD): PD = {
+    prepare(sc, td)
+  }
+
+  /** Implement this method to produce prepared data that is ready for model
+    * training.
+    *
+    * @param sc An Apache Spark context.
+    * @param trainingData Training data to be prepared.
+    */
+  def prepare(sc: SparkContext, trainingData: TD): PD
+}
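
A minimal PPreparator sketch, filtering the training data before it is handed to the algorithms (all classes below are hypothetical):

import io.prediction.controller.PPreparator
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// Hypothetical engine classes used only for this sketch.
case class Rating(user: String, item: String, rating: Double)
case class TrainingData(ratings: RDD[Rating])
case class PreparedData(ratings: RDD[Rating])

class RatingPreparator extends PPreparator[TrainingData, PreparedData] {
  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData =
    PreparedData(trainingData.ratings.filter(_.rating > 0)) // drop non-positive ratings
}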
diff --git a/core/src/main/scala/io/prediction/controller/Params.scala b/core/src/main/scala/io/prediction/controller/Params.scala
index 8dd6229..0d5d149 100644
--- a/core/src/main/scala/io/prediction/controller/Params.scala
+++ b/core/src/main/scala/io/prediction/controller/Params.scala
@@ -18,25 +18,13 @@
 /** Base trait for all kinds of parameters that will be passed to constructors
   * of different controller classes.
   *
-  * @group General
+  * @group Helper
   */
 trait Params extends Serializable {}
 
-/** Mix in this trait for parameters that contain app ID. Only engines that
-  * take data source parameters with this trait would be able to use the
-  * feedback loop.
-  *
-  * @group General
-  */
-/*
-trait ParamsWithAppId extends Serializable {
-  val appId: Int
-}
-*/
-
 /** A concrete implementation of [[Params]] representing empty parameters.
   *
-  * @group General
+  * @group Helper
   */
 case class EmptyParams() extends Params {
   override def toString(): String = "Empty"
diff --git a/core/src/main/scala/io/prediction/controller/PersistentModel.scala b/core/src/main/scala/io/prediction/controller/PersistentModel.scala
new file mode 100644
index 0000000..0722418
--- /dev/null
+++ b/core/src/main/scala/io/prediction/controller/PersistentModel.scala
@@ -0,0 +1,95 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import org.apache.spark.SparkContext
+
+/** Mix in and implement this trait if your model cannot be persisted by
+  * PredictionIO automatically. A companion object extending
+  * [[PersistentModelLoader]] is required for PredictionIO to load the
+  * persisted model automatically during deployment.
+  *
+  * Notice that models generated by [[PAlgorithm]] cannot be persisted
+  * automatically by nature and must implement these traits if model persistence
+  * is desired.
+  *
+  * {{{
+  * class MyModel extends PersistentModel[MyParams] {
+  *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
+  *     ...
+  *   }
+  * }
+  *
+  * object MyModel extends PersistentModelLoader[MyParams, MyModel] {
+  *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
+  *     ...
+  *   }
+  * }
+  * }}}
+  *
+  * @tparam AP Algorithm parameters class.
+  * @see [[PersistentModelLoader]]
+  * @group Algorithm
+  */
+trait PersistentModel[AP <: Params] {
+  /** Save the model to some persistent storage.
+    *
+    * This method should return true if the model has been saved successfully so
+    * that PredictionIO knows that it can be restored later during deployment.
+    * This method should return false if the model cannot be saved (or should
+    * not be saved due to configuration) so that PredictionIO will re-train the
+    * model during deployment. All arguments of this method are provided
+    * automatically by PredictionIO.
+    *
+    * @param id ID of the run that trained this model.
+    * @param params Algorithm parameters that were used to train this model.
+    * @param sc An Apache Spark context.
+    */
+  def save(id: String, params: AP, sc: SparkContext): Boolean
+}
+
+/** Implement an object that extends this trait for PredictionIO to support
+  * loading a persisted model during serving deployment.
+  *
+  * @tparam AP Algorithm parameters class.
+  * @tparam M Model class.
+  * @see [[PersistentModel]]
+  * @group Algorithm
+  */
+trait PersistentModelLoader[AP <: Params, M] {
+  /** Implement this method to restore a persisted model that extends the
+    * [[PersistentModel]] trait. All arguments of this method are provided
+    * automatically by PredictionIO.
+    *
+    * @param id ID of the run that trained this model.
+    * @param params Algorithm parameters that were used to train this model.
+    * @param sc An optional Apache Spark context. This will be injected if the
+    *           model was generated by a [[PAlgorithm]].
+    */
+  def apply(id: String, params: AP, sc: Option[SparkContext]): M
+}
+
+/** DEPRECATED. Use [[PersistentModel]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use PersistentModel instead.", "0.9.2")
+trait IPersistentModel[AP <: Params] extends PersistentModel[AP]
+
+/** DEPRECATED. Use [[PersistentModelLoader]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use PersistentModelLoader instead.", "0.9.2")
+trait IPersistentModelLoader[AP <: Params, M] extends PersistentModelLoader[AP, M]
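
Expanding the skeleton above into a hedged sketch of persisting an RDD-backed model, as would be produced by a PAlgorithm; the storage path and all classes below are hypothetical and not part of this patch:

import io.prediction.controller.{Params, PersistentModel, PersistentModelLoader}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// Hypothetical algorithm parameters and model used only for this sketch.
case class ItemAlgoParams(basePath: String) extends Params

case class ItemModel(scores: RDD[(String, Double)])
  extends PersistentModel[ItemAlgoParams] {
  def save(id: String, params: ItemAlgoParams, sc: SparkContext): Boolean = {
    // Write the partitions out directly instead of serializing the whole RDD locally.
    scores.saveAsObjectFile(s"${params.basePath}/${id}/scores")
    true
  }
}

object ItemModel extends PersistentModelLoader[ItemAlgoParams, ItemModel] {
  def apply(id: String, params: ItemAlgoParams, sc: Option[SparkContext]): ItemModel =
    ItemModel(sc.get.objectFile[(String, Double)](s"${params.basePath}/${id}/scores"))
}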
diff --git a/core/src/main/scala/io/prediction/controller/Preparator.scala b/core/src/main/scala/io/prediction/controller/Preparator.scala
deleted file mode 100644
index 4eaee43..0000000
--- a/core/src/main/scala/io/prediction/controller/Preparator.scala
+++ /dev/null
@@ -1,152 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
-import io.prediction.controller.java.JavaUtils
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-import scala.reflect.runtime.universe._
-
-/** Base class of a parallel preparator.
-  *
-  * A parallel preparator can be run in parallel on a cluster and produces a
-  * prepared data that is distributed across a cluster.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class PPreparator[TD, PD]
-  extends BasePreparator[TD, PD] {
-
-  def prepareBase(sc: SparkContext, td: TD): PD = {
-    prepare(sc, td)
-  }
-
-  /** Implement this method to produce prepared data that is ready for model
-    * training.
-    *
-    * @param sc An Apache Spark context.
-    * @param trainingData Training data to be prepared.
-    */
-  def prepare(sc: SparkContext, trainingData: TD): PD
-}
-
-/** Base class of a local preparator.
-  *
-  * A local preparator runs locally within a single machine and produces
-  * prepared data that can fit within a single machine.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class LPreparator[TD, PD : ClassTag]
-  extends BasePreparator[RDD[TD], RDD[PD]] {
-
-  def prepareBase(sc: SparkContext, rddTd: RDD[TD]): RDD[PD] = {
-    rddTd.map(prepare)
-  }
-
-  /** Implement this method to produce prepared data that is ready for model
-    * training.
-    *
-    * @param trainingData Training data to be prepared.
-    */
-  def prepare(trainingData: TD): PD
-}
-
-
-/** A helper concrete implementation of [[io.prediction.core.BasePreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-private[prediction] 
-class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
-  def prepareBase(sc: SparkContext, td: TD): TD = td
-}
-
-/** A helper concrete implementation of [[io.prediction.core.BasePreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-private[prediction] 
-object IdentityPreparator {
-  /** Produces an instance of [[IdentityPreparator]].
-    *
-    * @param ds Data source.
-    */
-  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
-    classOf[IdentityPreparator[TD]]
-}
-
-/** A helper concrete implementation of [[io.prediction.controller.PPreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-class PIdentityPreparator[TD] extends PPreparator[TD, TD] {
-  def prepare(sc: SparkContext, td: TD): TD = td
-}
-
-/** A helper concrete implementation of [[io.prediction.controller.PPreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-object PIdentityPreparator {
-  /** Produces an instance of [[PIdentityPreparator]].
-    *
-    * @param ds Data source.
-    */
-  def apply[TD](ds: Class[_ <: PDataSource[TD, _, _, _]]): Class[PIdentityPreparator[TD]] =
-    classOf[PIdentityPreparator[TD]]
-}
-
-
-/** A helper concrete implementation of [[io.prediction.controller.LPreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-class LIdentityPreparator[TD]
-extends LPreparator[TD, TD]()(JavaUtils.fakeClassTag[TD]) {
-  def prepare(td: TD): TD = td
-}
-
-/** A helper concrete implementation of [[io.prediction.controller.LPreparator]]
-  * that pass training data through without any special preparation.
-  *
-  * @group Preparator
-  */
-object LIdentityPreparator {
-  /** Produces an instance of [[LIdentityPreparator]].
-    *
-    * @param ds Data source.
-    */
-  def apply[TD](ds: Class[_ <: LDataSource[TD, _, _, _]]): Class[LIdentityPreparator[TD]] =
-    classOf[LIdentityPreparator[TD]]
-}
-
-
diff --git a/core/src/main/scala/io/prediction/controller/SanityCheck.scala b/core/src/main/scala/io/prediction/controller/SanityCheck.scala
index 2f55a16..bb5342f 100644
--- a/core/src/main/scala/io/prediction/controller/SanityCheck.scala
+++ b/core/src/main/scala/io/prediction/controller/SanityCheck.scala
@@ -19,7 +19,7 @@
   * automatically perform sanity check on your data classes during training.
   * This is very useful when you need to debug your engine.
   *
-  * @group General
+  * @group Helper
   */
 trait SanityCheck {
   /** Implement this method to perform checks on your data. This method should
diff --git a/core/src/main/scala/io/prediction/controller/Serving.scala b/core/src/main/scala/io/prediction/controller/Serving.scala
deleted file mode 100644
index cb98d01..0000000
--- a/core/src/main/scala/io/prediction/controller/Serving.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-import scala.reflect.runtime.universe._
-
-/** Base class of serving. 
-  *
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Serving
-  */
-abstract class LServing[Q, P] extends BaseServing[Q, P] {
-  def serveBase(q: Q, ps: Seq[P]): P = {
-    serve(q, ps)
-  }
-
-  /** Implement this method to combine multiple algorithms' predictions to
-    * produce a single final prediction.
-    *
-    * @param query Input query.
-    * @param predictions A list of algorithms' predictions.
-    */
-  def serve(query: Q, predictions: Seq[P]): P
-}
-
-/** A concrete implementation of [[LServing]] returning the first algorithm's
-  * prediction result directly without any modification.
-  *
-  * @group Serving
-  */
-class LFirstServing[Q, P] extends LServing[Q, P] {
-  /** Returns the first algorithm's prediction. */
-  def serve(query: Q, predictions: Seq[P]): P = predictions.head
-}
-
-/** A concrete implementation of [[LServing]] returning the first algorithm's
-  * prediction result directly without any modification.
-  *
-  * @group Serving
-  */
-object LFirstServing {
-  /** Returns an instance of [[LFirstServing]]. */
-  def apply[Q, P](a: Class[_ <: BaseAlgorithm[_, _, Q, P]]): Class[LFirstServing[Q, P]] =
-    classOf[LFirstServing[Q, P]]
-}
-
-/** A concrete implementation of [[LServing]] returning the average of all
-  * algorithms' predictions. The output prediction class is Double.
-  *
-  * @group Serving
-  */
-class LAverageServing[Q] extends LServing[Q, Double] {
-  /** Returns the average of all algorithms' predictions. */
-  def serve(query: Q, predictions: Seq[Double]): Double = {
-    predictions.sum / predictions.length
-  }
-}
-
-/** A concrete implementation of [[LServing]] returning the average of all
-  * algorithms' predictions. The output prediction class is Double.
-  *
-  * @group Serving
-  */
-object LAverageServing {
-  /** Returns an instance of [[LAverageServing]]. */
-  def apply[Q](a: Class[_ <: BaseAlgorithm[_, _, Q, _]]): Class[LAverageServing[Q]] =
-    classOf[LAverageServing[Q]]
-}
diff --git a/core/src/main/scala/io/prediction/controller/Utils.scala b/core/src/main/scala/io/prediction/controller/Utils.scala
index d4f4229..5098fba 100644
--- a/core/src/main/scala/io/prediction/controller/Utils.scala
+++ b/core/src/main/scala/io/prediction/controller/Utils.scala
@@ -17,7 +17,6 @@
 
 import io.prediction.workflow.KryoInstantiator
 
-import com.twitter.chill.KryoInjection
 import org.json4s._
 import org.json4s.ext.JodaTimeSerializers
 
@@ -28,7 +27,7 @@
 
 /** Controller utilities.
   *
-  * @group General
+  * @group Helper
   */
 object Utils {
   /** Default JSON4S serializers for PredictionIO controllers. */
@@ -42,12 +41,12 @@
     * @param model Model object.
     */
   def save(id: String, model: Any): Unit = {
-    val tmpdir = sys.env.get("PIO_FS_TMPDIR").getOrElse(
-      System.getProperty("java.io.tmpdir"))
+    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
     val modelFile = tmpdir + File.separator + id
     (new File(tmpdir)).mkdirs
     val fos = new FileOutputStream(modelFile)
-    fos.write(KryoInjection(model))
+    val kryo = KryoInstantiator.newKryoInjection
+    fos.write(kryo(model))
     fos.close
   }
 
@@ -59,12 +58,10 @@
     * @param id Used as the filename of the file.
     */
   def load(id: String): Any = {
-    val tmpdir = sys.env.get("PIO_FS_TMPDIR").getOrElse(
-      System.getProperty("java.io.tmpdir"))
+    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
     val modelFile = tmpdir + File.separator + id
     val src = Source.fromFile(modelFile)(scala.io.Codec.ISO8859)
-    val kryoInstantiator = new KryoInstantiator(getClass.getClassLoader)
-    val kryo = KryoInjection.instance(kryoInstantiator)
+    val kryo = KryoInstantiator.newKryoInjection
     val m = kryo.invert(src.map(_.toByte).toArray).get
     src.close
     m
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaAlgorithm.scala b/core/src/main/scala/io/prediction/controller/java/JavaAlgorithm.scala
deleted file mode 100644
index 6d06f11..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaAlgorithm.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.core.BaseAlgorithm
-
-import net.jodah.typetools.TypeResolver
-
-import org.apache.spark.SparkContext
-import org.apache.spark.api.java.JavaPairRDD
-import org.apache.spark.api.java.function.PairFunction
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/**
- * Base class of a local algorithm.
- *
- * A local algorithm runs locally within a single machine and produces a model
- * that can fit within a single machine.
- *
- * @param <PD> Prepared Data
- * @param <M> Model
- * @param <Q> Input Query
- * @param <P> Output Prediction
- */
-abstract class LJavaAlgorithm[PD, M, Q, P]
-  extends LAlgorithm[PD, M, Q, P]()(
-    JavaUtils.fakeClassTag[M],
-    JavaUtils.fakeManifest[Q]) {
-  def train(pd: PD): M
-
-  def predict(model: M, query: Q): P
-
-  /** Returns a Class object of Q for internal use. */
-  def queryClass(): Class[Q] = {
-    val typeArgs = TypeResolver.resolveRawArguments(
-      classOf[LJavaAlgorithm[PD, M, Q, P]],
-      getClass)
-    typeArgs(2).asInstanceOf[Class[Q]]
-  }
-
-  override def isJava: Boolean = true
-}
-
-/** Base class of a parallel algorithm.
-  *
-  * A parallel algorithm can be run in parallel on a cluster and produces a
-  * model that can also be distributed across a cluster.
-  *
-  * @param <PD> Prepared data class.
-  * @param <M> Trained model class.
-  * @param <Q> Input query class.
-  * @param <P> Output prediction class.
-  */
-abstract class PJavaAlgorithm[PD, M, Q, P]
-  extends BaseAlgorithm[PD, M, Q, P]()(
-    JavaUtils.fakeManifest[Q]) {
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to train a model.
-    */
-  def trainBase(sc: SparkContext, pd: PD): M = train(pd)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(pd: PD): M
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to perform batch prediction.
-    */
-  override def batchPredictBase(
-      sc: SparkContext, baseModel: Any, indexedQueries: RDD[(Long, Q)])
-  : RDD[(Long, P)] = {
-    implicit val fakeQTag: ClassTag[Q] = JavaUtils.fakeClassTag[Q]
-    batchPredict(baseModel.asInstanceOf[M],
-        JavaPairRDD.fromRDD(indexedQueries)).rdd
-  }
-
-  /** Implement this method to produce predictions from batch queries.
-    *
-    * Evaluation call this method. Since in PJavaAlgorithm, M may contain RDDs,
-    * it is impossible to call `predict(model, query)` without localizing
-    * queries (which is very inefficient). Hence, engine builders using
-    * PJavaAlgorithm need to implement this method for evaluation purpose.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param indexedQueries Batch of queries with indices.
-    * @return An RDD of indexed predictions.
-    */
-  def batchPredict(model: M, indexedQueries: JavaPairRDD[Long, Q])
-  : JavaPairRDD[Long, P]
-
-  /** Do not use directly or override this method, as this is called by
-    * PredictionIO workflow to perform prediction.
-    */
-  def predictBase(baseModel: Any, query: Q): P = {
-    predict(baseModel.asInstanceOf[M], query)
-  }
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(model: M, query: Q): P
-
-  /** Returns a Class object of Q for internal use. */
-  def queryClass(): Class[Q] = {
-    val typeArgs = TypeResolver.resolveRawArguments(
-      classOf[PJavaAlgorithm[PD, M, Q, P]],
-      getClass)
-    typeArgs(2).asInstanceOf[Class[Q]]
-  }
-
-  def isJava: Boolean = true
-  override def isParallel: Boolean = true
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaDataSource.scala b/core/src/main/scala/io/prediction/controller/java/JavaDataSource.scala
deleted file mode 100644
index 9b36e1d..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaDataSource.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.core.BaseDataSource
-import io.prediction.controller.Params
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.api.java.JavaPairRDD
-import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.rdd.RDD
-
-import java.util.{ List => JList }
-import java.util.{ ArrayList => JArrayList }
-import java.util.{ Collections => JCollections }
-import java.lang.{ Iterable => JIterable }
-
-import scala.collection.JavaConversions._
-import scala.reflect._
-
-/**
- * Base class of a local data source.
- *
- * A local data source runs locally within a single machine and return data that
- * can fit within a single machine.
- *
- * @param <TD> Training Data
- * @param <EI> Evaluation Info
- * @param <Q> Input Query
- * @param <A> Actual Value
- */
-abstract class LJavaDataSource[TD, EI, Q, A]
-  extends BaseDataSource[RDD[TD], EI, Q, A] {
-  def readBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
-    implicit val fakeTdTag: ClassTag[TD] = JavaUtils.fakeClassTag[TD]
-    val datasets = sc.parallelize(Array(None)).flatMap(_ => read().toSeq).zipWithIndex
-    datasets.cache
-    val eis: Map[Long, EI] = datasets.map(t => t._2 -> t._1._2).collect.toMap
-    eis.map { t =>
-      val dataset = datasets.filter(_._2 == t._1).map(_._1)
-      val ei = t._2
-      val td = dataset.map(_._1)
-      val qa = dataset.map(_._3.toSeq).flatMap(identity)
-      (td, ei, qa)
-    }.toSeq
-  }
-
-  /** Implement this method to only return training data from a data source.
-    */
-  def readTraining(): TD = null.asInstanceOf[TD]
-
-  /** Implement this method to return one set of test data (
-    * an Iterable of query and actual value pairs) from a data source.
-    * Should also implement readTraining to return corresponding training data.
-    */
-  def readTest(): Tuple2[EI, JIterable[Tuple2[Q, A]]] =
-    (null.asInstanceOf[EI], JCollections.emptyList())
-
-  /** Implement this method to return one or more sets of training data
-    * and test data (an Iterable of query and actual value pairs) from a
-    * data source.
-    */
-  def read(): JIterable[Tuple3[TD, EI, JIterable[Tuple2[Q, A]]]] = {
-    val (ei, qa) = readTest()
-    List((readTraining(), ei, qa))
-  }
-
-}
-
-/** Base class of a parallel data source.
-  *
-  * A parallel data source runs locally within a single machine, or in parallel
-  * on a cluster, to return data that is distributed across a cluster.
-  *
-  * @param <TD> Training data class.
-  * @param <EI> Evaluation info class.
-  * @param <Q> Input query class.
-  * @param <A> Actual value class.
-  */
-abstract class PJavaDataSource[TD, EI, Q, A]
-  extends BaseDataSource[TD, EI, Q, A] {
-  def readBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = {
-    implicit val fakeTdTag: ClassTag[TD] = JavaUtils.fakeClassTag[TD]
-    read(new JavaSparkContext(sc)).toSeq.map { case (td, ei, qaRdd) => {
-      // TODO(yipjustin). Maybe do a size check on td, to make sure the user
-      // doesn't supply a huge TD to the driver program.
-      (td, ei, qaRdd.rdd)
-    }}
-  }
-
-  /** Implement this method to return data from a data source. Returned data
-    * can optionally include a sequence of query and actual value pairs for
-    * evaluation purpose.
-    */
-  def read(jsc: JavaSparkContext): JIterable[Tuple3[TD, EI, JavaPairRDD[Q, A]]]
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaEngine.scala b/core/src/main/scala/io/prediction/controller/java/JavaEngine.scala
deleted file mode 100644
index 90f7592..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaEngine.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.Engine
-import io.prediction.controller.Params
-import io.prediction.controller.EngineParams
-import io.prediction.controller.IEngineFactory
-
-import java.lang.{ Iterable => JIterable }
-import java.util.{ Map => JMap }
-
-import scala.collection.JavaConversions._
-
-/**
- * This class chains up the entire data process. PredictionIO uses this
- * information to create workflows and deployments. In Java, use
- * JavaEngineBuilder to conveniently instantiate an instance of this class.
- *
- * @param <TD> Training Data
- * @param <EI> Evaluation Info
- * @param <PD> Prepared Data
- * @param <Q> Input Query
- * @param <P> Output Prediction
- * @param <A> Actual Value
- */
-class JavaEngine[TD, EI, PD, Q, P, A](
-    dataSourceClass: Class[_ <: LJavaDataSource[TD, EI, Q, A]],
-    preparatorClass: Class[_ <: LJavaPreparator[TD, PD]],
-    algorithmClassMap: JMap[String, Class[_ <: LJavaAlgorithm[PD, _, Q, P]]],
-    servingClass: Class[_ <: LJavaServing[Q, P]]
-) extends Engine(
-    dataSourceClass,
-    preparatorClass,
-    Map(algorithmClassMap.toSeq: _*),
-    servingClass)
-
-/**
- * This class serves as a logical grouping of all required engine's parameters.
- */
-class JavaEngineParams(
-    dataSourceParams: Params,
-    preparatorParams: Params,
-    algorithmParamsList: JIterable[(String, Params)],
-    servingParams: Params
-) extends EngineParams(
-    ("", dataSourceParams),
-    ("", preparatorParams),
-    algorithmParamsList.toSeq,
-    ("", servingParams))
-
-/**
- * JavaSimpleEngine has only one algorithm, and uses default preparator and
- * serving layer. Current default preparator is IdentityPreparator and serving
- * is FirstServing.
- */
-class JavaSimpleEngine[TD, EI, Q, P, A](
-    dataSourceClass: Class[_ <: LJavaDataSource[TD, EI, Q, A]],
-    preparatorClass: Class[_ <: LJavaPreparator[TD, TD]],
-    algorithmClassMap
-      : JMap[String, Class[_ <: LJavaAlgorithm[TD, _, Q, P]]],
-    servingClass: Class[_ <: LJavaServing[Q, P]]
-) extends JavaEngine[TD, EI, TD, Q, P, A](
-    dataSourceClass,
-    preparatorClass,
-    Map(algorithmClassMap.toSeq: _*),
-    servingClass)
-
-/** If you intend to let PredictionIO create workflow and deploy serving
-  * automatically, you will need to implement an object that extends this trait
-  * and return an [[Engine]].
-  */
-trait IJavaEngineFactory extends IEngineFactory {
-  /** Creates an instance of an [[Engine]]. */
-  def apply(): Engine[_, _, _, _, _, _]
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaEvaluator.scala b/core/src/main/scala/io/prediction/controller/java/JavaEvaluator.scala
deleted file mode 100644
index 474ea43..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaEvaluator.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-// package io.prediction.controller.java
-//
-// import io.prediction.controller.Params
-// import io.prediction.core.BaseEvaluator
-//
-// import java.util.{ List => JList }
-// import java.lang.{ Iterable => JIterable }
-//
-// import scala.collection.JavaConversions._
-// import scala.reflect._
-//
-// /**
-//  * Base class of metrics.
-//  *
-//  * Evaluator compare predictions with actual known values and produce numerical
-//  * comparisons.
-//  */
-// abstract class JavaEvaluator[DP, Q, P, A, EU, ES, ER <: AnyRef]
-//   extends BaseEvaluator[DP, Q, P, A, ER] {
-//
-//   def evaluateUnitBase(input: (Q, P, A)): EU = {
-//     evaluateUnit(input._1, input._2, input._3)
-//   }
-//
-//   /**
-//    * Implement this method to calculate a unit of metrics, comparing a pair of
-//    * predicted and actual values.
-//    */
-//   def evaluateUnit(query: Q, predicted: P, actual: A): EU
-//
-//   def evaluateSetBase(dataParams: DP, metricUnits: Seq[EU]): ES = {
-//     evaluateSet(dataParams, metricUnits)
-//   }
-//
-//   /**
-//    * Implement this method to calculate metrics results of an evaluation.
-//    */
-//   def evaluateSet(dataParams: DP, metricUnits: JIterable[EU]): ES
-//
-//   def evaluateAllBase(input: Seq[(DP, ES)]): ER = {
-//     evaluateAll(input)
-//   }
-//
-//   /**
-//    * Implement this method to aggregate all metrics results generated by each
-//    * evaluation's {@link JavaEvaluator#evaluateSet} to produce the final result.
-//    */
-//   def evaluateAll(input: JIterable[Tuple2[DP, ES]]): ER
-// }
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaParams.scala b/core/src/main/scala/io/prediction/controller/java/JavaParams.scala
deleted file mode 100644
index dacb680..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaParams.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.Params
-
-/** Parameters base class in Java. */
-trait JavaParams extends Params
-
-/** Empty parameters. */
-class EmptyParams() extends JavaParams {
-  override def toString(): String = "Empty"
-}
-
-/** Empty data source parameters. */
-case class EmptyDataSourceParams() extends EmptyParams
-
-/** Empty data parameters. */
-case class EmptyDataParams() extends AnyRef
-
-/** Empty evaluation info. */
-case class EmptyEvaluationInfo() extends AnyRef
-
-/** Empty preparator parameters. */
-case class EmptyPreparatorParams() extends EmptyParams
-
-/** Empty algorithm parameters. */
-case class EmptyAlgorithmParams() extends EmptyParams
-
-/** Empty serving parameters. */
-case class EmptyServingParams() extends EmptyParams
-
-/** Empty metrics parameters. */
-case class EmptyMetricsParams() extends EmptyParams
-
-/** Empty training data. */
-case class EmptyTrainingData() extends AnyRef
-
-/** Empty prepared data. */
-case class EmptyPreparedData() extends AnyRef
-
-/** Empty model. */
-case class EmptyModel() extends AnyRef
-
-/** Empty actual result. */
-case class EmptyActualResult() extends AnyRef
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaPreparator.scala b/core/src/main/scala/io/prediction/controller/java/JavaPreparator.scala
deleted file mode 100644
index 038e011..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaPreparator.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.core.BasePreparator
-import io.prediction.controller.Params
-
-import java.util.{ List => JList }
-import java.lang.{ Iterable => JIterable }
-import scala.collection.JavaConversions._
-
-import org.apache.spark.rdd.RDD
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.api.java.JavaSparkContext
-
-import scala.reflect._
-
-/**
- * Base class of a local preparator.
- *
- * A local preparator runs locally within a single machine and produces prepared
- * data that can fit within a single machine.
- *
- * @param <TD> Training Data
- * @param <PD> Prepared Data
- */
-abstract class LJavaPreparator[TD, PD]
-  extends BasePreparator[RDD[TD], RDD[PD]]{ 
-
-  def prepareBase(sc: SparkContext, td: RDD[TD]): RDD[PD] = {
-    implicit val fakeTdTag: ClassTag[PD] = JavaUtils.fakeClassTag[PD]
-    td.map(prepare)
-  }
-
-  /**
-   * Implement this method to produce prepared data that is ready for model
-   * training.
-   */
-  def prepare(td: TD): PD
-}
-
-/**
- * A helper concrete implementation of {@link LJavaPreparator} that pass
- * training data through without any special preparation.
- *
- * @param <TD> Training Data
- */
-class LJavaIdentityPreparator[TD] extends LJavaPreparator[TD, TD] {
-  override def prepare(td: TD): TD = td
-}
-
-/**
- * A helper concrete implementation of {@link LJavaPreparator} that pass
- * training data through without any special preparation.
- */
-object LJavaIdentityPreparator {
-  /** Produces an instance of {@link LJavaIdentityPreparator}. */
-  def apply[TD](ds: Class[_ <: LJavaDataSource[_, TD, _, _]]): Class[LJavaIdentityPreparator[TD]] =
-    classOf[LJavaIdentityPreparator[TD]]
-
-  /**
-   * Returns an instance of {@link LJavaIdentityPreparator} by taking a {@link
-   * JavaEngineBuilder} as argument.
-   */
-  def apply[TD, B <: JavaEngineBuilder[TD, _, TD, _, _, _]]
-    (b: B): Class[LJavaIdentityPreparator[TD]] = classOf[LJavaIdentityPreparator[TD]]
-
-}
-
-/** Base class of a parallel preparator.
-  *
-  * A parallel preparator can be run in parallel on a cluster and produces a
-  * prepared data that is distributed across a cluster.
-  *
-  * @param <TD> Training data class.
-  * @param <PD> Prepared data class.
-  */
-abstract class PJavaPreparator[TD, PD]
-  extends BasePreparator[TD, PD] {
-  def prepareBase(sc: SparkContext, td: TD): PD = {
-    implicit val fakeTdTag: ClassTag[PD] = JavaUtils.fakeClassTag[PD]
-    prepare(new JavaSparkContext(sc), td)
-  }
-
-  /** Implement this method to produce prepared data that is ready for model
-    * training.
-    *
-    * @param jsc A Java Spark context.
-    * @param trainingData Training data to be prepared.
-    */
-  def prepare(jsc: JavaSparkContext, trainingData: TD): PD
-}
-
-/**
- * A helper concrete implementation of {@link PJavaPreparator} that pass
- * training data through without any special preparation.
- *
- * @param <TD> Training Data
- */
-class PJavaIdentityPreparator[TD] extends PJavaPreparator[TD, TD] {
-  override def prepare(jsc: JavaSparkContext, td: TD): TD = td
-}
-
-/**
- * A helper concrete implementation of {@link PJavaPreparator} that pass
- * training data through without any special preparation.
- */
-object PJavaIdentityPreparator {
-  /** Produces an instance of {@link PJavaIdentityPreparator}. */
-  def apply[TD](ds: Class[_ <: PJavaDataSource[_, TD, _, _]]): Class[PJavaIdentityPreparator[TD]] =
-    classOf[PJavaIdentityPreparator[TD]]
-
-  /**
-   * Returns an instance of {@link PJavaIdentityPreparator} by taking a {@link
-   * PJavaEngineBuilder} as argument.
-   */
-  // TODO tom-chan add this back when the PJavaEngineBuilder is ready
-  /* def apply[TD, B <: PJavaEngineBuilder[TD, _, TD, _, _, _]](b: B) =
-     classOf[PJavaIdentityPreparator[TD]] */
-
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaServing.scala b/core/src/main/scala/io/prediction/controller/java/JavaServing.scala
deleted file mode 100644
index 1819c5a..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaServing.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.Params
-import io.prediction.core.BaseServing
-import io.prediction.core.BaseAlgorithm
-
-import scala.collection.JavaConversions._
-import scala.reflect._
-
-import java.lang.{ Iterable => JIterable }
-import java.util.{ List => JList }
-
-/**
- * Base class of local serving. For deployment, there should only be local
- * serving class.
- *
- * @param <Q> Input Query
- * @param <P> Output Prediction
- */
-abstract class LJavaServing[Q, P]
-  extends BaseServing[Q, P] {
-
-  def serveBase(q: Q, ps: Seq[P]): P = serve(q, seqAsJavaList(ps))
-
-  /**
-   * Implement this method to combine multiple algorithms' predictions to
-   * produce a single final prediction.
-   */
-  def serve(query: Q, predictions: JIterable[P]): P
-}
-
-/**
- * A concrete implementation of {@link LJavaServing} returning the first
- * algorithm's prediction result directly without any modification.
- *
- * @param <Q> Input Query
- * @param <P> Output Prediction
- */
-class LJavaFirstServing[Q, P] extends LJavaServing[Q, P] {
-  override def serve(query: Q, predictions: JIterable[P]): P = {
-    predictions.iterator().next()
-  }
-}
-
-/**
- * A concrete implementation of {@link LJavaServing} returning the first
- * algorithm's prediction result directly without any modification.
- */
-object LJavaFirstServing {
-  /** Returns an instance of {@link LJavaFirstServing}. */
-  def apply[Q, P](a: Class[_ <: BaseAlgorithm[_, _, Q, P]]): Class[LJavaFirstServing[Q, P]] =
-    classOf[LJavaFirstServing[Q, P]]
-
-  /**
-   * Returns an instance of {@link LJavaFirstServing} by taking a {@link
-   * JavaEngineBuilder} as argument.
-   */
-  def apply[Q, P, B <: JavaEngineBuilder[_, _, _, Q, P, _]](b: B): Class[LJavaFirstServing[Q, P]] =
-    classOf[LJavaFirstServing[Q, P]]
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaUtils.scala b/core/src/main/scala/io/prediction/controller/java/JavaUtils.scala
deleted file mode 100644
index ea78ff4..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaUtils.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import scala.collection.JavaConversions._
-import scala.reflect.ClassTag
-import scala.reflect.runtime.universe._
-
-/** Internal Java utilities. */
-object JavaUtils {
-  // This "fake" tags are adopted from Spark's Java API.
-  // Scala requires manifest or classtag for some data. But Prediction.IO
-  // doesn't really need it as our system is oblivious to the actual data. We
-  // pass a fake ClassTag / Manifest to keep the scala compiler happy.
-  def fakeClassTag[T]: ClassTag[T] = {
-    ClassTag.AnyRef.asInstanceOf[ClassTag[T]]
-  }
-
-  def fakeManifest[T]: Manifest[T] = {
-    manifest[AnyRef].asInstanceOf[Manifest[T]]
-  }
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaWorkflow.scala b/core/src/main/scala/io/prediction/controller/java/JavaWorkflow.scala
deleted file mode 100644
index 1976c14..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaWorkflow.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java;
-
-// import io.prediction.workflow.JavaCoreWorkflow;
-import io.prediction.controller.Engine
-import io.prediction.controller.EngineParams
-import io.prediction.controller.WorkflowParams
-import io.prediction.core.BaseEvaluator
-import io.prediction.controller.Params
-import scala.collection.JavaConversions._
-
-/** Collection of workflow creation methods. */
-object JavaWorkflow {
-  /** Creates a workflow that runs an engine.
-    *
-    * @tparam EI Evaluation Info class.
-    * @tparam TD Training data class.
-    * @tparam PD Prepared data class.
-    * @tparam Q Input query class.
-    * @tparam P Output prediction class.
-    * @tparam A Actual value class.
-    *
-    * @param engine An instance of [[Engine]].
-    * @param engineParams Engine parameters.
-    * @param params Workflow parameters.
-    */
-  def runEngine[EI, TD, PD, Q, P, A](
-    engine: Engine[TD, EI, PD, Q, P, A],
-    engineParams: EngineParams,
-    params: WorkflowParams
-  ) {
-  /*
-    runEngine(
-      engine = engine,
-      engineParams = engineParams,
-      evaluatorClass = null,
-      evaluatorParams = null,
-      params = params
-    )
-  */
-  }
-
-  /** Creates a workflow that runs an engine.
-    *
-    * @tparam EI Evaluation Info class.
-    * @tparam TD Training data class.
-    * @tparam PD Prepared data class.
-    * @tparam Q Input query class.
-    * @tparam P Output prediction class.
-    * @tparam A Actual value class.
-    *
-    * @param engine An instance of [[Engine]].
-    * @param engineParams Engine parameters.
-    * @param evaluatorClass Evaluator class.
-    * @param evaluatorParams Evaluator parameters.
-    * @param params Workflow parameters.
-    */
-  /*
-  def runEngine[EI, TD, PD, Q, P, A, ER <: AnyRef](
-      engine: Engine[TD, EI, PD, Q, P, A],
-      engineParams: EngineParams,
-      evaluatorClass
-        : Class[_ <: BaseEvaluator[EI, Q, P, A, ER]],
-      evaluatorParams: Params,
-      params: WorkflowParams
-    ) {
-  */
-    /*
-    JavaCoreWorkflow.run(
-      dataSourceClassMap = engine.dataSourceClassMap,
-      dataSourceParams = engineParams.dataSourceParams,
-      preparatorClassMap = engine.preparatorClassMap,
-      preparatorParams = engineParams.preparatorParams,
-      algorithmClassMap = engine.algorithmClassMap,
-      algorithmParamsList = engineParams.algorithmParamsList,
-      servingClassMap = engine.servingClassMap,
-      servingParams = engineParams.servingParams,
-      evaluatorClass = evaluatorClass,
-      evaluatorParams = evaluatorParams,
-      params = params
-    )
-    */
-  //}
-}
diff --git a/core/src/main/scala/io/prediction/controller/java/PJavaEngine.scala b/core/src/main/scala/io/prediction/controller/java/PJavaEngine.scala
deleted file mode 100644
index 492f7fb..0000000
--- a/core/src/main/scala/io/prediction/controller/java/PJavaEngine.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.Engine
-import io.prediction.controller.Params
-import io.prediction.controller.EngineParams
-import io.prediction.controller.IEngineFactory
-
-import java.lang.{ Iterable => JIterable }
-import java.util.{ Map => JMap }
-import java.util.HashMap
-
-import scala.collection.JavaConversions._
-
-/**
- * This class chains up the entire data process. PredictionIO uses this
- * information to create workflows and deployments. In Java, use
- * JavaEngineBuilder to conveniently instantiate an instance of this class.
- * For now it only accepts LJavaServing as the serving class.
- *
- * @param <TD> Training Data
- * @param <EI> EvaluationInfo
- * @param <PD> Prepared Data
- * @param <Q> Input Query
- * @param <P> Output Prediction
- * @param <A> Actual Value
- */
-class PJavaEngine[TD, EI, PD, Q, P, A](
-    dataSourceClass: Class[_ <: PJavaDataSource[TD, EI, Q, A]],
-    preparatorClass: Class[_ <: PJavaPreparator[TD, PD]],
-    algorithmClassMap: JMap[String, Class[_ <: PJavaAlgorithm[PD, _, Q, P]]],
-    servingClass: Class[_ <: LJavaServing[Q, P]]
-) extends Engine(
-    dataSourceClass,
-    preparatorClass,
-    Map(algorithmClassMap.toSeq: _*),
-    servingClass)
diff --git a/core/src/main/scala/io/prediction/controller/package.scala b/core/src/main/scala/io/prediction/controller/package.scala
deleted file mode 100644
index 24bddb9..0000000
--- a/core/src/main/scala/io/prediction/controller/package.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction
-
-import io.prediction.controller.EmptyParams
-
-/** Provides building blocks for writing a complete prediction engine
-  * consisting of DataSource, Preparator, Algorithm, Serving, and Metrics.
-  */
-package object controller {
-
-  /** Empty data source parameters.
-    * @group General
-    */
-  type EmptyDataSourceParams = EmptyParams
-
-  /** Empty data parameters.
-    * @group General
-    */
-  type EmptyDataParams = EmptyParams
-  
-  /** Empty evaluation info.
-    * @group General
-    */
-  type EmptyEvaluationInfo = EmptyParams
-
-  /** Empty preparator parameters.
-    * @group General
-    */
-  type EmptyPreparatorParams = EmptyParams
-
-  /** Empty algorithm parameters.
-    * @group General
-    */
-  type EmptyAlgorithmParams = EmptyParams
-
-  /** Empty serving parameters.
-    * @group General
-    */
-  type EmptyServingParams = EmptyParams
-
-  /** Empty metrics parameters.
-    * @group General
-    */
-  type EmptyMetricsParams = EmptyParams
-
-  /** Empty training data.
-    * @group General
-    */
-  type EmptyTrainingData = AnyRef
-
-  /** Empty prepared data.
-    * @group General
-    */
-  type EmptyPreparedData = AnyRef
-
-  /** Empty model.
-    * @group General
-    */
-  type EmptyModel = AnyRef
-
-  /** Empty actual result.
-    * @group General
-    */
-  type EmptyActualResult = AnyRef
-
-}
diff --git a/core/src/main/scala/io/prediction/core/AbstractDoer.scala b/core/src/main/scala/io/prediction/core/AbstractDoer.scala
index 55d7149..222864d 100644
--- a/core/src/main/scala/io/prediction/core/AbstractDoer.scala
+++ b/core/src/main/scala/io/prediction/core/AbstractDoer.scala
@@ -15,17 +15,11 @@
 
 package io.prediction.core
 
+import grizzled.slf4j.Logging
 import io.prediction.controller.Params
 
-import grizzled.slf4j.Logging
-import org.json4s._
-import org.json4s.ext.JodaTimeSerializers
-import org.json4s.native.JsonMethods._
-
-import scala.reflect._
-
 // Base class for all controllers.
-abstract class AbstractDoer extends Serializable 
+abstract class AbstractDoer extends Serializable
 
 object Doer extends Logging {
   def apply[C <: AbstractDoer] (
@@ -37,11 +31,11 @@
     // First try (1), if failed, try (2).
     try {
       val constr = cls.getConstructor(params.getClass)
-      return constr.newInstance(params).asInstanceOf[C]
+      constr.newInstance(params)
     } catch {
       case e: NoSuchMethodException => try {
         val zeroConstr = cls.getConstructor()
-        return zeroConstr.newInstance().asInstanceOf[C]
+        zeroConstr.newInstance()
       } catch {
         case e: NoSuchMethodException =>
           error(s"${params.getClass.getName} was used as the constructor " +
diff --git a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala b/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
index c5e6a8f..5d7e864 100644
--- a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
+++ b/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
@@ -15,36 +15,38 @@
 
 package io.prediction.core
 
-import io.prediction.controller.Utils
 import io.prediction.controller.Params
-
+import io.prediction.controller.Utils
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
+
 import scala.reflect._
 
-trait WithBaseQuerySerializer {
+trait BaseQuerySerializer {
   @transient lazy val querySerializer = Utils.json4sDefaultFormats
 }
 
 abstract class BaseAlgorithm[PD, M, Q : Manifest, P]
-  extends AbstractDoer with WithBaseQuerySerializer {
+  extends AbstractDoer with BaseQuerySerializer {
+  private[prediction]
   def trainBase(sc: SparkContext, pd: PD): M
 
   // Used by Evaluation
+  private[prediction]
   def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
   : RDD[(Long, P)]
 
   // Used by Deploy
+  private[prediction]
   def predictBase(bm: Any, q: Q): P
 
+  private[prediction]
   def queryManifest(): Manifest[Q] = manifest[Q]
-  
-  def makePersistentModel(sc: SparkContext, modelId: String, 
-    algoParams: Params, bm: Any)
-  : Any = Unit
 
-  // TODO(yipjustin): obsolete as of 0.8.7. cleanup.
-  def isJava: Boolean
-  def isParallel: Boolean = true
+  private[prediction]
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = Unit
 }
diff --git a/core/src/main/scala/io/prediction/core/BaseDataSource.scala b/core/src/main/scala/io/prediction/core/BaseDataSource.scala
index c0e712e..9c02db9 100644
--- a/core/src/main/scala/io/prediction/core/BaseDataSource.scala
+++ b/core/src/main/scala/io/prediction/core/BaseDataSource.scala
@@ -15,14 +15,15 @@
 
 package io.prediction.core
 
-import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import scala.reflect._
+import org.apache.spark.rdd.RDD
 
-abstract class BaseDataSource[TD, EI, Q, A]
-  extends AbstractDoer {
+abstract class BaseDataSource[TD, EI, Q, A] extends AbstractDoer {
+
+  private[prediction]
   def readTrainingBase(sc: SparkContext): TD
 
+  private[prediction]
   def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])]
+
 }
diff --git a/core/src/main/scala/io/prediction/core/BaseEngine.scala b/core/src/main/scala/io/prediction/core/BaseEngine.scala
index f92c5c0..e058b32 100644
--- a/core/src/main/scala/io/prediction/core/BaseEngine.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEngine.scala
@@ -15,34 +15,50 @@
 
 package io.prediction.core
 
-import io.prediction.controller.Utils
 import io.prediction.controller.EngineParams
-import io.prediction.controller.WorkflowParams
+import io.prediction.workflow.WorkflowParams
 
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
-import scala.reflect._
 import org.json4s.JValue
 
 abstract class BaseEngine[EI, Q, P, A] extends Serializable {
-  // Return Persist-able models
+  /** Implement this method so that training this engine produces a list of
+    * models.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single training.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of models.
+    */
   def train(
     sc: SparkContext, 
     engineParams: EngineParams,
     engineInstanceId: String,
     params: WorkflowParams): Seq[Any]
 
+  /** Implement this method so that [[io.prediction.controller.Evaluation]] can
+    * use it to generate inputs for [[io.prediction.controller.Metric]].
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of tuples, each containing evaluation information and an
+    *         RDD of (query, predicted result, actual result) tuples.
+    */
   def eval(
     sc: SparkContext, 
     engineParams: EngineParams,
-    params: WorkflowParams
-  )
-  : Seq[(EI, RDD[(Q, P, A)])]
+    params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])]
 
-  /** Can subclass this method to optimize the case where multiple evaluation is
-    * called (during tuning, for example). By default, it call [[eval]] for each
-    * element in the engine params list.
+  /** Override this method to further optimize the process that runs multiple
+    * evaluations (during tuning, for example). By default, this method calls
+    * [[eval]] for each element in the engine parameters list.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParamsList A list of [[EngineParams]] for running batch evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of engine parameters and evaluation result (from [[eval]]) tuples.
     */
   def batchEval(
     sc: SparkContext, 
@@ -54,8 +70,12 @@
     }
   }
 
-  def jValueToEngineParams(variantJson: JValue): EngineParams = {
-    throw new NotImplementedError("json to EngineParams is not implemented.")
-    EngineParams()  
-  }
+  /** Implement this method to convert a JValue (read from an engine variant
+    * JSON file) to an instance of [[EngineParams]].
+    *
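+    * A minimal sketch of an override, assuming `import org.json4s._` is in
+    * scope and `MyAlgoParams` is a hypothetical `Params` case class used only
+    * for illustration:
+    *
+    * {{{
+    * override def jValueToEngineParams(variantJson: JValue): EngineParams = {
+    *   implicit val formats = org.json4s.DefaultFormats
+    *   val algoParams = (variantJson \ "algorithms").children.head \ "params"
+    *   new EngineParams(algorithmParamsList =
+    *     Seq(("myalgo", algoParams.extract[MyAlgoParams])))
+    * }
+    * }}}
+    *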
+    * @param variantJson Content of the engine variant JSON as JValue.
+    * @return An instance of [[EngineParams]] converted from JSON.
+    */
+  def jValueToEngineParams(variantJson: JValue): EngineParams =
+    throw new NotImplementedError("JSON to EngineParams is not implemented.")
 }
diff --git a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
index c47e71c..e2e65f2 100644
--- a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
@@ -15,12 +15,12 @@
 
 package io.prediction.core
 
+import io.prediction.annotation.Experimental   
 import io.prediction.controller.EngineParams
 import io.prediction.controller.Evaluation
-import io.prediction.controller.WorkflowParams
-import scala.reflect._
+import io.prediction.workflow.WorkflowParams
+
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 abstract class BaseEvaluator[EI, Q, P, A, ER <: BaseEvaluatorResult]
@@ -41,4 +41,9 @@
   
   /** JSON portion of the rendered evaluator results. */
   def toJSON(): String = ""
+
+  /** :: Experimental ::
+    * Indicates whether this result should not be saved to the database.
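+    *
+    * A subclass may opt out of persistence by overriding this value, for
+    * example (`MyEvalResult` is a hypothetical result class):
+    * {{{
+    * case class MyEvalResult(score: Double) extends BaseEvaluatorResult {
+    *   override val noSave: Boolean = true
+    * }
+    * }}}
+    */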
+  @Experimental
+  val noSave: Boolean = false 
 }
diff --git a/core/src/main/scala/io/prediction/core/BasePreparator.scala b/core/src/main/scala/io/prediction/core/BasePreparator.scala
index 03af7f1..209ab43 100644
--- a/core/src/main/scala/io/prediction/core/BasePreparator.scala
+++ b/core/src/main/scala/io/prediction/core/BasePreparator.scala
@@ -15,13 +15,11 @@
 
 package io.prediction.core
 
-import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import scala.reflect._
 
 // Probably will add an extra parameter for ad hoc json formatter.
 abstract class BasePreparator[TD, PD]
   extends AbstractDoer {
+  private[prediction]
   def prepareBase(sc: SparkContext, td: TD): PD
 }
diff --git a/core/src/main/scala/io/prediction/core/BaseServing.scala b/core/src/main/scala/io/prediction/core/BaseServing.scala
index 6086219..c4f0317 100644
--- a/core/src/main/scala/io/prediction/core/BaseServing.scala
+++ b/core/src/main/scala/io/prediction/core/BaseServing.scala
@@ -15,12 +15,8 @@
 
 package io.prediction.core
 
-import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import scala.reflect._
-
 abstract class BaseServing[Q, P]
   extends AbstractDoer {
+  private[prediction]
   def serveBase(q: Q, ps: Seq[P]): P
 }
diff --git a/core/src/main/scala/io/prediction/package.scala b/core/src/main/scala/io/prediction/package.scala
new file mode 100644
index 0000000..93db4a7
--- /dev/null
+++ b/core/src/main/scala/io/prediction/package.scala
@@ -0,0 +1,158 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction
+
+/** Provides building blocks for writing a complete prediction engine
+  * consisting of DataSource, Preparator, Algorithm, Serving, and Metrics.
+  *
+  * == Start Building an Engine ==
+  * The starting point of a prediction engine is the [[Engine]] class.
+  *
+  * == The DASE Paradigm ==
+  * The building blocks together form the DASE paradigm. Learn more about DASE
+  * [[http://docs.prediction.io/customize/ here]].
+  *
+  * == Types of Building Blocks ==
+  * Depending on the problem you are solving, you will need to pick the
+  * appropriate flavors of building blocks.
+  *
+  * === Engines ===
+  * There are 3 typical engine configurations:
+  *
+  *  1. [[PDataSource]], [[PPreparator]], [[P2LAlgorithm]], [[LServing]]
+  *  1. [[PDataSource]], [[PPreparator]], [[PAlgorithm]], [[LServing]]
+  *  1. [[LDataSource]], [[LPreparator]], [[LAlgorithm]], [[LServing]]
+  *
+  * In both configurations 1 and 2, data is sourced and prepared in a
+  * parallelized fashion, with the data represented as RDDs.
+  *
+  * The difference between configurations 1 and 2 comes at the algorithm stage.
+  * In configuration 1, the algorithm operates on potentially large data as RDDs
+  * in the Spark cluster, and eventually outputs a model that is small enough to
+  * fit in a single machine.
+  *
+  * On the other hand, configuration 2 outputs a model that is potentially too
+  * large to fit in a single machine, and must reside in the Spark cluster as
+  * RDD(s).
+  *
+  * With configuration 1 ([[P2LAlgorithm]]), PredictionIO will automatically
+  * try to persist the model to local disk or HDFS if the model is serializable.
+  *
+  * With configuration 2 ([[PAlgorithm]]), PredictionIO will not automatically
+  * try to persist the model, unless the model implements the [[PersistentModel]]
+  * trait.
+  *
+  * In special circumstances where both the data and the model are small,
+  * configuration 3 may be used. Beware that RDDs cannot be used with
+  * configuration 3.
+  *
+  * === Data Source ===
+  * [[PDataSource]] is probably the most commonly used data source base class,
+  * with the ability to process RDD-based data. [[LDataSource]] '''cannot'''
+  * handle RDD-based data; use it only when you have a special requirement.
+  *
+  * === Preparator ===
+  * With [[PDataSource]], you must pick [[PPreparator]]. The same applies to
+  * [[LDataSource]] and [[LPreparator]].
+  *
+  * === Algorithm ===
+  * The workhorse of the engine comes in 3 different flavors.
+  *
+  * ==== P2LAlgorithm ====
+  * Produces a model that is small enough to fit in a single machine from
+  * [[PDataSource]] and [[PPreparator]]. The model '''cannot''' contain any RDD.
+  * If the produced model is serializable, PredictionIO will try to
+  * automatically persist it. In addition, [[P2LAlgorithm#batchPredict]] is
+  * already implemented for [[Evaluation]] purposes.
+  *
+  * ==== PAlgorithm ====
+  * Produces a model that could contain RDDs from [[PDataSource]] and
+  * [[PPreparator]]. PredictionIO will not try to persist it automatically
+  * unless the model implements [[PersistentModel]]. [[PAlgorithm.batchPredict]]
+  * must be implemented for [[Evaluation]].
+  *
+  * ==== LAlgorithm ====
+  * Produces a model that is small enough to fit in a single machine from
+  * [[LDataSource]] and [[LPreparator]]. The model '''cannot''' contain any RDD.
+  * If the produced model is serializable, PredictionIO will try to
+  * automatically persist it. In addition, [[LAlgorithm#batchPredict]] is
+  * already implemented for [[Evaluation]] purposes.
+  *
+  * === Serving ===
+  * The serving component comes in only one flavor: [[LServing]]. At the
+  * serving stage, it is assumed that the result being served is already at a
+  * human-consumable size.
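+  *
+  * === Putting It Together ===
+  * A minimal sketch of an engine factory that wires the components together.
+  * `MyDataSource`, `MyPreparator`, `MyAlgorithm`, and `MyServing` are
+  * hypothetical classes extending the corresponding base classes above:
+  *
+  * {{{
+  * object MyEngineFactory extends EngineFactory {
+  *   def apply() = new Engine(
+  *     classOf[MyDataSource],
+  *     classOf[MyPreparator],
+  *     Map("myalgo" -> classOf[MyAlgorithm]),
+  *     classOf[MyServing])
+  * }
+  * }}}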
+  */
+package object controller {
+
+  class SerializableClass() extends Serializable
+
+  /** Empty data source parameters.
+    * @group Helper
+    */
+  type EmptyDataSourceParams = EmptyParams
+
+  /** Empty data parameters.
+    * @group Helper
+    */
+  type EmptyDataParams = EmptyParams
+  
+  /** Empty evaluation info.
+    * @group Helper
+    */
+  type EmptyEvaluationInfo = SerializableClass
+
+  /** Empty preparator parameters.
+    * @group Helper
+    */
+  type EmptyPreparatorParams = EmptyParams
+
+  /** Empty algorithm parameters.
+    * @group Helper
+    */
+  type EmptyAlgorithmParams = EmptyParams
+
+  /** Empty serving parameters.
+    * @group Helper
+    */
+  type EmptyServingParams = EmptyParams
+
+  /** Empty metrics parameters.
+    * @group Helper
+    */
+  type EmptyMetricsParams = EmptyParams
+
+  /** Empty training data.
+    * @group Helper
+    */
+  type EmptyTrainingData = SerializableClass
+
+  /** Empty prepared data.
+    * @group Helper
+    */
+  type EmptyPreparedData = SerializableClass
+
+  /** Empty model.
+    * @group Helper
+    */
+  type EmptyModel = SerializableClass
+
+  /** Empty actual result.
+    * @group Helper
+    */
+  type EmptyActualResult = SerializableClass
+
+}
diff --git a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
index 40edd98..d444224 100644
--- a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
@@ -15,12 +15,8 @@
 
 package io.prediction.workflow
 
-import com.github.nscala_time.time.Imports.DateTime
-import com.twitter.chill.KryoInjection
-import grizzled.slf4j.Logger
 import io.prediction.controller.EngineParams
 import io.prediction.controller.Evaluation
-import io.prediction.controller.WorkflowParams
 import io.prediction.core.BaseEngine
 import io.prediction.core.BaseEvaluator
 import io.prediction.core.BaseEvaluatorResult
@@ -29,6 +25,9 @@
 import io.prediction.data.storage.Model
 import io.prediction.data.storage.Storage
 
+import com.github.nscala_time.time.Imports.DateTime
+import grizzled.slf4j.Logger
+
 import scala.language.existentials
 
 /** CoreWorkflow handles PredictionIO metadata and environment variables of
@@ -67,10 +66,12 @@
 
       val instanceId = Storage.getMetaDataEngineInstances
 
+      val kryo = KryoInstantiator.newKryoInjection
+      
       logger.info("Inserting persistent model")
       Storage.getModelDataModels.insert(Model(
         id = engineInstance.id,
-        models = KryoInjection(models)))
+        models = kryo(models)))
 
       logger.info("Updating engine instance")
       val engineInstances = Storage.getMetaDataEngineInstances
@@ -123,18 +124,23 @@
       engineParamsList,
       evaluator,
       params)
-    val evaluatedEvaluationInstance = evaluationInstance.copy(
-      status = "EVALCOMPLETED",
-      id = evaluationInstanceId,
-      endTime = DateTime.now,
-      evaluatorResults = evaluatorResult.toOneLiner,
-      evaluatorResultsHTML = evaluatorResult.toHTML,
-      evaluatorResultsJSON = evaluatorResult.toJSON
-    )
 
-    logger.info(s"Updating evaluation instance with result: $evaluatorResult")
+    if (evaluatorResult.noSave) { 
+      logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
+    } else {
+      val evaluatedEvaluationInstance = evaluationInstance.copy(
+        status = "EVALCOMPLETED",
+        id = evaluationInstanceId,
+        endTime = DateTime.now,
+        evaluatorResults = evaluatorResult.toOneLiner,
+        evaluatorResultsHTML = evaluatorResult.toHTML,
+        evaluatorResultsJSON = evaluatorResult.toJSON
+      )
 
-    evaluationInstances.update(evaluatedEvaluationInstance)
+      logger.info(s"Updating evaluation instance with result: $evaluatorResult")
+
+      evaluationInstances.update(evaluatedEvaluationInstance)
+    }
 
     logger.debug("Stop SparkContext")
 
diff --git a/core/src/main/scala/io/prediction/workflow/CreateServer.scala b/core/src/main/scala/io/prediction/workflow/CreateServer.scala
index 7745f30..b72f718 100644
--- a/core/src/main/scala/io/prediction/workflow/CreateServer.scala
+++ b/core/src/main/scala/io/prediction/workflow/CreateServer.scala
@@ -15,6 +15,17 @@
 
 package io.prediction.workflow
 
+import io.prediction.controller.Engine
+import io.prediction.controller.Params
+import io.prediction.controller.Utils
+import io.prediction.controller.WithPrId
+import io.prediction.core.BaseAlgorithm
+import io.prediction.core.BaseServing
+import io.prediction.core.Doer
+import io.prediction.data.storage.EngineInstance
+import io.prediction.data.storage.EngineManifest
+import io.prediction.data.storage.Storage
+
 import akka.actor._
 import akka.event.Logging
 import akka.io.IO
@@ -22,24 +33,12 @@
 import akka.util.Timeout
 import com.github.nscala_time.time.Imports.DateTime
 import com.google.gson.Gson
+import com.twitter.bijection.Injection
 import com.twitter.chill.KryoBase
 import com.twitter.chill.KryoInjection
 import com.twitter.chill.ScalaKryoInstantiator
+import de.javakaffee.kryoserializers.SynchronizedCollectionsSerializer
 import grizzled.slf4j.Logging
-import io.prediction.controller.Engine
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.controller.WithPrId
-import io.prediction.controller.WorkflowParams
-import io.prediction.controller.java.LJavaAlgorithm
-import io.prediction.controller.java.LJavaServing
-import io.prediction.controller.java.PJavaAlgorithm
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.Storage
 import org.json4s._
 import org.json4s.native.JsonMethods._
 import org.json4s.native.Serialization.write
@@ -57,26 +56,33 @@
 import scala.util.Random
 import scala.util.Success
 
-import java.io.PrintWriter
-import java.io.StringWriter
+import java.io.{Serializable, PrintWriter, StringWriter}
 
 class KryoInstantiator(classLoader: ClassLoader) extends ScalaKryoInstantiator {
   override def newKryo(): KryoBase = {
     val kryo = super.newKryo()
     kryo.setClassLoader(classLoader)
+    SynchronizedCollectionsSerializer.registerSerializers(kryo)
     kryo
   }
 }
 
+/** Creates Kryo injections for serializing and deserializing models. The
+  * injection is bound to this class loader and picks up the serializers
+  * registered by the KryoInstantiator class above. */
+object KryoInstantiator extends Serializable {
+  def newKryoInjection : Injection[Any, Array[Byte]] = {
+    val kryoInstantiator = new KryoInstantiator(getClass.getClassLoader)
+    KryoInjection.instance(kryoInstantiator)
+  }
+}
+
 case class ServerConfig(
   batch: String = "",
   engineInstanceId: String = "",
   engineId: Option[String] = None,
   engineVersion: Option[String] = None,
-  ip: String = "localhost",
+  ip: String = "0.0.0.0",
   port: Int = 8000,
   feedback: Boolean = false,
-  eventServerIp: String = "localhost",
+  eventServerIp: String = "0.0.0.0",
   eventServerPort: Int = 7070,
   accessKey: Option[String] = None,
   logUrl: Option[String] = None,
@@ -110,7 +116,7 @@
       } text("Engine version.")
       opt[String]("ip") action { (x, c) =>
         c.copy(ip = x)
-      } text("IP to bind to (default: localhost).")
+      }
       opt[Int]("port") action { (x, c) =>
         c.copy(port = x)
       } text("Port to bind to (default: 8000).")
@@ -122,7 +128,7 @@
       } text("Enable feedback loop to event server.")
       opt[String]("event-server-ip") action { (x, c) =>
         c.copy(eventServerIp = x)
-      } text("Event server IP. Default: localhost")
+      }
       opt[Int]("event-server-port") action { (x, c) =>
         c.copy(eventServerPort = x)
       } text("Event server port. Default: 7070")
@@ -190,8 +196,7 @@
 
     val engineParams = engine.engineInstanceToEngineParams(engineInstance)
 
-    val kryoInstantiator = new KryoInstantiator(getClass.getClassLoader)
-    val kryo = KryoInjection.instance(kryoInstantiator)
+    val kryo = KryoInstantiator.newKryoInjection
 
     val modelsFromEngineInstance =
       kryo.invert(modeldata.get(engineInstance.id).get.models).get.
@@ -387,7 +392,6 @@
   val serverStartTime = DateTime.now
   lazy val gson = new Gson
   val log = Logging(context.system, this)
-  val (javaAlgorithms, scalaAlgorithms) = algorithms.partition(_.isJava)
 
   var requestCount: Int = 0
   var avgServingSec: Double = 0.0
@@ -463,37 +467,16 @@
               val servingStartTime = DateTime.now
 
               val queryTime = DateTime.now
-              val javaQuery = javaAlgorithms.headOption map { alg =>
-                val queryClass = if (
-                  alg.isInstanceOf[LJavaAlgorithm[_, _, Q, P]]) {
-                  alg.asInstanceOf[LJavaAlgorithm[_, _, Q, P]].queryClass
-                } else {
-                  alg.asInstanceOf[PJavaAlgorithm[_, _, Q, P]].queryClass
-                }
-                gson.fromJson(queryString, queryClass)
-              }
-              val scalaQuery = scalaAlgorithms.headOption map { alg =>
-                Extraction.extract(parse(queryString))(
-                  alg.querySerializer, alg.queryManifest)
-              }
+              val query = Extraction.extract(parse(queryString))(
+                algorithms.head.querySerializer, algorithms.head.queryManifest())
               val predictions = algorithms.zipWithIndex.map { case (a, ai) =>
-                if (a.isJava) {
-                  a.predictBase(models(ai), javaQuery.get)
-                } else {
-                  a.predictBase(models(ai), scalaQuery.get)
-                }
+                a.predictBase(models(ai), query)
               }
-              val r = if (serving.isInstanceOf[LJavaServing[Q, P]]) {
-                val prediction = serving.serveBase(javaQuery.get, predictions)
-                // parse to Json4s JObject for later merging with prId
-                (parse(gson.toJson(prediction)), prediction, javaQuery.get)
-              } else {
-                val prediction = serving.serveBase(scalaQuery.get, predictions)
-                (Extraction.decompose(prediction)(
-                  scalaAlgorithms.head.querySerializer),
-                  prediction,
-                  scalaQuery.get)
-              }
+              val prediction = serving.serveBase(query, predictions)
+              val r = (Extraction.decompose(prediction)(
+                algorithms.head.querySerializer),
+                prediction,
+                query)
               /** Handle feedback to Event Server
                 * Send the following back to the Event Server
                 * - appId
@@ -504,25 +487,27 @@
                 */
               val result = if (feedbackEnabled) {
                 implicit val formats =
-                  if (!scalaAlgorithms.isEmpty) {
-                    scalaAlgorithms.head.querySerializer
-                  } else {
+                  algorithms.headOption map { alg =>
+                    alg.querySerializer
+                  } getOrElse {
                     Utils.json4sDefaultFormats
                   }
                 // val genPrId = Random.alphanumeric.take(64).mkString
                 def genPrId: String = Random.alphanumeric.take(64).mkString
-                val newPrId = if (r._2.isInstanceOf[WithPrId]) {
-                  val org = r._2.asInstanceOf[WithPrId].prId
-                  if (org.isEmpty) genPrId else org
-                } else genPrId
+                val newPrId = r._2 match {
+                  case id: WithPrId =>
+                    val org = id.prId
+                    if (org.isEmpty) genPrId else org
+                  case _ => genPrId
+                }
 
                 // also save Query's prId as prId of this pio_pr predict events
                 val queryPrId =
-                  if (r._3.isInstanceOf[WithPrId]) {
-                    Map("prId" ->
-                      r._3.asInstanceOf[WithPrId].prId)
-                  } else {
-                    Map()
+                  r._3 match {
+                    case id: WithPrId =>
+                      Map("prId" -> id.prId)
+                    case _ =>
+                      Map()
                   }
                 val data = Map(
                   // "appId" -> dataSourceParams.asInstanceOf[ParamsWithAppId].appId,
diff --git a/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
index 5932598..519ce1e 100644
--- a/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
@@ -15,17 +15,16 @@
 
 package io.prediction.workflow
 
-import com.github.nscala_time.time.Imports._
-import com.google.common.io.ByteStreams
-import grizzled.slf4j.Logging
 import io.prediction.controller.Engine
 import io.prediction.controller.Utils
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
 import io.prediction.core.BaseEngine
 import io.prediction.data.storage.EngineInstance
 import io.prediction.data.storage.EvaluationInstance
 import io.prediction.data.storage.Storage
+
+import com.github.nscala_time.time.Imports._
+import com.google.common.io.ByteStreams
+import grizzled.slf4j.Logging
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FileSystem
 import org.apache.hadoop.fs.Path
diff --git a/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala b/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
index 05f76c2..ed70d87 100644
--- a/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
@@ -15,55 +15,16 @@
 
 package io.prediction.workflow
 
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.Engine
 import io.prediction.controller.EngineParams
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.LAlgorithm
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Metric
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.controller.NiceRendering
-import io.prediction.controller.SanityCheck
-import io.prediction.controller.WorkflowParams
 import io.prediction.controller.Evaluation
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseDataSource
 import io.prediction.core.BaseEvaluator
 import io.prediction.core.BaseEvaluatorResult
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseServing
 import io.prediction.core.BaseEngine
-import io.prediction.core.Doer
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineInstances
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Storage
 
-import com.github.nscala_time.time.Imports.DateTime
-import com.twitter.chill.KryoInjection
-import grizzled.slf4j.{ Logger, Logging }
+import grizzled.slf4j.Logger
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-import org.json4s._
-import org.json4s.native.Serialization.write
 
-import scala.collection.JavaConversions._
 import scala.language.existentials
-import scala.reflect.ClassTag
-import scala.reflect.Manifest
-
-import java.io.FileOutputStream
-import java.io.ObjectOutputStream
-import java.io.FileInputStream
-import java.io.ObjectInputStream
-import java.lang.{ Iterable => JIterable }
-import java.util.{ HashMap => JHashMap, Map => JMap }
-import org.json4s._
-import org.json4s.native.Serialization.write
 
 object EvaluationWorkflow {
   @transient lazy val logger = Logger[this.type]
diff --git a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
new file mode 100644
index 0000000..4d6e166
--- /dev/null
+++ b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
@@ -0,0 +1,91 @@
+package io.prediction.workflow
+
+import io.prediction.annotation.Experimental   
+// FIXME(yipjustin): Remove wildcard import.
+import io.prediction.core._
+import io.prediction.controller._
+
+import grizzled.slf4j.Logger
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+
+@Experimental
+private[prediction] class FakeEngine
+extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
+  @transient lazy val logger = Logger[this.type]
+
+  def train(
+    sc: SparkContext, 
+    engineParams: EngineParams,
+    engineInstanceId: String,
+    params: WorkflowParams): Seq[Any] = {
+    throw new StopAfterReadInterruption()
+  }
+
+  def eval(
+    sc: SparkContext, 
+    engineParams: EngineParams,
+    params: WorkflowParams)
+  : Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])] = {
+    Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])]()
+  }
+}
+
+@Experimental
+private[prediction] class FakeRunner(f: (SparkContext => Unit))
+    extends BaseEvaluator[EmptyParams, EmptyParams, EmptyParams, EmptyParams,
+      FakeEvalResult] {
+  @transient private lazy val logger = Logger[this.type]
+  def evaluateBase(
+    sc: SparkContext,
+    evaluation: Evaluation,
+    engineEvalDataSet: 
+        Seq[(EngineParams, Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])])],
+    params: WorkflowParams): FakeEvalResult = {
+    f(sc)
+    FakeEvalResult()
+  }
+}
+
+@Experimental
+private[prediction] case class FakeEvalResult() extends BaseEvaluatorResult {
+  override val noSave: Boolean = true 
+}
+
+/** FakeRun allows users to implement a custom function that runs under exactly
+  * the same environment as any other PredictionIO workflow.
+  *
+  * Useful for developing new features. Simply extend this trait and implement
+  * a function of type (SparkContext => Unit). For example, the code below
+  * can be run with `pio eval HelloWorld`.
+  *
+  * {{{
+  * object HelloWorld extends FakeRun {
+  *   // func defines the function pio runs, must have signature (SparkContext => Unit).
+  *   func = f
+  * 
+  *   def f(sc: SparkContext): Unit = {
+  *     val logger = Logger[this.type]
+  *     logger.info("HelloWorld")
+  *   }
+  * }
+  * }}} 
+  * 
+  */
+@Experimental
+trait FakeRun extends Evaluation with EngineParamsGenerator {
+  private[this] var _runner: FakeRunner = _
+
+  def runner: FakeRunner = _runner
+  def runner_=(r: FakeRunner) { 
+    engineEvaluator = (new FakeEngine(), r) 
+    engineParamsList = Seq(new EngineParams())
+  }
+
+  def func: (SparkContext => Unit) = { (sc: SparkContext) => Unit }
+  def func_=(f: SparkContext => Unit) {
+    runner = new FakeRunner(f)
+  }
+}
diff --git a/core/src/main/scala/io/prediction/controller/Workflow.scala b/core/src/main/scala/io/prediction/workflow/Workflow.scala
similarity index 79%
rename from core/src/main/scala/io/prediction/controller/Workflow.scala
rename to core/src/main/scala/io/prediction/workflow/Workflow.scala
index 96cd070..5b3390e 100644
--- a/core/src/main/scala/io/prediction/controller/Workflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/Workflow.scala
@@ -13,41 +13,16 @@
   * limitations under the License.
   */
 
-package io.prediction.controller
+package io.prediction.workflow
 
 import io.prediction.annotation.Experimental
+import io.prediction.controller.EngineParams
+import io.prediction.controller.EngineParamsGenerator
+import io.prediction.controller.Evaluation
 import io.prediction.core.BaseEngine
 import io.prediction.core.BaseEvaluator
 import io.prediction.core.BaseEvaluatorResult
 import io.prediction.data.storage.EvaluationInstance
-import io.prediction.workflow.CoreWorkflow
-import io.prediction.workflow.WorkflowUtils
-
-/** Workflow parameters.
-  *
-  * @param batch Batch label of the run.
-  * @param verbose Verbosity level.
-  * @param saveModel Controls whether trained models are persisted.
-  * @param sparkEnv Spark properties that will be set in SparkConf.setAll().
-  * @param skipSanityCheck Skips all data sanity check.
-  * @param stopAfterRead Stops workflow after reading from data source.
-  * @param stopAfterPrepare Stops workflow after data preparation.
-  * @group Workflow
-  */
-case class WorkflowParams(
-  batch: String = "",
-  verbose: Int = 2,
-  saveModel: Boolean = true,
-  sparkEnv: Map[String, String] =
-    Map[String, String]("spark.executor.extraClassPath" -> "."),
-  skipSanityCheck: Boolean = false,
-  stopAfterRead: Boolean = false,
-  stopAfterPrepare: Boolean = false) {
-  // Temporary workaround for WorkflowParamsBuilder for Java. It doesn't support
-  // custom spark environment yet.
-  def this(batch: String, verbose: Int, saveModel: Boolean)
-  = this(batch, verbose, saveModel, Map[String, String]())
-}
 
 /** Collection of workflow creation methods.
   * @group Workflow
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala b/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
index 52d31e9..264c757 100644
--- a/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
+++ b/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
@@ -15,64 +15,11 @@
 
 package io.prediction.workflow
 
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.Engine
-import io.prediction.controller.EngineParams
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.LAlgorithm
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Metric
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.controller.NiceRendering
-import io.prediction.controller.SanityCheck
-/*
-import io.prediction.controller.java.LJavaDataSource
-import io.prediction.controller.java.LJavaPreparator
-import io.prediction.controller.java.LJavaAlgorithm
-import io.prediction.controller.java.LJavaServing
-import io.prediction.controller.java.JavaEvaluator
-import io.prediction.controller.java.JavaUtils
-import io.prediction.controller.java.JavaEngine
-import io.prediction.controller.java.PJavaAlgorithm
-*/
-import io.prediction.controller.WorkflowParams
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseServing
-import io.prediction.core.BaseEngine
-import io.prediction.core.Doer
-// import io.prediction.core.LModelAlgorithm
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineInstances
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Storage
-
-import com.github.nscala_time.time.Imports.DateTime
-import com.twitter.chill.KryoInjection
-import grizzled.slf4j.{ Logger, Logging }
+import grizzled.slf4j.Logging
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-import org.json4s._
-import org.json4s.native.Serialization.write
-import org.json4s.native.Serialization.writePretty
 
-
-import scala.collection.JavaConversions._
 import scala.language.existentials
-import scala.reflect.ClassTag
-import scala.reflect.Manifest
-
-import java.io.FileOutputStream
-import java.io.ObjectOutputStream
-import java.io.FileInputStream
-import java.io.ObjectInputStream
-import java.lang.{ Iterable => JIterable }
-import java.util.{ HashMap => JHashMap, Map => JMap }
 
 // FIXME: move to better location.
 object WorkflowContext extends Logging {
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala b/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala
new file mode 100644
index 0000000..88ec54e
--- /dev/null
+++ b/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.workflow
+
+/** Workflow parameters.
+  *
+  * @param batch Batch label of the run.
+  * @param verbose Verbosity level.
+  * @param saveModel Controls whether trained models are persisted.
+  * @param sparkEnv Spark properties that will be set in SparkConf.setAll().
+  * @param skipSanityCheck Skips all data sanity check.
+  * @param stopAfterRead Stops workflow after reading from data source.
+  * @param stopAfterPrepare Stops workflow after data preparation.
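+  *
+  * A minimal construction sketch (the values below are illustrative only):
+  *
+  * {{{
+  * val workflowParams = WorkflowParams(
+  *   batch = "my batch",
+  *   verbose = 3,
+  *   saveModel = false)
+  * }}}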
+  * @group Workflow
+  */
+case class WorkflowParams(
+  batch: String = "",
+  verbose: Int = 2,
+  saveModel: Boolean = true,
+  sparkEnv: Map[String, String] =
+    Map[String, String]("spark.executor.extraClassPath" -> "."),
+  skipSanityCheck: Boolean = false,
+  stopAfterRead: Boolean = false,
+  stopAfterPrepare: Boolean = false) {
+  // Temporary workaround for WorkflowParamsBuilder for Java. It doesn't support
+  // custom spark environment yet.
+  def this(batch: String, verbose: Int, saveModel: Boolean)
+  = this(batch, verbose, saveModel, Map[String, String]())
+}
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala b/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
index 2c7f9db..e94f785 100644
--- a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
+++ b/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
@@ -15,35 +15,34 @@
 
 package io.prediction.workflow
 
+import java.io.File
+import java.io.FileNotFoundException
+
+import io.prediction.controller.EmptyParams
+import io.prediction.controller.EngineFactory
 import io.prediction.controller.EngineParamsGenerator
 import io.prediction.controller.Evaluation
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.controller.EmptyParams
 import io.prediction.controller.Params
+import io.prediction.controller.PersistentModelLoader
 import io.prediction.controller.Utils
 import io.prediction.core.BuildInfo
 
 import com.google.gson.Gson
 import com.google.gson.JsonSyntaxException
 import grizzled.slf4j.Logging
+import org.apache.log4j.Level
+import org.apache.log4j.LogManager
 import org.apache.spark.SparkContext
-import org.json4s.JValue
+import org.apache.spark.api.java.JavaRDDLike
+import org.apache.spark.rdd.RDD
 import org.json4s.JsonAST.JValue
 import org.json4s._
 import org.json4s.native.JsonMethods._
-import org.apache.log4j.Level
-import org.apache.log4j.LogManager
-import org.apache.spark.api.java.JavaRDDLike
-import org.apache.spark.rdd.RDD
 
 import scala.io.Source
 import scala.language.existentials
 import scala.reflect.runtime.universe
 
-import java.io.File
-import java.io.FileNotFoundException
-
 /** Collection of reusable workflow related utilities. */
 object WorkflowUtils extends Logging {
   @transient private lazy val gson = new Gson
@@ -58,20 +57,20 @@
     * @throws NoSuchMethodException
     *         Thrown when engine factory's apply() method is not implemented.
     */
-  def getEngine(engine: String, cl: ClassLoader): (EngineLanguage.Value, IEngineFactory) = {
+  def getEngine(engine: String, cl: ClassLoader): (EngineLanguage.Value, EngineFactory) = {
     val runtimeMirror = universe.runtimeMirror(cl)
     val engineModule = runtimeMirror.staticModule(engine)
     val engineObject = runtimeMirror.reflectModule(engineModule)
     try {
       (
         EngineLanguage.Scala,
-        engineObject.instance.asInstanceOf[IEngineFactory]
+        engineObject.instance.asInstanceOf[EngineFactory]
       )
     } catch {
       case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
         (
           EngineLanguage.Java,
-          Class.forName(engine).newInstance.asInstanceOf[IEngineFactory]
+          Class.forName(engine).newInstance.asInstanceOf[EngineFactory]
         )
       }
     }
@@ -364,7 +363,7 @@
     val pmmModule = runtimeMirror.staticModule(pmm.className)
     val pmmObject = runtimeMirror.reflectModule(pmmModule)
     try {
-      pmmObject.instance.asInstanceOf[IPersistentModelLoader[AP, M]](
+      pmmObject.instance.asInstanceOf[PersistentModelLoader[AP, M]](
         runId,
         params,
         sc)
diff --git a/core/src/test/scala/io/prediction/controller/EngineTest.scala b/core/src/test/scala/io/prediction/controller/EngineTest.scala
index cae0fbb..42fa218 100644
--- a/core/src/test/scala/io/prediction/controller/EngineTest.scala
+++ b/core/src/test/scala/io/prediction/controller/EngineTest.scala
@@ -1,27 +1,18 @@
 package io.prediction.controller
 
+import io.prediction.workflow.PersistentModelManifest
+import io.prediction.workflow.SharedSparkContext
+import io.prediction.workflow.StopAfterPrepareInterruption
+import io.prediction.workflow.StopAfterReadInterruption
+
+import grizzled.slf4j.Logger
+import io.prediction.workflow.WorkflowParams
+import org.apache.spark.rdd.RDD
+import org.scalatest.Inspectors._
+import org.scalatest.Matchers._
 import org.scalatest.FunSuite
 import org.scalatest.Inside
-import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
 
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-
-import io.prediction.controller._
-import io.prediction.core._
-import io.prediction.workflow.SharedSparkContext
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import grizzled.slf4j.{ Logger, Logging }
-
-import _root_.java.lang.Thread
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
 import scala.util.Random
 
 class EngineSuite
diff --git a/core/src/test/scala/io/prediction/controller/EvaluationTest.scala b/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
index 685bc12..5dc4c86 100644
--- a/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
+++ b/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
@@ -1,26 +1,14 @@
 package io.prediction.controller
 
+import io.prediction.workflow.SharedSparkContext
+
 import org.scalatest.FunSuite
 import org.scalatest.Inside
 import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
 
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
 import org.apache.spark.rdd.RDD
 
-import _root_.java.lang.Thread
-
-import io.prediction.controller._
-import io.prediction.core._
-import io.prediction.workflow.SharedSparkContext
-import grizzled.slf4j.{ Logger, Logging }
-
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
-
 object EvaluationSuite {
   import io.prediction.controller.TestEvaluator._
 
diff --git a/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala b/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
index 623c464..c57bd03 100644
--- a/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
+++ b/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
@@ -1,31 +1,16 @@
 package io.prediction.controller
 
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
+import io.prediction.core._
+import io.prediction.workflow.WorkflowParams
 
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
 import org.apache.spark.rdd.RDD
 
-import _root_.java.lang.Thread
-
-import io.prediction.controller._
-import io.prediction.core._
-import io.prediction.workflow.SharedSparkContext
-import grizzled.slf4j.{ Logger, Logging }
-
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
-
 object TestEvaluator {
-  case class EvalInfo(val id: Int, val ex: Int) 
-  case class Query(val id: Int, val ex: Int, val qx: Int) 
-  case class Prediction(val id: Int, val ex: Int, val qx: Int)
-  case class Actual(val id: Int, val ex: Int, val qx: Int)
+  case class EvalInfo(id: Int, ex: Int)
+  case class Query(id: Int, ex: Int, qx: Int)
+  case class Prediction(id: Int, ex: Int, qx: Int)
+  case class Actual(id: Int, ex: Int, qx: Int)
 
   class FakeEngine(val id: Int, val en: Int, val qn: Int)
   extends BaseEngine[EvalInfo, Query, Prediction, Actual] {
diff --git a/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala b/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
index 94ad97b..d196854 100644
--- a/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
+++ b/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
@@ -1,29 +1,14 @@
 package io.prediction.controller
 
+import io.prediction.workflow.WorkflowParams
 import org.scalatest.FunSuite
 import org.scalatest.Inside
 import org.scalatest.Matchers._
 import org.scalatest.Inspectors._
 
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-
-import io.prediction.core._
 import io.prediction.workflow.SharedSparkContext
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.StopAfterPrepareInterruption
-import grizzled.slf4j.{ Logger, Logging }
 
-import _root_.java.lang.Thread
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
-import scala.util.Random
-
-class FastEngineDevSuite
+class FastEngineSuite
 extends FunSuite with Inside with SharedSparkContext {
   import io.prediction.controller.Engine0._
   
diff --git a/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala b/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
new file mode 100644
index 0000000..71fcb88
--- /dev/null
+++ b/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.workflow.SharedSparkContext
+import io.prediction.workflow.WorkflowParams
+import org.scalatest.FunSuite
+
+object MetricEvaluatorSuite {
+  case class Metric0() extends SumMetric[EmptyParams, Int, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Int = q
+  }
+
+  object Evaluation0 extends Evaluation {}
+}
+
+class MetricEvaluatorDevSuite extends FunSuite with SharedSparkContext {
+  import io.prediction.controller.MetricEvaluatorSuite._
+
+  test("a") {
+    val metricEvaluator = MetricEvaluator(
+      Metric0(),
+      Seq(Metric0(), Metric0())
+    )
+ 
+    val engineEvalDataSet = Seq(
+      (EngineParams(), Seq(
+        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))),
+      (EngineParams(), Seq(
+        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))))
+
+    val r = metricEvaluator.evaluateBase(
+      sc,
+      Evaluation0,
+      engineEvalDataSet,
+      WorkflowParams())
+
+  }
+}
diff --git a/core/src/test/scala/io/prediction/controller/MetricTest.scala b/core/src/test/scala/io/prediction/controller/MetricTest.scala
new file mode 100644
index 0000000..b846548
--- /dev/null
+++ b/core/src/test/scala/io/prediction/controller/MetricTest.scala
@@ -0,0 +1,143 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.controller
+
+import io.prediction.workflow.SharedSparkContext
+
+import grizzled.slf4j.Logger
+import org.scalatest.Matchers._
+import org.scalatest.FunSuite
+import org.scalatest.Inside
+
+object MetricDevSuite {
+  class QIntSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Int = q
+  }
+  
+  class QDoubleSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Double] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QAverageMetric extends AverageMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QOptionAverageMetric extends OptionAverageMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
+      if (q < 0) { None } else { Some(q.toDouble) }
+    }
+  }
+  
+  class QStdevMetric extends StdevMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QOptionStdevMetric extends OptionStdevMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
+      if (q < 0) { None } else { Some(q.toDouble) }
+    }
+  }
+  
+}
+
+class MetricDevSuite
+extends FunSuite with Inside with SharedSparkContext {
+  @transient lazy val logger = Logger[this.type] 
+  
+  test("Average Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QAverageMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe (21.0 / 6)
+  }
+  
+  test("Option Average Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((-4, 0, 0), (-5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QOptionAverageMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe (12.0 / 4)
+  }
+  
+  test("Stdev Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
+    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QStdevMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 2.0
+  }
+  
+  test("Option Stdev Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
+    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0), (-5, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QOptionStdevMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 2.0
+  }
+
+  test("Sum Metric [Int]") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QIntSumMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 21
+  }
+
+  test("Sum Metric [Double]") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QDoubleSumMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 21.0
+  }
+}
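
For reference, the figures asserted in MetricDevSuite above are consistent with the metric helpers aggregating calculate(q, p, a) over every (query, prediction, actual) tuple across all evaluation data sets, with the Option* variants dropping None results first, and the asserted 2.0 being the population standard deviation of four 1s and four 5s. A minimal plain-Scala check of those numbers (no Spark or PredictionIO classes involved) might look like this:

```scala
// Sanity check of the values asserted in MetricDevSuite, assuming the
// aggregation semantics described above.
object MetricFigures extends App {
  val sumInput = Seq(1, 2, 3, 4, 5, 6).map(_.toDouble)
  println(sumInput.sum)                                // 21.0 -> Sum Metric tests
  println(sumInput.sum / sumInput.size)                // 3.5  -> Average Metric (21.0 / 6)

  val optionAvgKept = Seq(1, 2, 3, 6).map(_.toDouble)  // -4 and -5 map to None
  println(optionAvgKept.sum / optionAvgKept.size)      // 3.0  -> Option Average (12.0 / 4)

  val stdevInput = Seq(1, 1, 1, 1, 5, 5, 5, 5).map(_.toDouble)
  val mean = stdevInput.sum / stdevInput.size
  val variance = stdevInput.map(x => math.pow(x - mean, 2)).sum / stdevInput.size
  println(math.sqrt(variance))                         // 2.0  -> Stdev Metric tests
}
```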
diff --git a/core/src/test/scala/io/prediction/controller/SampleEngine.scala b/core/src/test/scala/io/prediction/controller/SampleEngine.scala
index d6f073f..a59e68d 100644
--- a/core/src/test/scala/io/prediction/controller/SampleEngine.scala
+++ b/core/src/test/scala/io/prediction/controller/SampleEngine.scala
@@ -1,27 +1,14 @@
 package io.prediction.controller
 
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
+import io.prediction.controller.{Params => PIOParams}
+import io.prediction.core._
 
+import grizzled.slf4j.Logger
+import io.prediction.workflow.WorkflowParams
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
 import org.apache.spark.rdd.RDD
 
-import _root_.java.lang.Thread
-
-import io.prediction.controller._
-import io.prediction.core._
-import grizzled.slf4j.{ Logger, Logging }
-
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
-
-import io.prediction.controller.{ Params => PIOParams }
-
 object Engine0 {
   @transient lazy val logger = Logger[this.type] 
 
@@ -250,9 +237,9 @@
   
   object PAlgo3 {
     case class Model(id: Int, pd: ProcessedData)
-    extends IFSPersistentModel[Params]
+    extends LocalFileSystemPersistentModel[Params]
     
-    object Model extends IFSPersistentModelLoader[Params, Model] 
+    object Model extends LocalFileSystemPersistentModelLoader[Params, Model]
 
     case class Params(id: Int) extends PIOParams
   }
@@ -302,12 +289,12 @@
   }
   
   object LAlgo2 {
-    case class Params(val id: Int) extends PIOParams
+    case class Params(id: Int) extends PIOParams
 
     case class Model(id: Int, pd: ProcessedData)
-    extends IFSPersistentModel[EmptyParams]
+    extends LocalFileSystemPersistentModel[EmptyParams]
     
-    object Model extends IFSPersistentModelLoader[EmptyParams, Model] 
+    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
   }
 
   class LAlgo2(params: LAlgo2.Params) 
@@ -320,7 +307,7 @@
   }
 
   object LAlgo3 {
-    case class Params(val id: Int) extends PIOParams
+    case class Params(id: Int) extends PIOParams
 
     case class Model(id: Int, pd: ProcessedData)
   }
@@ -364,12 +351,12 @@
   }
   
   object NAlgo2 {
-    case class Params(val id: Int) extends PIOParams
+    case class Params(id: Int) extends PIOParams
 
     case class Model(id: Int, pd: ProcessedData)
-    extends IFSPersistentModel[EmptyParams]
+    extends LocalFileSystemPersistentModel[EmptyParams]
     
-    object Model extends IFSPersistentModelLoader[EmptyParams, Model] 
+    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
   }
 
   class NAlgo2(params: NAlgo2.Params) 
@@ -383,7 +370,7 @@
   }
 
   object NAlgo3 {
-    case class Params(val id: Int) extends PIOParams
+    case class Params(id: Int) extends PIOParams
 
     case class Model(id: Int, pd: ProcessedData)
   }
@@ -416,11 +403,11 @@
 }
 
 object Engine1 {
-  case class EvalInfo(val v: Double) extends Serializable
+  case class EvalInfo(v: Double) extends Serializable
   case class Query() extends Serializable
   case class Prediction() extends Serializable
   case class Actual() extends Serializable
-  case class DSP(val v: Double) extends Params
+  case class DSP(v: Double) extends Params
 }
 
 class Engine1 
@@ -453,12 +440,12 @@
     sc: SparkContext, 
     evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
     Engine1.Actual)])]): Double = {
-    return evalDataSet.head._1.v
+    evalDataSet.head._1.v
   }
 }
 
 object Metric1 {
-  case class Result(val c: Int, val v: Double) extends Serializable
+  case class Result(c: Int, v: Double) extends Serializable
 }
 
 class Metric1
@@ -470,7 +457,7 @@
     sc: SparkContext, 
     evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
     Engine1.Actual)])]): Metric1.Result = {
-    return Metric1.Result(0, evalDataSet.head._1.v)
+    Metric1.Result(0, evalDataSet.head._1.v)
   }
 }
 
diff --git a/core/src/test/scala/io/prediction/workflow/BaseTest.scala b/core/src/test/scala/io/prediction/workflow/BaseTest.scala
index 8e2de53..4925558 100644
--- a/core/src/test/scala/io/prediction/workflow/BaseTest.scala
+++ b/core/src/test/scala/io/prediction/workflow/BaseTest.scala
@@ -6,9 +6,7 @@
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.Suite
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
 import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
 
 
 /** Manages a local `sc` {@link SparkContext} variable, correctly stopping it
diff --git a/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala b/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
index 213d031..7a50d33 100644
--- a/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
+++ b/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
@@ -1,27 +1,11 @@
 package io.prediction.workflow
 
-import _root_.java.lang.Thread
+import io.prediction.controller._
 
 import org.scalatest.FunSuite
-import org.scalatest.Inside
 import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-
-import io.prediction.controller._
-import io.prediction.core._
-import grizzled.slf4j.{ Logger, Logging }
-
-
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.Suite
 
 class EvaluationWorkflowSuite extends FunSuite with SharedSparkContext {
-  import io.prediction.controller.Engine1._
 
   test("Evaluation return best engine params, simple result type: Double") {
     val engine = new Engine1()
diff --git a/data/README.md b/data/README.md
index 6597a41..cead614 100644
--- a/data/README.md
+++ b/data/README.md
@@ -35,6 +35,19 @@
 $ sbt/sbt "data/test-only io.prediction.data.api.EventServiceSpec"
 ```
 
+- test for LEvents
+
+```
+$ sbt/sbt "data/test-only io.prediction.data.storage.LEventsSpec"
+```
+
+- test for ExampleJson and ExampleForm webhooks
+
+```
+$ sbt/sbt "data/test-only io.prediction.data.webhooks.examplejson.ExampleJsonConnectorSpec"
+$ sbt/sbt "data/test-only io.prediction.data.webhooks.exampleform.ExampleFormConnectorSpec"
+```
+
 ### Upgrade from 0.8.0/0.8.1 to 0.8.2
 
 Experimental upgrade tool (Upgrade HBase schema from 0.8.0/0.8.1 to 0.8.2)
diff --git a/data/src/main/scala/io/prediction/data/api/Common.scala b/data/src/main/scala/io/prediction/data/api/Common.scala
new file mode 100644
index 0000000..6681a1d
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/api/Common.scala
@@ -0,0 +1,80 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.api
+
+import io.prediction.data.webhooks.ConnectorException
+import io.prediction.data.storage.StorageException
+
+import spray.routing._
+import spray.routing.Directives._
+import spray.routing.Rejection
+import spray.http.StatusCodes
+import spray.http.StatusCode
+import spray.httpx.Json4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+
+object Common {
+
+  object Json4sProtocol extends Json4sSupport {
+    implicit def json4sFormats: Formats = DefaultFormats
+  }
+
+  import Json4sProtocol._
+
+  val rejectionHandler = RejectionHandler {
+    case MalformedRequestContentRejection(msg, _) :: _ =>
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    case MissingQueryParamRejection(msg) :: _ =>
+      complete(StatusCodes.NotFound,
+        Map("message" -> s"missing required query parameter ${msg}."))
+    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ => {
+      val msg = cause match {
+        case AuthenticationFailedRejection.CredentialsRejected =>
+          "Invalid accessKey."
+        case AuthenticationFailedRejection.CredentialsMissing =>
+          "Missing accessKey."
+      }
+      complete(StatusCodes.Unauthorized, challengeHeaders, Map("message" -> msg))
+    }
+    case ChannelRejection(msg) :: _ =>
+      complete(StatusCodes.Unauthorized, Map("message" -> msg))
+    case NonExistentAppRejection(msg) :: _ =>
+      complete(StatusCodes.Unauthorized, Map("message" -> msg))
+  }
+
+  val exceptionHandler = ExceptionHandler {
+    case e: ConnectorException => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    }
+    case e: StorageException => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.InternalServerError, Map("message" -> msg))
+    }
+    case e: Exception => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.InternalServerError, Map("message" -> msg))
+    }
+  }
+}
+
+/** Rejection for an invalid channel. */
+case class ChannelRejection(msg: String) extends Rejection
+
+/** Rejection when the requested app doesn't exist. */
+case class NonExistentAppRejection(msg: String) extends Rejection
diff --git a/data/src/main/scala/io/prediction/data/api/EventAPI.scala b/data/src/main/scala/io/prediction/data/api/EventAPI.scala
index 930eec5..1050dbd 100644
--- a/data/src/main/scala/io/prediction/data/api/EventAPI.scala
+++ b/data/src/main/scala/io/prediction/data/api/EventAPI.scala
@@ -18,10 +18,11 @@
 import io.prediction.data.Utils
 import io.prediction.data.storage.AccessKey
 import io.prediction.data.storage.AccessKeys
+import io.prediction.data.storage.Channels
 import io.prediction.data.storage.Event
 import io.prediction.data.storage.EventJson4sSupport
+import io.prediction.data.storage.DateTimeJson4sSupport
 import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.StorageError
 import io.prediction.data.storage.Storage
 
 import akka.actor.ActorSystem
@@ -33,9 +34,10 @@
 import akka.util.Timeout
 
 import org.json4s.{Formats, DefaultFormats}
-import org.json4s.ext.JodaTimeSerializers
+import org.json4s.JObject
 import org.json4s.native.JsonMethods.parse
 
+import spray.util.LoggingContext
 import spray.can.Http
 import spray.http.HttpCharsets
 import spray.http.HttpEntity
@@ -43,6 +45,7 @@
 import spray.http.MediaTypes
 import spray.http.StatusCodes
 import spray.http.StatusCode
+import spray.http.FormData
 import spray.httpx.Json4sSupport
 import spray.httpx.unmarshalling.Unmarshaller
 import spray.routing._
@@ -50,80 +53,25 @@
 import spray.routing.Directives._
 
 import scala.concurrent.{ExecutionContext, Future}
-import scala.collection.mutable.{ HashMap => MHashMap }
-import scala.collection.mutable
 
 import java.util.concurrent.TimeUnit
 
 import com.github.nscala_time.time.Imports.DateTime
 
-case class EntityTypesEvent(
-  val entityType: String,
-  val targetEntityType: Option[String],
-  val event: String) {
-
-  def this(e: Event) = this(
-    e.entityType,
-    e.targetEntityType,
-    e.event)
-}
-
-case class KV[K, V](key: K, value: V)
-
-case class StatsSnapshot(
-  val startTime: DateTime,
-  val endTime: Option[DateTime],
-  val basic: Seq[KV[EntityTypesEvent, Long]],
-  val statusCode: Seq[KV[StatusCode, Long]]
-)
-
-
-class Stats(val startTime: DateTime) {
-  private[this] var _endTime: Option[DateTime] = None
-  var statusCodeCount = MHashMap[(Int, StatusCode), Long]().withDefaultValue(0L)
-  var eteCount = MHashMap[(Int, EntityTypesEvent), Long]().withDefaultValue(0L)
-
-  def cutoff(endTime: DateTime) {
-    _endTime = Some(endTime)
-  }
-
-  def update(appId: Int, statusCode: StatusCode, event: Event) {
-    statusCodeCount((appId, statusCode)) += 1
-    eteCount((appId, new EntityTypesEvent(event))) += 1
-  }
-
-  def extractByAppId[K, V](appId: Int, m: mutable.Map[(Int, K), V])
-  : Seq[KV[K, V]] = {
-    m
-    .toSeq
-    .flatMap { case (k, v) =>
-      if (k._1 == appId) { Seq(KV(k._2, v)) } else { Seq() }
-    }
-  }
-
-  def get(appId: Int): StatsSnapshot = {
-    StatsSnapshot(
-      startTime,
-      _endTime,
-      extractByAppId(appId, eteCount),
-      extractByAppId(appId, statusCodeCount)
-    )
-  }
-}
-
 class EventServiceActor(
     val eventClient: LEvents,
     val accessKeysClient: AccessKeys,
+    val channelsClient: Channels,
     val stats: Boolean) extends HttpServiceActor {
 
   object Json4sProtocol extends Json4sSupport {
     implicit def json4sFormats: Formats = DefaultFormats +
-      new EventJson4sSupport.APISerializer ++
-      JodaTimeSerializers.all
+      new EventJson4sSupport.APISerializer +
+      // NOTE: don't use Json4s JodaTimeSerializers since it has issues:
+      // some formats are not converted, or the timezone is not correct
+      new DateTimeJson4sSupport.serializer
   }
 
-  import Json4sProtocol._
-
   val log = Logging(context.system, this)
 
   // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
@@ -131,30 +79,34 @@
   implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
   implicit val timeout = Timeout(5, TimeUnit.SECONDS)
 
-  // for better message response
-  val rejectionHandler = RejectionHandler {
-    case MalformedRequestContentRejection(msg, _) :: _ =>
-      complete(StatusCodes.BadRequest, Map("message" -> msg))
-    case MissingQueryParamRejection(msg) :: _ =>
-      complete(StatusCodes.NotFound,
-        Map("message" -> s"missing required query parameter ${msg}."))
-    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ =>
-      complete(StatusCodes.Unauthorized, challengeHeaders,
-        Map("message" -> s"Invalid accessKey."))
-  }
+  val rejectionHandler = Common.rejectionHandler
 
   val jsonPath = """(.+)\.json$""".r
+  val formPath = """(.+)$""".r
+
+  case class AuthData(appId: Int, channelId: Option[Int])
 
   /* with accessKey in query, return appId if succeed */
-  def withAccessKey: RequestContext => Future[Authentication[Int]] = {
+  def withAccessKey: RequestContext => Future[Authentication[AuthData]] = {
     ctx: RequestContext =>
-      val accessKeyOpt = ctx.request.uri.query.get("accessKey")
+      val accessKeyParamOpt = ctx.request.uri.query.get("accessKey")
+      val channelParamOpt = ctx.request.uri.query.get("channel")
       Future {
-        accessKeyOpt.map { accessKey =>
-          val accessKeyOpt = accessKeysClient.get(accessKey)
-          accessKeyOpt match {
-            case Some(k) => Right(k.appid)
-            case None => Left(AuthenticationFailedRejection(
+        accessKeyParamOpt.map { accessKeyParam =>
+          val accessKeyOpt = accessKeysClient.get(accessKeyParam)
+          accessKeyOpt.map { k =>
+            channelParamOpt.map { ch =>
+              val channelMap = channelsClient.getByAppid(k.appid).map(c => (c.name, c.id)).toMap
+              if (channelMap.contains(ch)) {
+                Right(AuthData(k.appid, Some(channelMap(ch))))
+              } else {
+                Left(ChannelRejection(s"Invalid channel '${ch}'."))
+              }
+            }.getOrElse{
+              Right(AuthData(k.appid, None))
+            }
+          }.getOrElse{
+            Left(AuthenticationFailedRejection(
               AuthenticationFailedRejection.CredentialsRejected, List()))
           }
         }.getOrElse { Left(AuthenticationFailedRejection(
@@ -167,6 +119,8 @@
 
   val route: Route =
     pathSingleSlash {
+      import Json4sProtocol._
+
       get {
         respondWithMediaType(MediaTypes.`application/json`) {
           complete(Map("status" -> "alive"))
@@ -174,52 +128,50 @@
       }
     } ~
     path("events" / jsonPath ) { eventId =>
+
+      import Json4sProtocol._
+
       get {
-        handleRejections(rejectionHandler) {
-          authenticate(withAccessKey) { appId =>
-            respondWithMediaType(MediaTypes.`application/json`) {
-              complete {
-                log.debug(s"GET event ${eventId}.")
-                val data = eventClient.futureGet(eventId, appId).map { r =>
-                  r match {
-                    case Left(StorageError(message)) =>
-                      (StatusCodes.InternalServerError,
-                        Map("message" -> message))
-                    case Right(eventOpt) => {
-                      eventOpt.map( event =>
-                        (StatusCodes.OK, event)
-                      ).getOrElse(
-                        (StatusCodes.NotFound, Map("message" -> "Not Found"))
-                      )
-                    }
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  log.debug(s"GET event ${eventId}.")
+                  val data = eventClient.futureGet(eventId, appId, channelId).map { eventOpt =>
+                    eventOpt.map( event =>
+                      (StatusCodes.OK, event)
+                    ).getOrElse(
+                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
+                    )
                   }
+                  data
                 }
-                data
               }
             }
           }
         }
       } ~
       delete {
-        handleRejections(rejectionHandler) {
-          authenticate(withAccessKey) { appId =>
-            respondWithMediaType(MediaTypes.`application/json`) {
-              complete {
-                log.debug(s"DELETE event ${eventId}.")
-                val data = eventClient.futureDelete(eventId, appId).map { r =>
-                  r match {
-                    case Left(StorageError(message)) =>
-                      (StatusCodes.InternalServerError,
-                        Map("message" -> message))
-                    case Right(found) =>
-                      if (found) {
-                        (StatusCodes.OK, Map("message" -> "Found"))
-                      } else {
-                        (StatusCodes.NotFound, Map("message" -> "Not Found"))
-                      }
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  log.debug(s"DELETE event ${eventId}.")
+                  val data = eventClient.futureDelete(eventId, appId, channelId).map { found =>
+                    if (found) {
+                      (StatusCodes.OK, Map("message" -> "Found"))
+                    } else {
+                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
+                    }
                   }
+                  data
                 }
-                data
               }
             }
           }
@@ -227,92 +179,92 @@
       }
     } ~
     path("events.json") {
+
+      import Json4sProtocol._
+
       post {
-        handleRejections(rejectionHandler) {
-          authenticate(withAccessKey) { appId =>
-            entity(as[Event]) { event =>
-              complete {
-                log.debug(s"POST events")
-                val data = eventClient.futureInsert(event, appId).map { r =>
-                  val result = r match {
-                    case Left(StorageError(message)) =>
-                      (StatusCodes.InternalServerError,
-                        Map("message" -> message))
-                    case Right(id) =>
-                      (StatusCodes.Created, Map("eventId" -> s"${id}"))
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              entity(as[Event]) { event =>
+                complete {
+                  log.debug(s"POST events")
+                  val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+                    val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+                    if (stats) {
+                      statsActorRef ! Bookkeeping(appId, result._1, event)
+                    }
+                    result
                   }
-                  if (stats) {
-                    statsActorRef ! Bookkeeping(appId, result._1, event)
-                  }
-                  result
+                  data
                 }
-                data
               }
             }
           }
         }
       } ~
       get {
-        handleRejections(rejectionHandler) {
-          authenticate(withAccessKey) { appId =>
-            parameters(
-              'startTime.as[Option[String]],
-              'untilTime.as[Option[String]],
-              'entityType.as[Option[String]],
-              'entityId.as[Option[String]],
-              'event.as[Option[String]],
-              'targetEntityType.as[Option[String]],
-              'targetEntityId.as[Option[String]],
-              'limit.as[Option[Int]],
-              'reversed.as[Option[Boolean]]) {
-              (startTimeStr, untilTimeStr, entityType, entityId,
-                eventName,  // only support one event name
-                targetEntityType, targetEntityId,
-                limit, reversed) =>
-              respondWithMediaType(MediaTypes.`application/json`) {
-                complete {
-                  log.debug(
-                    s"GET events of appId=${appId} " +
-                    s"st=${startTimeStr} ut=${untilTimeStr} " +
-                    s"et=${entityType} eid=${entityId} " +
-                    s"li=${limit} rev=${reversed} ")
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              parameters(
+                'startTime.as[Option[String]],
+                'untilTime.as[Option[String]],
+                'entityType.as[Option[String]],
+                'entityId.as[Option[String]],
+                'event.as[Option[String]],
+                'targetEntityType.as[Option[String]],
+                'targetEntityId.as[Option[String]],
+                'limit.as[Option[Int]],
+                'reversed.as[Option[Boolean]]) {
+                (startTimeStr, untilTimeStr, entityType, entityId,
+                  eventName,  // only support one event name
+                  targetEntityType, targetEntityId,
+                  limit, reversed) =>
+                respondWithMediaType(MediaTypes.`application/json`) {
+                  complete {
+                    log.debug(
+                      s"GET events of appId=${appId} " +
+                      s"st=${startTimeStr} ut=${untilTimeStr} " +
+                      s"et=${entityType} eid=${entityId} " +
+                      s"li=${limit} rev=${reversed} ")
 
-                  val parseTime = Future {
-                    val startTime = startTimeStr.map(Utils.stringToDateTime(_))
-                    val untilTime = untilTimeStr.map(Utils.stringToDateTime(_))
-                    (startTime, untilTime)
-                  }
+                    val parseTime = Future {
+                      val startTime = startTimeStr.map(Utils.stringToDateTime(_))
+                      val untilTime = untilTimeStr.map(Utils.stringToDateTime(_))
+                      (startTime, untilTime)
+                    }
 
-                  parseTime.flatMap { case (startTime, untilTime) =>
-                    val data = eventClient.futureFind(
-                      appId = appId,
-                      startTime = startTime,
-                      untilTime = untilTime,
-                      entityType = entityType,
-                      entityId = entityId,
-                      eventNames = eventName.map(List(_)),
-                      targetEntityType = targetEntityType.map(Some(_)),
-                      targetEntityId = targetEntityId.map(Some(_)),
-                      limit = limit.orElse(Some(20)),
-                      reversed = reversed)
-                      .map { r =>
-                        r match {
-                          case Left(StorageError(message)) =>
-                            (StatusCodes.InternalServerError,
-                              Map("message" -> message))
-                          case Right(eventIter) =>
-                            if (eventIter.hasNext) {
-                              (StatusCodes.OK, eventIter.toArray)
-                            } else {
-                              (StatusCodes.NotFound,
-                                Map("message" -> "Not Found"))
-                            }
+                    parseTime.flatMap { case (startTime, untilTime) =>
+                      val data = eventClient.futureFind(
+                        appId = appId,
+                        channelId = channelId,
+                        startTime = startTime,
+                        untilTime = untilTime,
+                        entityType = entityType,
+                        entityId = entityId,
+                        eventNames = eventName.map(List(_)),
+                        targetEntityType = targetEntityType.map(Some(_)),
+                        targetEntityId = targetEntityId.map(Some(_)),
+                        limit = limit.orElse(Some(20)),
+                        reversed = reversed)
+                        .map { eventIter =>
+                          if (eventIter.hasNext) {
+                            (StatusCodes.OK, eventIter.toArray)
+                          } else {
+                            (StatusCodes.NotFound,
+                              Map("message" -> "Not Found"))
+                          }
                         }
-                      }
-                    data
-                  }.recover {
-                    case e: Exception =>
-                      (StatusCodes.BadRequest, Map("message" -> s"${e}"))
+                      data
+                    }.recover {
+                      case e: Exception =>
+                        (StatusCodes.BadRequest, Map("message" -> s"${e}"))
+                    }
                   }
                 }
               }
@@ -322,77 +274,142 @@
       }
     } ~
     path("stats.json") {
+
+      import Json4sProtocol._
+
       get {
-        handleRejections(rejectionHandler) {
-          authenticate(withAccessKey) { appId =>
-            respondWithMediaType(MediaTypes.`application/json`) {
-              if (stats) {
-                complete {
-                  statsActorRef ? GetStats(appId) map {
-                    _.asInstanceOf[Map[String, StatsSnapshot]]
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                if (stats) {
+                  complete {
+                    statsActorRef ? GetStats(appId) map {
+                      _.asInstanceOf[Map[String, StatsSnapshot]]
+                    }
                   }
+                } else {
+                  complete(
+                    StatusCodes.NotFound,
+                    parse("""{"message": "To see stats, launch Event Server """ +
+                      """with --stats argument."}"""))
                 }
-              } else {
-                complete(
-                  StatusCodes.NotFound,
-                  parse("""{"message": "To see stats, launch Event Server """ +
-                    """with --stats argument."}"""))
               }
             }
           }
         }
       }  // stats.json get
+    } ~
+    path("webhooks" / jsonPath ) { web =>
+
+      import Json4sProtocol._
+
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                entity(as[JObject]) { jObj =>
+                  complete {
+                    Webhooks.postJson(
+                      appId = appId,
+                      channelId = channelId,
+                      web = web,
+                      data = jObj,
+                      eventClient = eventClient,
+                      log = log,
+                      stats = stats,
+                      statsActorRef = statsActorRef)
+                  }
+                }
+              }
+            }
+          }
+        }
+      } ~
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  Webhooks.getJson(
+                    appId = appId,
+                    channelId = channelId,
+                    web = web,
+                    log = log)
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("webhooks" / formPath ) { web =>
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                entity(as[FormData]){ formData =>
+                  //log.debug(formData.toString)
+                  complete {
+                    // respond with JSON
+                    import Json4sProtocol._
+
+                    Webhooks.postForm(
+                      appId = appId,
+                      channelId = channelId,
+                      web = web,
+                      data = formData,
+                      eventClient = eventClient,
+                      log = log,
+                      stats = stats,
+                      statsActorRef = statsActorRef)
+                  }
+                }
+              }
+            }
+          }
+        }
+      } ~
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  // respond with JSON
+                  import Json4sProtocol._
+
+                  Webhooks.getForm(
+                    appId = appId,
+                    channelId = channelId,
+                    web = web,
+                    log = log)
+                }
+              }
+            }
+          }
+        }
+      }
+
     }
 
   def receive: Actor.Receive = runRoute(route)
 
 }
 
-case class Bookkeeping(val appId: Int, statusCode: StatusCode, event: Event)
-case class GetStats(val appId: Int)
 
-class StatsActor extends Actor {
-  implicit val system = context.system
-  val log = Logging(system, this)
-
-  def getCurrent: DateTime = {
-    DateTime.now.
-      withMinuteOfHour(0).
-      withSecondOfMinute(0).
-      withMillisOfSecond(0)
-  }
-
-  var longLiveStats = new Stats(DateTime.now)
-  var hourlyStats = new Stats(getCurrent)
-
-  var prevHourlyStats = new Stats(getCurrent.minusHours(1))
-  prevHourlyStats.cutoff(hourlyStats.startTime)
-
-  def bookkeeping(appId: Int, statusCode: StatusCode, event: Event) {
-    val current = getCurrent
-    // If the current hour is different from the stats start time, we create
-    // another stats instance, and move the current to prev.
-    if (current != hourlyStats.startTime) {
-      prevHourlyStats = hourlyStats
-      prevHourlyStats.cutoff(current)
-      hourlyStats = new Stats(current)
-    }
-
-    hourlyStats.update(appId, statusCode, event)
-    longLiveStats.update(appId, statusCode, event)
-  }
-
-  def receive: Actor.Receive = {
-    case Bookkeeping(appId, statusCode, event) =>
-      bookkeeping(appId, statusCode, event)
-    case GetStats(appId) => sender() ! Map(
-      "time" -> DateTime.now,
-      "currentHour" -> hourlyStats.get(appId),
-      "prevHour" -> prevHourlyStats.get(appId),
-      "longLive" -> longLiveStats.get(appId))
-    case _ => log.error("Unknown message.")
-  }
-}
 
 /* message */
 case class StartServer(
@@ -402,10 +419,15 @@
 class EventServerActor(
     val eventClient: LEvents,
     val accessKeysClient: AccessKeys,
+    val channelsClient: Channels,
     val stats: Boolean) extends Actor {
   val log = Logging(context.system, this)
   val child = context.actorOf(
-    Props(classOf[EventServiceActor], eventClient, accessKeysClient, stats),
+    Props(classOf[EventServiceActor],
+      eventClient,
+      accessKeysClient,
+      channelsClient,
+      stats),
     "EventServiceActor")
   implicit val system = context.system
 
@@ -429,13 +451,15 @@
     implicit val system = ActorSystem("EventServerSystem")
 
     val eventClient = Storage.getLEvents()
-    val accessKeysClient = Storage.getMetaDataAccessKeys
+    val accessKeysClient = Storage.getMetaDataAccessKeys()
+    val channelsClient = Storage.getMetaDataChannels()
 
     val serverActor = system.actorOf(
       Props(
         classOf[EventServerActor],
         eventClient,
         accessKeysClient,
+        channelsClient,
         config.stats),
       "EventServerActor")
     if (config.stats) system.actorOf(Props[StatsActor], "StatsActor")
@@ -448,7 +472,7 @@
 
   def main (args: Array[String]) {
     EventServer.createEventServer(EventServerConfig(
-      ip = "localhost",
+      ip = "0.0.0.0",
       port = 7070))
   }
 
diff --git a/data/src/main/scala/io/prediction/data/api/Stats.scala b/data/src/main/scala/io/prediction/data/api/Stats.scala
new file mode 100644
index 0000000..ca5f05e
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/api/Stats.scala
@@ -0,0 +1,79 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.api
+
+import io.prediction.data.storage.Event
+
+import spray.http.StatusCode
+
+import scala.collection.mutable.{ HashMap => MHashMap }
+import scala.collection.mutable
+
+import com.github.nscala_time.time.Imports.DateTime
+
+case class EntityTypesEvent(
+  val entityType: String,
+  val targetEntityType: Option[String],
+  val event: String) {
+
+  def this(e: Event) = this(
+    e.entityType,
+    e.targetEntityType,
+    e.event)
+}
+
+case class KV[K, V](key: K, value: V)
+
+case class StatsSnapshot(
+  val startTime: DateTime,
+  val endTime: Option[DateTime],
+  val basic: Seq[KV[EntityTypesEvent, Long]],
+  val statusCode: Seq[KV[StatusCode, Long]]
+)
+
+
+class Stats(val startTime: DateTime) {
+  private[this] var _endTime: Option[DateTime] = None
+  var statusCodeCount = MHashMap[(Int, StatusCode), Long]().withDefaultValue(0L)
+  var eteCount = MHashMap[(Int, EntityTypesEvent), Long]().withDefaultValue(0L)
+
+  def cutoff(endTime: DateTime) {
+    _endTime = Some(endTime)
+  }
+
+  def update(appId: Int, statusCode: StatusCode, event: Event) {
+    statusCodeCount((appId, statusCode)) += 1
+    eteCount((appId, new EntityTypesEvent(event))) += 1
+  }
+
+  def extractByAppId[K, V](appId: Int, m: mutable.Map[(Int, K), V])
+  : Seq[KV[K, V]] = {
+    m
+    .toSeq
+    .flatMap { case (k, v) =>
+      if (k._1 == appId) { Seq(KV(k._2, v)) } else { Seq() }
+    }
+  }
+
+  def get(appId: Int): StatsSnapshot = {
+    StatsSnapshot(
+      startTime,
+      _endTime,
+      extractByAppId(appId, eteCount),
+      extractByAppId(appId, statusCodeCount)
+    )
+  }
+}
diff --git a/data/src/main/scala/io/prediction/data/api/StatsActor.scala b/data/src/main/scala/io/prediction/data/api/StatsActor.scala
new file mode 100644
index 0000000..857352f
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/api/StatsActor.scala
@@ -0,0 +1,74 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.api
+
+import io.prediction.data.storage.Event
+
+import spray.http.StatusCode
+
+import akka.actor.Actor
+import akka.event.Logging
+
+import com.github.nscala_time.time.Imports.DateTime
+
+/* message to StatsActor */
+case class Bookkeeping(val appId: Int, statusCode: StatusCode, event: Event)
+
+/* message to StatsActor */
+case class GetStats(val appId: Int)
+
+class StatsActor extends Actor {
+  implicit val system = context.system
+  val log = Logging(system, this)
+
+  def getCurrent: DateTime = {
+    DateTime.now.
+      withMinuteOfHour(0).
+      withSecondOfMinute(0).
+      withMillisOfSecond(0)
+  }
+
+  var longLiveStats = new Stats(DateTime.now)
+  var hourlyStats = new Stats(getCurrent)
+
+  var prevHourlyStats = new Stats(getCurrent.minusHours(1))
+  prevHourlyStats.cutoff(hourlyStats.startTime)
+
+  def bookkeeping(appId: Int, statusCode: StatusCode, event: Event) {
+    val current = getCurrent
+    // If the current hour is different from the stats start time, we create
+    // another stats instance, and move the current to prev.
+    if (current != hourlyStats.startTime) {
+      prevHourlyStats = hourlyStats
+      prevHourlyStats.cutoff(current)
+      hourlyStats = new Stats(current)
+    }
+
+    hourlyStats.update(appId, statusCode, event)
+    longLiveStats.update(appId, statusCode, event)
+  }
+
+  def receive: Actor.Receive = {
+    case Bookkeeping(appId, statusCode, event) =>
+      bookkeeping(appId, statusCode, event)
+    case GetStats(appId) => sender() ! Map(
+      "time" -> DateTime.now,
+      "currentHour" -> hourlyStats.get(appId),
+      "prevHour" -> prevHourlyStats.get(appId),
+      "longLive" -> longLiveStats.get(appId))
+    case _ => log.error("Unknown message.")
+  }
+}
diff --git a/data/src/main/scala/io/prediction/data/api/Webhooks.scala b/data/src/main/scala/io/prediction/data/api/Webhooks.scala
new file mode 100644
index 0000000..7928bfe
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/api/Webhooks.scala
@@ -0,0 +1,151 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.api
+
+import io.prediction.data.webhooks.JsonConnector
+import io.prediction.data.webhooks.FormConnector
+import io.prediction.data.webhooks.ConnectorUtil
+import io.prediction.data.storage.Event
+import io.prediction.data.storage.EventJson4sSupport
+import io.prediction.data.storage.LEvents
+
+import spray.routing._
+import spray.routing.Directives._
+import spray.http.StatusCodes
+import spray.http.StatusCode
+import spray.http.FormData
+import spray.httpx.Json4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+
+import akka.event.LoggingAdapter
+import akka.actor.ActorSelection
+
+import scala.concurrent.{ExecutionContext, Future}
+
+
+private[prediction] object Webhooks {
+
+  def postJson(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    data: JObject,
+    eventClient: LEvents,
+    log: LoggingAdapter,
+    stats: Boolean,
+    statsActorRef: ActorSelection
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+
+    val eventFuture = Future {
+      WebhooksConnectors.json.get(web).map { connector =>
+        ConnectorUtil.toEvent(connector, data)
+      }
+    }
+
+    eventFuture.flatMap { eventOpt =>
+      if (eventOpt.isEmpty) {
+        Future {
+          val message = s"webhooks connector for ${web} is not supported."
+          (StatusCodes.NotFound, Map("message" -> message))
+        }
+      } else {
+        val event = eventOpt.get
+        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+
+          if (stats) {
+            statsActorRef ! Bookkeeping(appId, result._1, event)
+          }
+          result
+        }
+        data
+      }
+    }
+  }
+
+  def getJson(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    log: LoggingAdapter
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    Future {
+      WebhooksConnectors.json.get(web).map { connector =>
+        (StatusCodes.OK, Map("message" -> "Ok"))
+      }.getOrElse {
+        val message = s"webhooks connector for ${web} is not supported."
+        (StatusCodes.NotFound, Map("message" -> message))
+      }
+    }
+  }
+
+  def postForm(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    data: FormData,
+    eventClient: LEvents,
+    log: LoggingAdapter,
+    stats: Boolean,
+    statsActorRef: ActorSelection
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    val eventFuture = Future {
+      WebhooksConnectors.form.get(web).map { connector =>
+        ConnectorUtil.toEvent(connector, data.fields.toMap)
+      }
+    }
+
+    eventFuture.flatMap { eventOpt =>
+      if (eventOpt.isEmpty) {
+        Future {
+          val message = s"webhooks connector for ${web} is not supported."
+          (StatusCodes.NotFound, Map("message" -> message))
+        }
+      } else {
+        val event = eventOpt.get
+        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+
+          if (stats) {
+            statsActorRef ! Bookkeeping(appId, result._1, event)
+          }
+          result
+        }
+        data
+      }
+    }
+  }
+
+  def getForm(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    log: LoggingAdapter
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    Future {
+      WebhooksConnectors.form.get(web).map { connector =>
+        (StatusCodes.OK, Map("message" -> "Ok"))
+      }.getOrElse {
+        val message = s"webhooks connector for ${web} is not supported."
+        (StatusCodes.NotFound, Map("message" -> message))
+      }
+    }
+  }
+
+}
diff --git a/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala b/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala
new file mode 100644
index 0000000..97c9775
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala
@@ -0,0 +1,34 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.api
+
+import io.prediction.data.webhooks.JsonConnector
+import io.prediction.data.webhooks.FormConnector
+
+import io.prediction.data.webhooks.segmentio.SegmentIOConnector
+import io.prediction.data.webhooks.mailchimp.MailChimpConnector
+
+private[prediction] object WebhooksConnectors {
+
+  val json: Map[String, JsonConnector] = Map(
+    "segmentio" -> SegmentIOConnector
+  )
+
+  val form: Map[String, FormConnector] = Map(
+    "mailchimp" -> MailChimpConnector
+  )
+
+}
diff --git a/data/src/main/scala/io/prediction/data/examples/HBLEventsTest.scala b/data/src/main/scala/io/prediction/data/examples/HBLEventsTest.scala
deleted file mode 100644
index 062db55..0000000
--- a/data/src/main/scala/io/prediction/data/examples/HBLEventsTest.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.examples
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.hbase.HBLEvents
-import io.prediction.data.storage.hbase.StorageClient
-
-import scala.concurrent.ExecutionContext.Implicits.global
-
-object HBLEventsTest {
-  def main(args: Array[String]) {
-    val appId = args(0).toInt
-    val eventDb = new HBLEvents(
-      new StorageClient(new StorageClientConfig(Seq(), Seq(), false)).client,
-      "predictionio_eventdata")
-    eventDb.aggregateProperties(appId, "user").right.get.foreach(println(_))
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/examples/HBPEventsTest.scala b/data/src/main/scala/io/prediction/data/examples/HBPEventsTest.scala
deleted file mode 100644
index ec8b02f..0000000
--- a/data/src/main/scala/io/prediction/data/examples/HBPEventsTest.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.examples
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.hbase.HBPEvents
-import io.prediction.data.storage.hbase.StorageClient
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext._
-
-object HBPEventsTest {
-
-  def main(args: Array[String]) {
-
-    val fromAppId = args(0).toInt
-    val toAppId = args(1).toInt
-
-    val sparkConf = new SparkConf().setAppName("HBaseTest")
-    val sc = new SparkContext(sparkConf)
-    val eventClient = new HBPEvents(
-      new StorageClient(new StorageClientConfig(Seq(), Seq(), true)).client,
-      "predictionio_eventdata")
-    val e1: RDD[Event] = eventClient.find(fromAppId)(sc).cache()
-
-    eventClient.write(e1, toAppId)(sc)
-    println("Finished writing.")
-
-    // read back from toAppId
-    val e2: RDD[Event] = eventClient.find(toAppId)(sc)
-
-    val c1 = e1.count()
-    val c2 = e2.count()
-
-    // check data
-    val a = e1.map { event => (event.eventId.getOrElse(""), event) }
-    val b = e2.map { event => (event.eventId.getOrElse(""), event) }
-
-    val notMatched1 = a.leftOuterJoin(b).map { case (k, (v, wOpt)) =>
-      ((Some(v) == wOpt), (Some(v), wOpt))
-    }.filter{ case (m, d) => (!m) }
-    .cache()
-
-    val notMatched2 = b.leftOuterJoin(a).map { case (k, (v, wOpt)) =>
-      ((Some(v) == wOpt), (Some(v), wOpt))
-    }.filter{ case (m, d) => (!m) }
-    .cache()
-
-    val notMatchedCount1 = notMatched1.count()
-    if (notMatchedCount1 != 0) {
-      notMatched1.take(2).foreach(println(_))
-    }
-
-    val notMatchedCount2 = notMatched2.count()
-    if (notMatchedCount2 != 0) {
-      notMatched2.take(2).foreach(println(_))
-    }
-
-    println(s"count1: ${c1}, count2: ${c2}")
-    println(s"count match = ${c1 == c2}")
-    println(s"data match1 = ${notMatchedCount1 == 0}")
-    println(s"data match2 = ${notMatchedCount2 == 0}")
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/examples/PBatchViewTest.scala b/data/src/main/scala/io/prediction/data/examples/PBatchViewTest.scala
deleted file mode 100644
index d2b65ca..0000000
--- a/data/src/main/scala/io/prediction/data/examples/PBatchViewTest.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.examples
-
-import io.prediction.data.storage.Event
-import io.prediction.data.view.PBatchView
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-
-object PBatchViewTest {
-
-  def main(args: Array[String]) {
-
-    val appId = args(0).toInt
-    val sparkConf = new SparkConf().setAppName("HBaseTest")
-    val sc = new SparkContext(sparkConf)
-
-    val batchView = new PBatchView(appId, None, None, sc)
-
-    val rdd = batchView.aggregateProperties("pio_user")
-
-    println(rdd.collect().mkString(","))
-
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/examples/README.md b/data/src/main/scala/io/prediction/data/examples/README.md
deleted file mode 100644
index 5277856..0000000
--- a/data/src/main/scala/io/prediction/data/examples/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-Temporary place for examples and tests
-======================================
-
-HBPeventsTest
-```
-$ cd $PIO_HOME
-$ ./make-distribution.sh
-$ $SPARK_HOME/bin/spark-submit \
-  --class "io.prediction.data.storage.examples.HBPEventsTest" \
-  --master local[4] \
-  assembly/pio-assembly-0.9.0.jar <from appId> <to appId>
-```
-
-PBatchViewTest
-```
-$ cd $PIO_HOME
-$ ./make-distribution.sh
-$ $SPARK_HOME/bin/spark-submit \
-  --class "io.prediction.data.storage.examples.PBatchViewTest" \
-  --master local[4] \
-  assembly/pio-assembly-0.8.2.jar <appId>
-```
-
-TestEvents
-```
-$ sbt/sbt "data/compile"
-$ set -a
-$ source conf/pio-env.sh
-$ set +a
-$ sbt/sbt "data/run-main io.prediction.data.examples.TestEvents HB"
-```
-
-
-HBLEventsTest
-```
-$ sbt/sbt "data/compile"
-$ set -a
-$ source conf/pio-env.sh
-$ set +a
-$ sbt/sbt "data/run-main io.prediction.data.examples.HBLEventsTest 2"
-```
diff --git a/data/src/main/scala/io/prediction/data/examples/Test.scala b/data/src/main/scala/io/prediction/data/examples/Test.scala
deleted file mode 100644
index 994fcc4..0000000
--- a/data/src/main/scala/io/prediction/data/examples/Test.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.examples
-
-import org.json4s._
-import org.json4s.JsonDSL._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization
-import org.json4s.native.Serialization.{ read, write }
-import org.json4s.ext.JodaTimeSerializers
-import com.github.nscala_time.time.Imports._
-
-// some quick test code for testing json serializer
-
-object SerializerTest {
-  case class Event(
-    val entityId: String,
-    val targetEntityId: Option[String],
-    val event: String,
-    val properties: JObject, // TODO: don't use JObject
-    val time: DateTime,
-    val tags: Seq[String],
-    val appId: Int,
-    val prId: Option[String]
-  )
-
-  case class A(a: Int, b: String, c: Option[Boolean])
-
-  class EventSeriliazer extends CustomSerializer[Event](format => (
-    {
-      case jv: JValue => {
-        implicit val formats = DefaultFormats.lossless
-        val entityId = (jv \ "entityId").extract[String]
-        val targetEntityId = (jv \ "targetEntityId").extract[Option[String]]
-        val event = (jv \ "event").extract[String]
-        val properties = (jv \ "properties").extract[JObject]
-        val time = DateTime.now // TODO (jv \ "time").extract[]
-        val tags = (jv \ "tags").extract[Seq[String]]
-        val appId = (jv \ "appId").extract[Int]
-        val prId = (jv \ "prId").extract[Option[String]]
-        Event(
-          entityId = entityId,
-          targetEntityId = targetEntityId,
-          event = event,
-          properties = properties,
-          time = time,
-          tags = tags,
-          appId = appId,
-          prId = prId)
-      }
-      /* case JObject(
-         JField("a", JInt(a)) ::
-         JField("b", JString(b)) ::
-         Nil) => new A(a.intValue,b) */
-    },
-    {
-      case d: Event =>
-        JObject(
-          JField("entityId", JString(d.entityId)) ::
-          JField("targetEntityId",
-            d.targetEntityId.map(JString(_)).getOrElse(JNothing)) ::
-          JField("event", JString(d.event)) ::
-          JField("properties", d.properties) ::
-          JField("time", JString("TODO")) ::
-          JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
-          JField("appId", JInt(d.appId)) ::
-          JField("prId",
-            d.prId.map(JString(_)).getOrElse(JNothing)) ::
-          Nil)
-    }
-  ))
-  def main(args: Array[String]) {
-    test()
-  }
-
-  def test(): Unit = {
-    implicit val formats = DefaultFormats.lossless +
-      new EventSeriliazer
-
-    val j = parse("""{
-      "a": 1, "b": "some"} """)
-
-    /* val w = read[A]("""{"b": 1,
-       "a": 1 } """) */
-    val w = read[Event]("""{
-      "event" : "my_event",
-      "entityId" : "my_entity_id",
-      "targetEntityId" : "my_target_entity_id",
-      "properties" : {
-        "prop1" : "value1",
-        "prop2" : "value2"
-      }
-      "time" : "2004-12-13T21:39:45.618-08:00",
-      "tags" : ["tag1", "tag2"],
-      "appId" : 4,
-      "prId" : "my_predicted_result_id"
-    }""")
-    println(w)
-    println(write(w))
-    println("test")
-
-  }
-}
-
-
-object Test {
-  case class TestEvent(
-    val d: Int,
-    val e: Map[String, Any]
-  )
-  def main(args: Array[String]) {
-    implicit val formats = DefaultFormats // Brings in default date formats etc.
-
-    val json = parse("""
-    {"d": 1, "e" : {"a": "some_string", "b": true, "c" : 4}}""")
-    println(json)
-    val d = json.extract[TestEvent]
-    println(d)
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/examples/TestEvents.scala b/data/src/main/scala/io/prediction/data/examples/TestEvents.scala
deleted file mode 100644
index 44b6bdd..0000000
--- a/data/src/main/scala/io/prediction/data/examples/TestEvents.scala
+++ /dev/null
@@ -1,178 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.examples
-
-import  io.prediction.data.storage.LEvents
-import  io.prediction.data.storage.Event
-import  io.prediction.data.storage.DataMap
-import  io.prediction.data.storage.hbase
-import  io.prediction.data.storage.StorageClientConfig
-import  io.prediction.data.storage.Storage
-
-// import io.prediction.data.storage.elasticsearch.ESStorageClient
-// import io.prediction.data.storage.hbase.HBStorageClient
-
-import io.prediction.data.storage.elasticsearch.ESLEvents
-import io.prediction.data.storage.hbase.HBLEvents
-
-import org.json4s.JObject
-import org.json4s.native.JsonMethods.parse
-import org.json4s.native.Serialization.{ read, write }
-
-import scala.concurrent.ExecutionContext.Implicits.global
-
-import org.joda.time.DateTime
-
-object TestLEvents {
-
-  def main(args: Array[String]) {
-    args(0) match {
-      case "HB" => testHBLEvents()
-      case "ES" => testESLEvents()
-    }
-  }
-
-  val e = Event(
-    event = "$set",
-    entityType = "axx",
-    entityId = "abc",
-    targetEntityType = None,
-    targetEntityId = None,
-    properties = DataMap(parse("""
-      { "numbers" : [1, 2, 3, 4],
-        "abc" : "some_string",
-        "def" : 4, "k" : false
-      } """).asInstanceOf[JObject]),
-    eventTime = DateTime.now,
-    tags = List("tag1", "tag2"),
-    prId = None
-  )
-
-  val e2 = e.copy(properties=DataMap())
-
-  def testESLEvents() {
-
-    /* val config = StorageClientConfig(Seq("localhost"), Seq(9300))
-       val storageClient = new ESStorageClient(config)
-       val client = storageClient.client
-       val eventConnector = storageClient.eventClient */
-    val eventConnector = Storage.getDataObject[LEvents]("predictionio_events_es").
-      asInstanceOf[ESLEvents]
-    implicit val formats = eventConnector.formats
-
-    // client.prepareGet("testindex", "events", "Abcdef").get()
-
-    val x = write(e)
-    println(x)
-    println(x.getClass)
-
-    val de = eventConnector.insert(e, 4)
-    println(de)
-    de match {
-      case Right(d) => {
-        val e2 = eventConnector.get(d, 4)
-        println(e2)
-        val k = eventConnector.delete(d, 4)
-        println(k)
-        val k2 = eventConnector.delete(d, 4)
-        println(k2)
-      }
-      case _ => {println("match error")}
-    }
-
-    val i1 = eventConnector.insert(e, 4)
-    println(i1)
-    val i2 = eventConnector.insert(e, 4)
-    println(i2)
-    val i3 = eventConnector.insert(e, 4)
-    println(i3)
-
-    // force refresh index for testing, else get may not have result
-    // client.admin().indices().prepareRefresh("testindex").get()
-
-    val all = eventConnector.getByAppId(4)
-    println(all.right.map{ x =>
-      val l = x.toList
-      s"size ${l.size}, ${l}"
-    })
-
-    val delAll = eventConnector.deleteByAppId(4)
-    println(delAll)
-    val all2 = eventConnector.getByAppId(4)
-    println(all2)
-    // client.close()
-  }
-
-  def testHBLEvents(): Unit = {
-
-    println("testHBLEvents")
-
-   /* val config = StorageClientConfig(Seq("localhost"), Seq(9300))
-      val storageClient = new HBStorageClient(config)
-      val client = storageClient.client
-      val eventConnector = storageClient.eventClient */
-    val storageClient = new hbase.StorageClient(StorageClientConfig(
-      hosts = Seq("localhost"),
-      ports = Seq(1234)))
-
-    val eventConnector = new hbase.HBLEvents(storageClient.client,
-      "predictionio_events_hb")
-
-    val appId = 4
-
-    eventConnector.init(appId)
-    val de = eventConnector.insert(e, appId)
-    println(de)
-    de match {
-      case Right(d) => {
-        val e2 = eventConnector.get(d, appId)
-        println(e2)
-        val k = eventConnector.delete(d, appId)
-        println(k)
-        val k2 = eventConnector.delete(d, appId)
-        println(k2)
-      }
-      case _ => {println("match error")}
-    }
-
-    val i1 = eventConnector.insert(e, appId)
-    println(i1)
-    val i2 = eventConnector.insert(e, appId)
-    println(i2)
-    val i3 = eventConnector.insert(e, appId)
-    println(i3)
-
-    val all = eventConnector.getByAppId(appId)
-    println(all.right.map{ x =>
-      val l = x.toList
-      s"size ${l.size}, ${l}"
-    })
-
-    val delAll = eventConnector.deleteByAppId(appId)
-    println(delAll)
-    val all2 = eventConnector.getByAppId(appId)
-    println(all2)
-
-    (eventConnector.insert(e2, appId) match {
-      case Right(id) => eventConnector.get(id, appId)
-      case Left(x) => Left(x)
-    }) match {
-      case Right(e) => println(e)
-      case _ => println("error")
-    }
-
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/examples/TestHBase.scala b/data/src/main/scala/io/prediction/data/examples/TestHBase.scala
deleted file mode 100644
index fe15531..0000000
--- a/data/src/main/scala/io/prediction/data/examples/TestHBase.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.examples
-
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.hbase.HBaseConfiguration
-import org.apache.hadoop.hbase.client.HConnectionManager
-import org.apache.hadoop.hbase.client.HConnection
-import org.apache.hadoop.hbase.client.HBaseAdmin
-
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.Event
-
-import scala.concurrent.ExecutionContext.Implicits.global
-
-// sbt/sbt "data/run-main io.prediction.data.storage.examples.TestHBase"
-object TestHBase {
-
-  def main(arg: Array[String]) {
-
-    /*
-    val conf = HBaseConfiguration.create()
-    conf.set("hbase.client.retries.number", "1")
-    conf.set("zookeeper.recovery.retry", "1")
-
-    HBaseAdmin.checkHBaseAvailable(conf)
-    val a = new HBaseAdmin(conf)
-    a.listTables()
-    */
-    println("  Verifying Event Data Backend")
-    val eventsDb = Storage.getLEvents()
-    eventsDb.init(0)
-    eventsDb.insert(Event(
-      event="test",
-      entityType="test",
-      entityId="test"), 0)
-    eventsDb.remove(0)
-    eventsDb.close()
-
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/storage/Apps.scala b/data/src/main/scala/io/prediction/data/storage/Apps.scala
index 7a56560..bb156d9 100644
--- a/data/src/main/scala/io/prediction/data/storage/Apps.scala
+++ b/data/src/main/scala/io/prediction/data/storage/Apps.scala
@@ -51,4 +51,5 @@
 
   /** Delete an App. */
   def delete(id: Int): Boolean
+
 }
diff --git a/data/src/main/scala/io/prediction/data/storage/Channels.scala b/data/src/main/scala/io/prediction/data/storage/Channels.scala
new file mode 100644
index 0000000..67aad6a
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/storage/Channels.scala
@@ -0,0 +1,65 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage
+
+/**
+ * Channel object.
+ *
+ * Stores the mapping between channel ID, channel name, and app ID.
+ *
+ * @param id ID of the channel
+ * @param name Name of the channel (must be unique within the same app).
+ * @param appid ID of the app which this channel belongs to
+ */
+private[prediction] case class Channel(
+  id: Int,
+  name: String, // must be unique within the same app
+  appid: Int
+) {
+  require(Channel.isValidName(name),
+    "Invalid channel name: ${name}. ${Channel.nameConstraint}")
+}
+
+private[prediction] object Channel {
+  def isValidName(s: String): Boolean = {
+    // note: update nameConstraint if this rule is changed
+    s.matches("^[a-zA-Z0-9-]{1,16}$")
+  }
+
+  // for displaying the error message consistently
+  val nameConstraint: String =
+    "Only alphanumeric and - characters are allowed and max length is 16."
+}
+
+/**
+ * Base trait for implementations that interact with Channels in the backend
+ * data store.
+ */
+private[prediction] trait Channels {
+
+  /** Insert a new Channel. Returns a generated channel ID if original ID is 0. */
+  def insert(channel: Channel): Option[Int]
+
+  /** Get a Channel by channel ID. */
+  def get(id: Int): Option[Channel]
+
+  /** Get all Channels by app ID. */
+  def getByAppid(appid: Int): Seq[Channel]
+
+  /** Delete a Channel. */
+  def delete(id: Int): Boolean
+
+}
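
The `nameConstraint` above limits channel names to alphanumeric characters and `-`, with a maximum length of 16. A small standalone sketch of the same rule, reusing the regex from `Channel.isValidName`:

```scala
// Standalone check of the channel-name rule: alphanumeric and "-" only,
// length 1 to 16 characters.
object ChannelNameCheck {
  def isValidName(s: String): Boolean = s.matches("^[a-zA-Z0-9-]{1,16}$")

  def main(args: Array[String]): Unit = {
    println(isValidName("my-channel-1"))              // true
    println(isValidName("bad name"))                  // false: contains a space
    println(isValidName("a-very-long-channel-name"))  // false: longer than 16 chars
    println(isValidName(""))                          // false: empty
  }
}
```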
diff --git a/data/src/main/scala/io/prediction/data/storage/LEvents.scala b/data/src/main/scala/io/prediction/data/storage/LEvents.scala
index 969b50f..d6dd3da 100644
--- a/data/src/main/scala/io/prediction/data/storage/LEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/LEvents.scala
@@ -15,36 +15,45 @@
 
 package io.prediction.data.storage
 
+import io.prediction.annotation.Experimental
+
 import scala.concurrent.Future
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
-
-import org.joda.time.DateTime
-
 import scala.concurrent.ExecutionContext
 import scala.concurrent.TimeoutException
 
+import org.joda.time.DateTime
+
 /** Base trait of a data access object that directly returns [[Event]] without
   * going through Spark's parallelization.
   */
 trait LEvents {
 
   private def notImplemented(implicit ec: ExecutionContext) = Future {
-    Left(StorageError("Not implemented."))
+    throw new StorageException("Not implemented.")
   }
+
   val defaultTimeout = Duration(60, "seconds")
 
   /** Initialize Event Store for the appId.
-   * initialization routine to be called when app is first created.
-   * return true if succeed or false if fail.
-   */
-  private[prediction] def init(appId: Int): Boolean = {
+    * Initialization routine to be called when an app is first created.
+    * Returns true on success, false on failure.
+    * @param appId App ID
+    * @param channelId Channel ID
+    * @return true if initialization succeeded; false otherwise.
+    */
+  private[prediction] def init(appId: Int, channelId: Option[Int] = None): Boolean = {
     throw new Exception("init() is not implemented.")
     false
   }
 
-  /** Remove Event Store for this appId */
-  private[prediction] def remove(appId: Int): Boolean = {
+  /** Remove the Event Store for this appId.
+    * @param appId App ID
+    * @param channelId Channel ID
+    * @return true if removal succeeded; false otherwise.
+    */
+  private[prediction] def remove(appId: Int, channelId: Option[Int] = None): Boolean = {
     throw new Exception("remove() is not implemented.")
     false
   }
@@ -57,47 +66,43 @@
     ()
   }
 
+  /* auxiliary */
+  private[prediction]
+  def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
+    Future[String] = futureInsert(event, appId, None)
+
   private[prediction]
   def futureInsert(
-    event: Event, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, String]] =
+    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[String] =
     notImplemented
 
+  /* auxiliary */
   private[prediction]
   def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Option[Event]]] =
+    Future[Option[Event]] = futureGet(eventId, appId, None)
+
+  private[prediction]
+  def futureGet(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Option[Event]] =
     notImplemented
 
+  /* auxiliary */
   private[prediction]
   def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Boolean]] =
+    Future[Boolean] = futureDelete(eventId, appId, None)
+
+  private[prediction]
+  def futureDelete(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Boolean] =
     notImplemented
 
-  /** @deprecated */
-  private[prediction]
-  def futureGetByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = notImplemented
-
-  /* where t >= start and t < untilTime */
-  /** @deprecated */
-  private[prediction]
-  def futureGetByAppIdAndTime(appId: Int, startTime: Option[DateTime],
-    untilTime: Option[DateTime])(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = notImplemented
-
-  /** @deprecated */
-  private[prediction]
-  def futureGetByAppIdAndTimeAndEntity(appId: Int,
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    entityType: Option[String],
-    entityId: Option[String])(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = notImplemented
-
-  /** Reads from database and returns a Future of either StorageError or
-    * events iterator.
+  /** Reads from the database and returns a Future of an events iterator.
     *
     * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
     * @param startTime return events with eventTime >= startTime
     * @param untilTime return events with eventTime < untilTime
     * @param entityType return events of this entityType
@@ -116,10 +121,11 @@
     *   - return oldest events first if None or Some(false) (default)
     *   - return latest events first if Some(true)
     * @param ec ExecutionContext
-    * @return Future[Either[StorageError, Iterator[Event]]]
+    * @return Future[Iterator[Event]]
     */
   private[prediction] def futureFind(
     appId: Int,
+    channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     entityType: Option[String] = None,
@@ -129,107 +135,111 @@
     targetEntityId: Option[Option[String]] = None,
     limit: Option[Int] = None,
     reversed: Option[Boolean] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = notImplemented
+    Future[Iterator[Event]] = notImplemented
 
   /** Aggregate properties of entities based on these special events:
     * \$set, \$unset, \$delete events.
-    * and returns a Future of either StorageError or a Map of entityId to
-    * properties.
+    * It returns a Future of a Map from entityId to properties.
     *
     * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
     * @param entityType aggregate properties of the entities of this entityType
     * @param startTime use events with eventTime >= startTime
     * @param untilTime use events with eventTime < untilTime
     * @param required only keep entities with these required properties defined
     * @param ec ExecutionContext
-    * @return Future[Either[StorageError, Map[String, PropertyMap]]]
+    * @return Future[Map[String, PropertyMap]]
     */
   private[prediction] def futureAggregateProperties(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     required: Option[Seq[String]] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Map[String, PropertyMap]]] = notImplemented
+    Future[Map[String, PropertyMap]] = {
+      futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = Some(entityType),
+        eventNames = Some(LEventAggregator.eventNames)
+      ).map{ eventIt =>
+        val dm = LEventAggregator.aggregateProperties(eventIt)
+        if (required.isDefined) {
+          dm.filter { case (k, v) =>
+            required.get.map(v.contains(_)).reduce(_ && _)
+          }
+        } else dm
+      }
+    }
 
-  /** Experimental.
+  /**
+    * :: Experimental ::
     *
     * Aggregate properties of the specified entity (entityType + entityId)
     * based on these special events:
     * \$set, \$unset, \$delete events.
-    * and returns a Future of either StorageError or Option[PropertyMap]
+    * It returns a Future of Option[PropertyMap].
     *
     * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
     * @param entityType the entityType
     * @param entityId the entityId
     * @param startTime use events with eventTime >= startTime
     * @param untilTime use events with eventTime < untilTime
     * @param ec ExecutionContext
-    * @return Future[Either[StorageError, Option[PropertyMap]]]
+    * @return Future[Option[PropertyMap]]
     */
-  private[prediction] def futureAggregatePropertiesSingle(
+  @Experimental
+  private[prediction] def futureAggregatePropertiesOfEntity(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     entityId: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Option[PropertyMap]]] = notImplemented
-
-  private[prediction]
-  def futureDeleteByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Unit]] = notImplemented
+    Future[Option[PropertyMap]] = {
+      futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = Some(entityType),
+        entityId = Some(entityId),
+        eventNames = Some(LEventAggregator.eventNames)
+      ).map{ eventIt =>
+        LEventAggregator.aggregatePropertiesSingle(eventIt)
+      }
+    }
 
   // following is blocking
   private[prediction] def insert(event: Event, appId: Int,
+    channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, String] = {
-    Await.result(futureInsert(event, appId), timeout)
+    String = {
+    Await.result(futureInsert(event, appId, channelId), timeout)
   }
 
   private[prediction] def get(eventId: String, appId: Int,
+    channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Option[Event]] = {
-    Await.result(futureGet(eventId, appId), timeout)
+    Option[Event] = {
+    Await.result(futureGet(eventId, appId, channelId), timeout)
   }
 
   private[prediction] def delete(eventId: String, appId: Int,
+    channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Boolean] = {
-    Await.result(futureDelete(eventId, appId), timeout)
+    Boolean = {
+    Await.result(futureDelete(eventId, appId, channelId), timeout)
   }
 
-  /** @deprecated */
-  private[prediction] def getByAppId(appId: Int,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Iterator[Event]] = {
-    Await.result(futureGetByAppId(appId), timeout)
-  }
-
-  /** @deprecated */
-  private[prediction]
-  def getByAppIdAndTime(appId: Int, startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Iterator[Event]] = {
-    Await.result(futureGetByAppIdAndTime(appId, startTime, untilTime), timeout)
-  }
-
-  /** @deprecated */
-  private[prediction] def getByAppIdAndTimeAndEntity(appId: Int,
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    entityType: Option[String],
-    entityId: Option[String],
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Iterator[Event]] = {
-    Await.result(futureGetByAppIdAndTimeAndEntity(appId, startTime, untilTime,
-      entityType, entityId), timeout)
-  }
-
-  /** reads from database and returns either StorageError or
-    * events iterator.
+  /** Reads from the database and returns an events iterator.
     *
     * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
     * @param startTime return events with eventTime >= startTime
     * @param untilTime return events with eventTime < untilTime
     * @param entityType return events of this entityType
@@ -249,10 +259,41 @@
     *   - return oldest events first if None or Some(false) (default)
     *   - return latest events first if Some(true)
     * @param ec ExecutionContext
-    * @return Either[StorageError, Iterator[Event]]
+    * @return Iterator[Event]
     */
   private[prediction] def find(
     appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    limit: Option[Int] = None,
+    reversed: Option[Boolean] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Iterator[Event] = {
+      Await.result(futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = entityType,
+        entityId = entityId,
+        eventNames = eventNames,
+        targetEntityType = targetEntityType,
+        targetEntityId = targetEntityId,
+        limit = limit,
+        reversed = reversed), timeout)
+  }
+
+  // NOTE: remove in next release
+  @deprecated("Use find() instead.", "0.9.2")
+  private[prediction] def findLegacy(
+    appId: Int,
+    channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     entityType: Option[String] = None,
@@ -265,8 +306,10 @@
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
     Either[StorageError, Iterator[Event]] = {
       try {
-        Await.result(futureFind(
+        // return Either for legacy usage
+        Right(Await.result(futureFind(
           appId = appId,
+          channelId = channelId,
           startTime = startTime,
           untilTime = untilTime,
           entityType = entityType,
@@ -275,7 +318,7 @@
           targetEntityType = targetEntityType,
           targetEntityId = targetEntityId,
           limit = limit,
-          reversed = reversed), timeout)
+          reversed = reversed), timeout))
       } catch {
         case e: TimeoutException => Left(StorageError(s"${e}"))
         case e: Exception => Left(StorageError(s"${e}"))
@@ -285,6 +328,7 @@
   /** reads events of the specified entity.
     *
     * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
     * @param entityType return events of this entityType
     * @param entityId return events of this entityId
     * @param eventNames return events with any of these event names.
@@ -303,8 +347,11 @@
     * @param ec ExecutionContext
     * @return Either[StorageError, Iterator[Event]]
     */
+  // NOTE: remove this function in next release
+  @deprecated("Use LEventStore.findByEntity() instead.", "0.9.2")
   def findSingleEntity(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     entityId: String,
     eventNames: Option[Seq[String]] = None,
@@ -317,8 +364,9 @@
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
     Either[StorageError, Iterator[Event]] = {
 
-    find(
+    findLegacy(
       appId = appId,
+      channelId = channelId,
       startTime = startTime,
       untilTime = untilTime,
       entityType = Some(entityType),
@@ -334,69 +382,70 @@
 
   /** Aggregate properties of entities based on these special events:
     * \$set, \$unset, \$delete events.
-    * and returns either StorageError or a Map of entityId to
-    * properties.
+    * It returns a Map from entityId to properties.
     *
     * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
     * @param entityType aggregate properties of the entities of this entityType
     * @param startTime use events with eventTime >= startTime
     * @param untilTime use events with eventTime < untilTime
     * @param required only keep entities with these required properties defined
     * @param ec ExecutionContext
-    * @return Either[StorageError, Map[String, PropertyMap]]
+    * @return Map[String, PropertyMap]
     */
   private[prediction] def aggregateProperties(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     required: Option[Seq[String]] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Map[String, PropertyMap]] = {
+    Map[String, PropertyMap] = {
     Await.result(futureAggregateProperties(
       appId = appId,
+      channelId = channelId,
       entityType = entityType,
       startTime = startTime,
       untilTime = untilTime,
       required = required), timeout)
   }
 
-  /** Experimental.
+  /**
+    * :: Experimental ::
     *
     * Aggregate properties of the specified entity (entityType + entityId)
     * based on these special events:
     * \$set, \$unset, \$delete events.
-    * and returns either StorageError or Option[PropertyMap]
+    * It returns an Option[PropertyMap].
     *
     * @param appId use events of this app ID
+    * @param channelId use events of this channel ID
     * @param entityType the entityType
     * @param entityId the entityId
     * @param startTime use events with eventTime >= startTime
     * @param untilTime use events with eventTime < untilTime
     * @param ec ExecutionContext
-    * @return Future[Either[StorageError, Option[PropertyMap]]]
+    * @return Future[Option[PropertyMap]]
     */
-  private[prediction] def aggregatePropertiesSingle(
+  @Experimental
+  private[prediction] def aggregatePropertiesOfEntity(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     entityId: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Option[PropertyMap]] = {
+    Option[PropertyMap] = {
 
-    Await.result(futureAggregatePropertiesSingle(
+    Await.result(futureAggregatePropertiesOfEntity(
       appId = appId,
+      channelId = channelId,
       entityType = entityType,
       entityId = entityId,
       startTime = startTime,
       untilTime = untilTime), timeout)
   }
 
-  private[prediction] def deleteByAppId(appId: Int,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Unit] = {
-    Await.result(futureDeleteByAppId(appId), timeout)
-  }
-
 }
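
With this change, `LEvents` methods return plain `Future`s (failures now surface as failed futures and `StorageException`) instead of `Either[StorageError, _]`, and every accessor takes an optional `channelId`. The following is a hedged usage sketch under that 0.9.2 API; the app ID, channel ID, and event values are made up, and the object sits inside the `io.prediction` package so the `private[prediction]` blocking wrappers are reachable.

```scala
package io.prediction.data.examples

import io.prediction.data.storage.{Event, Storage}

import scala.concurrent.ExecutionContext.Implicits.global

// Hedged sketch only: appId = 1 and channelId = Some(2) are hypothetical values.
object LEventsSketch {
  def main(args: Array[String]): Unit = {
    val events = Storage.getLEvents()
    val appId = 1

    events.init(appId)

    // blocking wrapper; inserts on the default channel (channelId = None)
    val eventId: String = events.insert(
      Event(event = "$set", entityType = "user", entityId = "u0"), appId)

    // get() now returns Option[Event] instead of Either[StorageError, ...]
    println(events.get(eventId, appId))

    // the same lookup against a hypothetical channel with ID 2
    println(events.get(eventId, appId, channelId = Some(2)))

    events.close()
  }
}
```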
diff --git a/data/src/main/scala/io/prediction/data/storage/PEvents.scala b/data/src/main/scala/io/prediction/data/storage/PEvents.scala
index 5b8aece..251b362 100644
--- a/data/src/main/scala/io/prediction/data/storage/PEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/PEvents.scala
@@ -29,7 +29,7 @@
   */
 trait PEvents extends Serializable {
 
-  /** @deprecated */
+  @deprecated("Use PEventStore.find() instead.", "0.9.2")
   def getByAppIdAndTimeAndEntity(appId: Int,
     startTime: Option[DateTime],
     untilTime: Option[DateTime],
@@ -48,6 +48,7 @@
   /** Read from database and return the events.
     *
     * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
     * @param startTime return events with eventTime >= startTime
     * @param untilTime return events with eventTime < untilTime
     * @param entityType return events of this entityType
@@ -64,8 +65,11 @@
     * @param sc Spark context
     * @return RDD[Event]
     */
+  // NOTE: Don't delete! Make this private[prediction] instead when deprecating.
+  @deprecated("Use PEventStore.find() instead.", "0.9.2")
   def find(
     appId: Int,
+    channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     entityType: Option[String] = None,
@@ -78,6 +82,7 @@
     * \$set, \$unset, \$delete events.
     *
     * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
     * @param entityType aggregate properties of the entities of this entityType
     * @param startTime use events with eventTime >= startTime
     * @param untilTime use events with eventTime < untilTime
@@ -85,8 +90,11 @@
     * @param sc Spark context
     * @return RDD[(String, PropertyMap)] RDD of entityId and PropertyMap pairs
     */
+  // NOTE: Don't delete! Make this private[prediction] instead when deprecating.
+  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
   def aggregateProperties(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
@@ -97,7 +105,7 @@
     * Extract EntityMap[A] from events for the entityType
     * NOTE: it is local EntityMap[A]
     */
-  @Experimental
+  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
   def extractEntityMap[A: ClassTag](
     appId: Int,
     entityType: String,
@@ -113,7 +121,18 @@
     * @param appId the app ID
     * @param sc Spark Context
     */
-  @Experimental
-  def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit
+  private[prediction] def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit =
+    write(events, appId, None)(sc)
+
+  /** :: Experimental ::
+    * Write events to database
+    *
+    * @param events RDD of Event
+    * @param appId the app ID
+    * @param channelId channel ID (default channel if it's None)
+    * @param sc Spark Context
+    */
+  private[prediction] def write(
+    events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit
 
 }
diff --git a/data/src/main/scala/io/prediction/data/storage/Storage.scala b/data/src/main/scala/io/prediction/data/storage/Storage.scala
index dd68eb7..abec851 100644
--- a/data/src/main/scala/io/prediction/data/storage/Storage.scala
+++ b/data/src/main/scala/io/prediction/data/storage/Storage.scala
@@ -23,8 +23,15 @@
 
 import scala.concurrent.ExecutionContext.Implicits.global
 
+@deprecated("Use StorageException", "0.9.2")
 private[prediction] case class StorageError(val message: String)
 
+private[prediction] class StorageException(message: String, cause: Throwable)
+  extends Exception(message, cause) {
+
+  def this(message: String) = this(message, null)
+}
+
 /**
  * Backend-agnostic data storage layer with lazy initialization and connection
  * pooling. Use this object when you need to interface with Event Store in your
@@ -264,6 +271,9 @@
   private[prediction] def getMetaDataAccessKeys(): AccessKeys =
     getDataObject[AccessKeys](MetaDataRepository)
 
+  private[prediction] def getMetaDataChannels(): Channels =
+    getDataObject[Channels](MetaDataRepository)
+
   private[prediction] def getModelDataModels(): Models =
     getDataObject[Models](ModelDataRepository)
 
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
new file mode 100644
index 0000000..b48bca1
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
@@ -0,0 +1,118 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage.elasticsearch
+
+import io.prediction.data.storage.Channel
+import io.prediction.data.storage.Channels
+
+import grizzled.slf4j.Logging
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.index.query.FilterBuilders.termFilter
+import org.elasticsearch.client.Client
+
+import org.json4s.DefaultFormats
+import org.json4s._
+import org.json4s.JsonDSL._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization
+import org.json4s.native.Serialization.{ read, write }
+
+class ESChannels(client: Client, index: String)
+    extends Channels with Logging {
+
+  implicit val formats = DefaultFormats.lossless
+  private val estype = "channels"
+  private val seq = new ESSequences(client, index)
+  private val seqName = "channels"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("name" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(channel: Channel): Option[Int] = {
+    val id =
+      if (channel.id == 0) {
+        var roll = seq.genNext(seqName)
+        while (!get(roll).isEmpty) roll = seq.genNext(seqName)
+        roll
+      } else channel.id
+
+    val realChannel = channel.copy(id = id)
+    if (update(realChannel)) Some(id) else None
+  }
+
+  def get(id: Int): Option[Channel] = {
+    try {
+      val response = client.prepareGet(
+        index,
+        estype,
+        id.toString).get()
+      Some(read[Channel](response.getSourceAsString))
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+      case e: NullPointerException => None
+    }
+  }
+
+  def getByAppid(appid: Int): Seq[Channel] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype).
+        setPostFilter(termFilter("appid", appid))
+      ESUtils.getAll[Channel](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq[Channel]()
+    }
+  }
+
+  def update(channel: Channel): Boolean = {
+    try {
+      val response = client.prepareIndex(index, estype, channel.id.toString).
+        setSource(write(channel)).get()
+      true
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        false
+    }
+  }
+
+  def delete(id: Int): Boolean = {
+    try {
+      client.prepareDelete(index, estype, id.toString).get
+      true
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        false
+    }
+  }
+
+}
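
When `ESChannels.insert` receives a channel with `id == 0`, it keeps drawing IDs from `ESSequences` until it finds one that is not already stored. Below is a plain-Scala sketch of that allocation loop, with an in-memory counter and set standing in for the sequence generator and the Elasticsearch index.

```scala
// Sketch of the id-allocation loop in ESChannels.insert: draw ids from a
// sequence generator until an unused one is found; explicit ids pass through.
object ChannelIdAllocationSketch {
  private var counter = 0
  private def genNext(): Int = { counter += 1; counter }

  private val existing = scala.collection.mutable.Set(1, 2) // ids already in use

  def allocate(requestedId: Int): Int =
    if (requestedId == 0) {
      var roll = genNext()
      while (existing.contains(roll)) roll = genNext()
      existing += roll
      roll
    } else requestedId

  def main(args: Array[String]): Unit = {
    println(allocate(0)) // skips 1 and 2, allocates 3
    println(allocate(7)) // explicit id is kept as-is
  }
}
```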
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
index f1edd5c..ba47a15 100644
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
+++ b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
@@ -71,4 +71,5 @@
         0
     }
   }
+
 }
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/obsolete/ESLEvents.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/obsolete/ESLEvents.scala
deleted file mode 100644
index 2e2a33c..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/obsolete/ESLEvents.scala
+++ /dev/null
@@ -1,163 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.StorageError
-import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.EventJson4sSupport
-
-import grizzled.slf4j.Logging
-
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.client.transport.TransportClient
-import org.elasticsearch.node.NodeBuilder.nodeBuilder
-import org.elasticsearch.common.transport.InetSocketTransportAddress
-import org.elasticsearch.transport.ConnectTransportException
-import org.elasticsearch.action.index.IndexResponse
-import org.elasticsearch.action.get.GetResponse
-import org.elasticsearch.action.delete.DeleteResponse
-import org.elasticsearch.action.search.SearchResponse
-import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse
-import org.elasticsearch.action.ActionListener
-import org.elasticsearch.index.query.FilterBuilders
-import org.elasticsearch.index.query.QueryBuilders
-
-import org.json4s.DefaultFormats
-import org.json4s.native.Serialization.{ read, write }
-// import org.json4s.ext.JodaTimeSerializers
-
-import scala.util.Try
-import scala.concurrent.Future
-import scala.concurrent.Promise
-import scala.concurrent.ExecutionContext
-
-class ESLEvents(client: Client, index: String) extends LEvents with Logging {
-
-  implicit val formats = DefaultFormats + new EventJson4sSupport.DBSerializer
-  // implicit val formats = DefaultFormats.lossless ++ JodaTimeSerializers.all
-
-  def typeName: String = s"events"
-
-  override
-  def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, String]] = {
-    val response = Promise[IndexResponse]
-
-    client.prepareIndex(index, typeName)
-      .setSource(write(event))
-      .execute(new ESActionListener(response))
-
-    response.future
-      .map(r => Right(r.getId()))
-      .recover {
-        case e: Exception => Left(StorageError(e.toString))
-      }
-  }
-
-  override
-  def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Option[Event]]] = {
-
-    val response = Promise[GetResponse]
-
-    client.prepareGet(index, typeName, eventId)
-      .execute(new ESActionListener(response))
-
-    response.future
-      .map { r =>
-        if (r.isExists) {
-          Right(Some(read[Event](r.getSourceAsString)))
-        } else {
-          Right(None)
-        }
-      }.recover {
-        case e: Exception => Left(StorageError(e.toString))
-      }
-  }
-
-  override
-  def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Boolean]] = {
-    val response = Promise[DeleteResponse]
-
-    client.prepareDelete(index, typeName, eventId)
-      .execute(new ESActionListener(response))
-
-    response.future
-      .map(r => Right(r.isFound()))
-      .recover {
-        case e: Exception => Left(StorageError(e.toString))
-      }
-  }
-
-  override
-  def futureGetByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = {
-    val response = Promise[SearchResponse]
-
-    client.prepareSearch(index).setTypes(typeName)
-      .setPostFilter(FilterBuilders.termFilter("appId", appId))
-      .execute(new ESActionListener(response))
-
-    response.future
-      .map{ r =>
-        val dataIt = r.getHits().hits()
-          .map(h => read[Event](h.getSourceAsString)).toIterator
-        Right(dataIt)
-      }.recover {
-        case e: Exception => Left(StorageError(e.toString))
-      }
-
-  }
-
-  override
-  def futureDeleteByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Unit]] = {
-
-    val response = Promise[DeleteByQueryResponse]
-
-    client.prepareDeleteByQuery(index).setTypes(typeName)
-      .setQuery(QueryBuilders.termQuery("appId", appId))
-      .execute(new ESActionListener(response))
-
-    response.future
-      .map { r =>
-        val indexResponse = r.getIndex(index)
-        val numFailures = indexResponse.getFailedShards()
-        if (numFailures != 0) {
-          Left(StorageError(s"Failed to delete ${numFailures} shards."))
-        } else {
-          Right(())
-        }
-      }.recover {
-        case e: Exception => Left(StorageError(e.toString))
-      }
-
-  }
-
-}
-
-
-class ESActionListener[T](val p: Promise[T]) extends ActionListener[T]{
-  override def onResponse(r: T): Unit = {
-    p.success(r)
-  }
-  override def onFailure(e: Throwable): Unit = {
-    p.failure(e)
-  }
-}
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
index 564affa..294961f 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
@@ -48,7 +48,13 @@
 
   implicit val formats = DefaultFormats
 
-  def tableName(namespace: String, appId: Int): String = s"${namespace}:events_${appId}"
+  def tableName(namespace: String, appId: Int, channelId: Option[Int] = None): String = {
+    channelId.map { ch =>
+      s"${namespace}:events_${appId}_${ch}"
+    }.getOrElse {
+      s"${namespace}:events_${appId}"
+    }
+  }
 
   // column names for "e" column family
   val colNames: Map[String, Array[Byte]] = Map(
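
The extended `tableName` above keeps default-channel events in `<namespace>:events_<appId>` and appends `_<channelId>` for per-channel tables. A tiny sketch mirroring that rule, with a made-up namespace and IDs:

```scala
// Sketch of the HBase table-naming rule with the new optional channelId.
object HBaseTableNameSketch {
  def tableName(namespace: String, appId: Int, channelId: Option[Int] = None): String =
    channelId.map(ch => s"${namespace}:events_${appId}_${ch}")
      .getOrElse(s"${namespace}:events_${appId}")

  def main(args: Array[String]): Unit = {
    println(tableName("predictionio_eventdata", 3))          // predictionio_eventdata:events_3
    println(tableName("predictionio_eventdata", 3, Some(2))) // predictionio_eventdata:events_3_2
  }
}
```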
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
index f3194ae..cf0ed21 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
@@ -18,8 +18,6 @@
 import io.prediction.data.storage.Event
 import io.prediction.data.storage.PropertyMap
 import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.LEventAggregator
-import io.prediction.data.storage.StorageError
 import io.prediction.data.storage.hbase.HBEventsUtil.RowKey
 import io.prediction.data.storage.hbase.HBEventsUtil.RowKeyException
 
@@ -46,11 +44,11 @@
   def resultToEvent(result: Result, appId: Int): Event =
     HBEventsUtil.resultToEvent(result, appId)
 
-  def getTable(appId: Int): HTableInterface = client.connection.getTable(
-    HBEventsUtil.tableName(namespace, appId))
+  def getTable(appId: Int, channelId: Option[Int] = None): HTableInterface =
+    client.connection.getTable(HBEventsUtil.tableName(namespace, appId, channelId))
 
   override
-  def init(appId: Int): Boolean = {
+  def init(appId: Int, channelId: Option[Int] = None): Boolean = {
     // check namespace exist
     val existingNamespace = client.admin.listNamespaceDescriptors()
       .map(_.getName)
@@ -60,7 +58,7 @@
       client.admin.createNamespace(nameDesc)
     }
 
-    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId))
+    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
     if (!client.admin.tableExists(tableName)) {
       info(s"The table ${tableName.getNameAsString()} doesn't exist yet." +
         " Creating now...")
@@ -73,8 +71,8 @@
   }
 
   override
-  def remove(appId: Int): Boolean = {
-    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId))
+  def remove(appId: Int, channelId: Option[Int] = None): Boolean = {
+    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
     try {
       if (client.admin.tableExists(tableName)) {
         info(s"Removing table ${tableName.getNameAsString()}...")
@@ -100,27 +98,25 @@
   }
 
   override
-  def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, String]] = {
+  def futureInsert(
+    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[String] = {
     Future {
-      val table = getTable(appId)
+      val table = getTable(appId, channelId)
       val (put, rowKey) = HBEventsUtil.eventToPut(event, appId)
       table.put(put)
       table.flushCommits()
       table.close()
-      Right(rowKey.toString)
-    }/* .recover {
-       case e: Exception => Left(StorageError(e.toString))
-    } */
+      rowKey.toString
+    }
   }
 
-
-
   override
-  def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Option[Event]]] = {
+  def futureGet(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Option[Event]] = {
       Future {
-        val table = getTable(appId)
+        val table = getTable(appId, channelId)
         val rowKey = RowKey(eventId)
         val get = new Get(rowKey.toBytes)
 
@@ -129,79 +125,31 @@
 
         if (!result.isEmpty()) {
           val event = resultToEvent(result, appId)
-          Right(Some(event))
+          Some(event)
         } else {
-          Right(None)
+          None
         }
-      }.recover {
-        case e: RowKeyException => Left(StorageError(e.toString))
-        case e: Exception => throw e
       }
     }
 
   override
-  def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Boolean]] = {
+  def futureDelete(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Boolean] = {
     Future {
-      val table = getTable(appId)
+      val table = getTable(appId, channelId)
       val rowKey = RowKey(eventId)
       val exists = table.exists(new Get(rowKey.toBytes))
       table.delete(new Delete(rowKey.toBytes))
       table.close()
-      Right(exists)
+      exists
     }
   }
 
   override
-  def futureGetByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = {
-      futureFind(
-        appId = appId,
-        startTime = None,
-        untilTime = None,
-        entityType = None,
-        entityId = None,
-        eventNames = None,
-        limit = None,
-        reversed = None)
-    }
-
-  override
-  def futureGetByAppIdAndTime(appId: Int, startTime: Option[DateTime],
-    untilTime: Option[DateTime])(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = {
-      futureFind(
-        appId = appId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = None,
-        entityId = None,
-        eventNames = None,
-        limit = None,
-        reversed = None)
-  }
-
-  override
-  def futureGetByAppIdAndTimeAndEntity(appId: Int,
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    entityType: Option[String],
-    entityId: Option[String])(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = {
-      futureFind(
-        appId = appId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = entityType,
-        entityId = entityId,
-        eventNames = None,
-        limit = None,
-        reversed = None)
-  }
-
-  override
   def futureFind(
     appId: Int,
+    channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     entityType: Option[String] = None,
@@ -211,9 +159,10 @@
     targetEntityId: Option[Option[String]] = None,
     limit: Option[Int] = None,
     reversed: Option[Boolean] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Iterator[Event]]] = {
+    Future[Iterator[Event]] = {
       Future {
-        val table = getTable(appId)
+
+        val table = getTable(appId, channelId)
 
         val scan = HBEventsUtil.createScan(
           startTime = startTime,
@@ -238,75 +187,8 @@
 
         val eventsIt = results.map { resultToEvent(_, appId) }
 
-        Right(eventsIt)
+        eventsIt
       }
   }
 
-  override
-  def futureAggregateProperties(
-    appId: Int,
-    entityType: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Map[String, PropertyMap]]] = {
-      futureFind(
-        appId = appId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = Some(entityType),
-        eventNames = Some(LEventAggregator.eventNames)
-      ).map{ either =>
-        either.right.map{ eventIt =>
-          val dm = LEventAggregator.aggregateProperties(eventIt)
-          if (required.isDefined) {
-            dm.filter { case (k, v) =>
-              required.get.map(v.contains(_)).reduce(_ && _)
-            }
-          } else dm
-        }
-      }
-    }
-
-  override
-  def futureAggregatePropertiesSingle(
-    appId: Int,
-    entityType: String,
-    entityId: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Option[PropertyMap]]] = {
-      futureFind(
-        appId = appId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = Some(entityType),
-        entityId = Some(entityId),
-        eventNames = Some(LEventAggregator.eventNames)
-      ).map{ either =>
-        either.right.map{ eventIt =>
-          LEventAggregator.aggregatePropertiesSingle(eventIt)
-        }
-      }
-    }
-
-  override
-  def futureDeleteByAppId(appId: Int)(implicit ec: ExecutionContext):
-    Future[Either[StorageError, Unit]] = {
-    Future {
-      // TODO: better way to handle range delete
-      val table = getTable(appId)
-      val scan = new Scan()
-      val scanner = table.getScanner(scan)
-      val it = scanner.iterator()
-      while (it.hasNext()) {
-        val result = it.next()
-        table.delete(new Delete(result.getRow()))
-      }
-      scanner.close()
-      table.close()
-      Right(())
-    }
-  }
-
 }
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
index 97924a9..da44ec6 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
@@ -48,16 +48,24 @@
 class HBPEvents(client: HBClient, namespace: String)
   extends PEvents with Logging {
 
-  def checkTableExists(appId: Int): Unit = {
-    if (!client.admin.tableExists(HBEventsUtil.tableName(namespace, appId))) {
-      error(s"The appId ${appId} does not exist. Please use valid appId.")
-      throw new Exception(s"HBase table not found for appId ${appId}.")
+  def checkTableExists(appId: Int, channelId: Option[Int]): Unit = {
+    if (!client.admin.tableExists(HBEventsUtil.tableName(namespace, appId, channelId))) {
+      if (channelId.isDefined) {
+        error(s"The appId ${appId} with channelId ${channelId} does not exist." +
+          s" Please use valid appId and channelId.")
+        throw new Exception(s"HBase table not found for appId ${appId}" +
+          s" with channelId ${channelId}.")
+      } else {
+        error(s"The appId ${appId} does not exist. Please use valid appId.")
+        throw new Exception(s"HBase table not found for appId ${appId}.")
+      }
     }
   }
 
   override
   def find(
     appId: Int,
+    channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     entityType: Option[String] = None,
@@ -67,11 +75,11 @@
     targetEntityId: Option[Option[String]] = None
     )(sc: SparkContext): RDD[Event] = {
 
-    checkTableExists(appId)
+    checkTableExists(appId, channelId)
 
     val conf = HBaseConfiguration.create()
     conf.set(TableInputFormat.INPUT_TABLE,
-      HBEventsUtil.tableName(namespace, appId))
+      HBEventsUtil.tableName(namespace, appId, channelId))
 
     val scan = HBEventsUtil.createScan(
         startTime = startTime,
@@ -100,16 +108,18 @@
   override
   def aggregateProperties(
     appId: Int,
+    channelId: Option[Int] = None,
     entityType: String,
     startTime: Option[DateTime] = None,
     untilTime: Option[DateTime] = None,
     required: Option[Seq[String]] = None)
     (sc: SparkContext): RDD[(String, PropertyMap)] = {
 
-    checkTableExists(appId)
+    checkTableExists(appId, channelId)
 
     val eventRDD = find(
       appId = appId,
+      channelId = channelId,
       startTime = startTime,
       untilTime = untilTime,
       entityType = Some(entityType),
@@ -119,7 +129,12 @@
       PEventAggregator.aggregateProperties(eventRDD)
     } catch {
       case e: TableNotFoundException => {
-        error(s"The appId ${appId} does not exist. Please use valid appId.")
+        if (channelId.isDefined) {
+          error(s"The appId ${appId} with channelId ${channelId} does not exist." +
+            s" Please use valid appId and channelId.")
+        } else {
+          error(s"The appId ${appId} does not exist. Please use valid appId.")
+        }
         throw e
       }
       case e: Exception => throw e
@@ -164,13 +179,14 @@
   }
 
   override
-  def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit = {
+  def write(
+    events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
 
-    checkTableExists(appId)
+    checkTableExists(appId, channelId)
 
     val conf = HBaseConfiguration.create()
     conf.set(TableOutputFormat.OUTPUT_TABLE,
-      HBEventsUtil.tableName(namespace, appId))
+      HBEventsUtil.tableName(namespace, appId, channelId))
     conf.setClass("mapreduce.outputformat.class",
       classOf[TableOutputFormat[Object]],
       classOf[OutputFormat[Object, Writable]])
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
index 82c4e88..e3edbc3 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
@@ -15,6 +15,8 @@
 
 package io.prediction.data.storage.hbase.upgrade
 
+import io.prediction.annotation.Experimental
+
 import io.prediction.data.storage.Event
 import io.prediction.data.storage.EventValidation
 import io.prediction.data.storage.DataMap
@@ -36,7 +38,8 @@
 
 import scala.collection.JavaConversions._
 
-/* Experimental */
+/** :: Experimental :: */
+@Experimental
 object HB_0_8_0 {
 
   implicit val formats = DefaultFormats
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
index eda9b61..7ef5305 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
@@ -15,13 +15,16 @@
 
 package io.prediction.data.storage.hbase.upgrade
 
+import io.prediction.annotation.Experimental
+
 import io.prediction.data.storage.Storage
 import io.prediction.data.storage.hbase.HBLEvents
 import io.prediction.data.storage.hbase.HBEventsUtil
 
 import scala.collection.JavaConversions._
 
-/* Experimental */
+/** :: Experimental :: */
+@Experimental
 object Upgrade {
 
   def main(args: Array[String]) {
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
index 47f12ba..8b80b83 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
@@ -15,6 +15,8 @@
 
 package io.prediction.data.storage.hbase.upgrade
 
+import io.prediction.annotation.Experimental
+
 import grizzled.slf4j.Logger
 import io.prediction.data.storage.Storage
 import io.prediction.data.storage.DataMap
@@ -35,8 +37,6 @@
   : Map[(String, Option[String]), Int] = {
     eventClient
     .find(appId = appId)
-    .right
-    .get
     .foldLeft(Map[(String, Option[String]), Int]().withDefaultValue(0)) {
       case (m, e) => {
         val k = (e.entityType, e.targetEntityType)
@@ -61,7 +61,8 @@
 
 }
 
-/* Experimental */
+/** :: Experimental :: */
+@Experimental
 object Upgrade_0_8_3 {
   val NameMap = Map(
     "pio_user" -> "user",
@@ -88,7 +89,7 @@
     "pio_inactive", "pio_price", "pio_rating")
 
   def hasPIOPrefix(eventClient: LEvents, appId: Int): Boolean = {
-    eventClient.find(appId = appId).right.get.filter( e =>
+    eventClient.find(appId = appId).filter( e =>
       (obsEntityTypes.contains(e.entityType) ||
        e.targetEntityType.map(obsEntityTypes.contains(_)).getOrElse(false) ||
        (!e.properties.keySet.forall(!obsProperties.contains(_)))
@@ -97,7 +98,7 @@
   }
 
   def isEmpty(eventClient: LEvents, appId: Int): Boolean =
-    !eventClient.find(appId = appId).right.get.hasNext
+    !eventClient.find(appId = appId).hasNext
 
 
   def upgradeCopy(eventClient: LEvents, fromAppId: Int, toAppId: Int) {
@@ -108,8 +109,6 @@
 
     val events = eventClient
     .find(appId = fromAppId)
-    .right
-    .get
     .zipWithIndex
     .foreach { case (fromEvent, index) => {
       if (index % 50000 == 0) {
diff --git a/data/src/main/scala/io/prediction/data/store/Common.scala b/data/src/main/scala/io/prediction/data/store/Common.scala
new file mode 100644
index 0000000..e45051f
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/store/Common.scala
@@ -0,0 +1,50 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.store
+
+import io.prediction.data.storage.Storage
+import grizzled.slf4j.Logger
+
+private[prediction] object Common {
+
+  @transient lazy val logger = Logger[this.type]
+  @transient lazy private val appsDb = Storage.getMetaDataApps()
+  @transient lazy private val channelsDb = Storage.getMetaDataChannels()
+
+  /* Throws an exception if the app name or channel name is invalid. */
+  def appNameToId(appName: String, channelName: Option[String]): (Int, Option[Int]) = {
+    val appOpt = appsDb.getByName(appName)
+
+    appOpt.map { app =>
+      val channelMap: Map[String, Int] = channelsDb.getByAppid(app.id)
+        .map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = channelName.map { ch =>
+        if (channelMap.contains(ch))
+          channelMap(ch)
+        else {
+          logger.error(s"Invalid channel name ${ch}.")
+          throw new IllegalArgumentException(s"Invalid channel name ${ch}.")
+        }
+      }
+
+      (app.id, channelId)
+    }.getOrElse {
+      logger.error(s"Invalid app name ${appName}")
+      throw new IllegalArgumentException(s"Invalid app name ${appName}")
+    }
+  }
+}
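Editor's note: a hedged sketch of the name-to-id resolution this helper provides. The app name "MyApp" and channel "web" are assumptions, and Common is package-private, so a real caller would live inside the io.prediction package:

    // Returns the app id plus Some(channelId) for the named channel,
    // or throws IllegalArgumentException if either name is unknown.
    val (appId, channelId) = Common.appNameToId("MyApp", Some("web"))

    // Without a channel name, channelId is None (the default channel).
    val (appId2, noChannel) = Common.appNameToId("MyApp", None)
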
diff --git a/data/src/main/scala/io/prediction/data/store/LEventStore.scala b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
new file mode 100644
index 0000000..46e0f8f
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
@@ -0,0 +1,89 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.store
+
+import io.prediction.data.storage.Storage
+import io.prediction.data.storage.Event
+
+import org.joda.time.DateTime
+
+import scala.concurrent.Await
+import scala.concurrent.TimeoutException
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration.Duration
+
+/** This object provides a set of operations to access the Event Store
+  * without going through Spark's parallelization
+  */
+object LEventStore {
+
+  private val defaultTimeout = Duration(60, "seconds")
+
+  @transient lazy private val eventsDb = Storage.getLEvents()
+
+  /** Reads events of the specified entity. Use this in an Algorithm's predict()
+    * or Serving logic for fast event store access.
+    *
+    * @param appName return events of this app
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param latest Return latest event first (default true)
+    * @return Iterator[Event]
+    */
+  def findByEntity(
+    appName: String,
+    entityType: String,
+    entityId: String,
+    channelName: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    limit: Option[Int] = None,
+    latest: Boolean = true,
+    timeout: Duration = defaultTimeout): Iterator[Event] = {
+
+    val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+    Await.result(eventsDb.futureFind(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = Some(entityType),
+      entityId = Some(entityId),
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      limit = limit,
+      reversed = Some(latest)), timeout)
+  }
+
+}
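Editor's note: a usage sketch of the new LEventStore.findByEntity API, e.g. from an Algorithm's predict() or Serving code. The app name "MyApp" and event name "view" are assumptions for illustration:

    import io.prediction.data.store.LEventStore

    // Up to 10 most recent "view" events of user "u123" in the default channel.
    val recentViews = LEventStore.findByEntity(
      appName = "MyApp",
      entityType = "user",
      entityId = "u123",
      eventNames = Some(Seq("view")),
      limit = Some(10),
      latest = true).toList
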
diff --git a/data/src/main/scala/io/prediction/data/store/PEventStore.scala b/data/src/main/scala/io/prediction/data/store/PEventStore.scala
new file mode 100644
index 0000000..cd20da9
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/store/PEventStore.scala
@@ -0,0 +1,116 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.store
+
+import io.prediction.data.storage.Storage
+import io.prediction.data.storage.Event
+import io.prediction.data.storage.PropertyMap
+
+import org.joda.time.DateTime
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** This object provides a set of operations to access the Event Store
+  * with Spark's parallelization
+  */
+object PEventStore {
+
+  @transient lazy private val eventsDb = Storage.getPEvents()
+
+  /** Read events from Event Store
+    *
+    * @param appName return events of this app
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param sc Spark context
+    * @return RDD[Event]
+    */
+  def find(
+    appName: String,
+    channelName: Option[String] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None
+  )(sc: SparkContext): RDD[Event] = {
+
+    val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+    eventsDb.find(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = entityType,
+      entityId = entityId,
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId
+    )(sc)
+
+  }
+
+  /** Aggregate properties of entities based on these special events:
+    * \$set, \$unset, \$delete events.
+    *
+    * @param appName use events of this app
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param channelName use events of this channel (default channel if it's None)
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param sc Spark context
+    * @return RDD[(String, PropertyMap)] RDD of entityId and PropertyMap pairs
+    */
+  def aggregateProperties(
+    appName: String,
+    entityType: String,
+    channelName: Option[String] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None)
+    (sc: SparkContext): RDD[(String, PropertyMap)] = {
+
+      val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+      eventsDb.aggregateProperties(
+        appId = appId,
+        entityType = entityType,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        required = required
+      )(sc)
+
+    }
+
+}
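Editor's note: a hedged sketch of reading training data through the new name-based PEventStore API, e.g. inside a DataSource. The app name "MyApp", channel "web", and the entity/event names are assumptions:

    import io.prediction.data.storage.{Event, PropertyMap}
    import io.prediction.data.store.PEventStore
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    def readTraining(sc: SparkContext): (RDD[Event], RDD[(String, PropertyMap)]) = {
      // All "buy" events of users from the "web" channel of app "MyApp".
      val buyEvents = PEventStore.find(
        appName = "MyApp",
        channelName = Some("web"),
        entityType = Some("user"),
        eventNames = Some(Seq("buy")))(sc)

      // Latest aggregated $set/$unset/$delete properties of all "item" entities.
      val items = PEventStore.aggregateProperties(
        appName = "MyApp",
        entityType = "item")(sc)

      (buyEvents, items)
    }
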
diff --git a/data/src/main/scala/io/prediction/data/view/LBatchView.scala b/data/src/main/scala/io/prediction/data/view/LBatchView.scala
index ba2ac94..f806056 100644
--- a/data/src/main/scala/io/prediction/data/view/LBatchView.scala
+++ b/data/src/main/scala/io/prediction/data/view/LBatchView.scala
@@ -13,7 +13,6 @@
   * limitations under the License.
   */
 
-/* Deprecated */
 package io.prediction.data.view
 
 import io.prediction.data.storage.Event
@@ -26,6 +25,7 @@
 
 import scala.concurrent.ExecutionContext.Implicits.global // TODO
 
+@deprecated("Use LEvents or LEventStore instead.", "0.9.2")
 object ViewPredicates {
   def getStartTimePredicate(startTimeOpt: Option[DateTime])
   : (Event => Boolean) = {
@@ -64,6 +64,7 @@
   }
 }
 
+@deprecated("Use LEvents instead.", "0.9.2")
 object ViewAggregators {
   def getDataMapAggregator(): ((Option[DataMap], Event) => Option[DataMap]) = {
     (p, e) => {
@@ -89,7 +90,7 @@
   }
 }
 
-
+@deprecated("Use LEvents instead.", "0.9.2")
 object EventSeq {
   // Need to
   // >>> import scala.language.implicitConversions
@@ -100,6 +101,7 @@
 }
 
 
+@deprecated("Use LEvents instead.", "0.9.2")
 class EventSeq(val events: List[Event]) {
   def filter(
     eventOpt: Option[String] = None,
@@ -128,6 +130,7 @@
 }
 
 
+@deprecated("Use LEventStore instead.", "0.9.2")
 class LBatchView(
   val appId: Int,
   val startTime: Option[DateTime],
@@ -135,8 +138,10 @@
 
   @transient lazy val eventsDb = Storage.getLEvents()
 
-  @transient lazy val _events = eventsDb.getByAppIdAndTime(appId,
-    startTime, untilTime).right.get.toList
+  @transient lazy val _events = eventsDb.find(
+    appId = appId,
+    startTime = startTime,
+    untilTime = untilTime).toList
 
   @transient lazy val events: EventSeq = new EventSeq(_events)
 
diff --git a/data/src/main/scala/io/prediction/data/view/PBatchView.scala b/data/src/main/scala/io/prediction/data/view/PBatchView.scala
index ee0d6cf..5b0f878 100644
--- a/data/src/main/scala/io/prediction/data/view/PBatchView.scala
+++ b/data/src/main/scala/io/prediction/data/view/PBatchView.scala
@@ -13,7 +13,6 @@
   * limitations under the License.
   */
 
-/* Deprecated */
 package io.prediction.data.view
 
 import io.prediction.data.storage.hbase.HBPEvents
@@ -30,10 +29,11 @@
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
-// each JValue data associated with the time it is set
-case class PropTime(val d: JValue, val t: Long) extends Serializable
 
-case class SetProp (
+// each JValue data associated with the time it is set
+private[prediction] case class PropTime(val d: JValue, val t: Long) extends Serializable
+
+private[prediction] case class SetProp (
   val fields: Map[String, PropTime],
   // last set time. Note: fields could be empty with valid set time
   val t: Long) extends Serializable {
@@ -62,7 +62,7 @@
   }
 }
 
-case class UnsetProp (fields: Map[String, Long]) extends Serializable {
+private[prediction] case class UnsetProp (fields: Map[String, Long]) extends Serializable {
   def ++ (that: UnsetProp): UnsetProp = {
     val commonKeys = fields.keySet.intersect(that.fields.keySet)
 
@@ -83,13 +83,13 @@
   }
 }
 
-case class DeleteEntity (t: Long) extends Serializable {
+private[prediction] case class DeleteEntity (t: Long) extends Serializable {
   def ++ (that: DeleteEntity): DeleteEntity = {
     if (this.t > that.t) this else that
   }
 }
 
-case class EventOp (
+private[prediction] case class EventOp (
   val setProp: Option[SetProp] = None,
   val unsetProp: Option[UnsetProp] = None,
   val deleteEntity: Option[DeleteEntity] = None
@@ -133,7 +133,7 @@
 
 }
 
-object EventOp {
+private[prediction] object EventOp {
   def apply(e: Event): EventOp = {
     val t = e.eventTime.getMillis
     e.event match {
@@ -164,7 +164,7 @@
   }
 }
 
-
+@deprecated("Use PEvents or PEventStore instead.", "0.9.2")
 class PBatchView(
   val appId: Int,
   val startTime: Option[DateTime],
diff --git a/data/src/main/scala/io/prediction/data/view/QuickTest.scala b/data/src/main/scala/io/prediction/data/view/QuickTest.scala
index 7975f33..68ade1d 100644
--- a/data/src/main/scala/io/prediction/data/view/QuickTest.scala
+++ b/data/src/main/scala/io/prediction/data/view/QuickTest.scala
@@ -32,10 +32,12 @@
   @transient lazy val eventsDb = Storage.getLEvents()
 
   def run(): Unit = {
-    val r = eventsDb
-      .getByAppIdAndTimeAndEntity(
-        1, None, None,
-        Some("pio_user"), Some("3")).right.get.toList
+    val r = eventsDb.find(
+      appId = 1,
+      startTime = None,
+      untilTime = None,
+      entityType = Some("pio_user"),
+      entityId = Some("3")).toList
     println(r)
   }
 }
diff --git a/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala b/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala
new file mode 100644
index 0000000..0b64afb
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks
+
+/** Webhooks Connector Exception
+  *
+  * @param message the detail message
+  * @param cause the cause
+  */
+private[prediction] class ConnectorException(message: String, cause: Throwable)
+  extends Exception(message, cause) {
+
+  /** Webhooks Connector Exception with the cause set to null
+    *
+    * @param message the detail message
+    */
+  def this(message: String) = this(message, null)
+}
diff --git a/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala b/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala
new file mode 100644
index 0000000..424b6ba
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala
@@ -0,0 +1,46 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks
+
+import io.prediction.data.storage.Event
+import io.prediction.data.storage.EventJson4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+
+private[prediction] object ConnectorUtil {
+
+  implicit val eventJson4sFormats: Formats = DefaultFormats +
+    new EventJson4sSupport.APISerializer
+
+  // Intentionally use EventJson4sSupport.APISerializer to convert
+  // from JSON to Event objects. Connectors are not allowed to create
+  // Event objects directly, so that Event object formation stays
+  // consistent by enforcing the JSON format.
+
+  def toEvent(connector: JsonConnector, data: JObject): Event = {
+    read[Event](write(connector.toEventJson(data)))
+  }
+
+  def toEvent(connector: FormConnector, data: Map[String, String]): Event = {
+    read[Event](write(connector.toEventJson(data)))
+  }
+
+}
diff --git a/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala
new file mode 100644
index 0000000..9087f31
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala
@@ -0,0 +1,32 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks
+
+import org.json4s.JObject
+
+/** Connector for a Webhooks connection using the form submission data format
+  */
+private[prediction] trait FormConnector {
+
+  // TODO: support conversion to multiple events?
+
+  /** Convert from original Form submission data to Event JObject
+    * @param data Map of key-value pairs in String type received through webhooks
+    * @return Event JObject
+   */
+  def toEventJson(data: Map[String, String]): JObject
+
+}
diff --git a/core/src/main/scala/io/prediction/controller/java/package.scala b/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
similarity index 60%
rename from core/src/main/scala/io/prediction/controller/java/package.scala
rename to data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
index 501db5b..e0e80fe 100644
--- a/core/src/main/scala/io/prediction/controller/java/package.scala
+++ b/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
@@ -13,11 +13,19 @@
   * limitations under the License.
   */
 
-package io.prediction.controller
-/** This package provides the Java backend for the Java API. If you are writing
-  * in Java, please refer to the Javadoc of the io.prediction.controller.java
-  * package instead.
-  *
-  * @group Java
-  */
-package object java {}
+package io.prediction.data.webhooks
+
+import org.json4s.JObject
+
+/** Connector for Webhooks connection */
+private[prediction] trait JsonConnector {
+
+  // TODO: support conversion to multiple events?
+
+  /** Convert from original JObject to Event JObject
+    * @param data original JObject received through webhooks
+    * @return Event JObject
+   */
+  def toEventJson(data: JObject): JObject
+
+}
diff --git a/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala
new file mode 100644
index 0000000..f19e009
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala
@@ -0,0 +1,123 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.exampleform
+
+import io.prediction.data.webhooks.FormConnector
+import io.prediction.data.webhooks.ConnectorException
+
+import org.json4s.JObject
+
+
+/** Example FormConnector with the following types of webhook form data inputs:
+  *
+  * UserAction
+  *
+  *   "type"="userAction"
+  *   "userId"="as34smg4",
+  *   "event"="do_something",
+  *   "context[ip]"="24.5.68.47", // optional
+  *   "context[prop1]"="2.345", // optional
+  *   "context[prop2]"="value1" // optional
+  *   "anotherProperty1"="100",
+  *   "anotherProperty2"="optional1", // optional
+  *   "timestamp"="2015-01-02T00:30:12.984Z"
+  *
+  * UserActionItem
+  *
+  *   "type"="userActionItem"
+  *   "userId"="as34smg4",
+  *   "event"="do_something_on",
+  *   "itemId"="kfjd312bc",
+  *   "context[ip]"="1.23.4.56",
+  *   "context[prop1]"="2.345",
+  *   "context[prop2]"="value1",
+  *   "anotherPropertyA"="4.567", // optional
+  *   "anotherPropertyB"="false", // optional
+  *   "timestamp"="2015-01-15T04:20:23.567Z"
+  *
+  */
+private[prediction] object ExampleFormConnector extends FormConnector {
+
+  override
+  def toEventJson(data: Map[String, String]): JObject = {
+    val json = try {
+      data.get("type") match {
+        case Some("userAction") => userActionToEventJson(data)
+        case Some("userActionItem") => userActionItemToEventJson(data)
+        case Some(x) => throw new ConnectorException(
+          s"Cannot convert unknown type ${x} to event JSON")
+        case None => throw new ConnectorException(
+          s"The field 'type' is required.")
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception => throw new ConnectorException(
+        s"Cannot convert ${data} to event JSON. ${e.getMessage()}", e)
+    }
+    json
+  }
+
+  def userActionToEventJson(data: Map[String, String]): JObject = {
+    import org.json4s.JsonDSL._
+
+    // two level optional data
+    val context = if (data.exists(_._1.startsWith("context["))) {
+      Some(
+        ("ip" -> data.get("context[ip]")) ~
+        ("prop1" -> data.get("context[prop1]").map(_.toDouble)) ~
+        ("prop2" -> data.get("context[prop2]"))
+      )
+    } else {
+      None
+    }
+
+    val json =
+      ("event" -> data("event")) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("userId")) ~
+      ("eventTime" -> data("timestamp")) ~
+      ("properties" -> (
+        ("context" -> context) ~
+        ("anotherProperty1" -> data("anotherProperty1").toInt) ~
+        ("anotherProperty2" -> data.get("anotherProperty2"))
+      ))
+    json
+  }
+
+
+  def userActionItemToEventJson(data: Map[String, String]): JObject = {
+    import org.json4s.JsonDSL._
+
+    val json =
+      ("event" -> data("event")) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("userId")) ~
+      ("targetEntityType" -> "item") ~
+      ("targetEntityId" -> data("itemId")) ~
+      ("eventTime" -> data("timestamp")) ~
+      ("properties" -> (
+        ("context" -> (
+          ("ip" -> data("context[ip]")) ~
+          ("prop1" -> data("context[prop1]").toDouble) ~
+          ("prop2" -> data("context[prop2]"))
+        )) ~
+        ("anotherPropertyA" -> data.get("anotherPropertyA").map(_.toDouble)) ~
+        ("anotherPropertyB" -> data.get("anotherPropertyB").map(_.toBoolean))
+      ))
+    json
+  }
+
+}
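Editor's note: a sketch of pushing form-encoded data through this connector via ConnectorUtil. The values mirror the scaladoc sample above and are illustrative; both objects are package-private, so this code would sit inside the io.prediction package:

    import io.prediction.data.webhooks.ConnectorUtil

    val formData = Map(
      "type" -> "userAction",
      "userId" -> "as34smg4",
      "event" -> "do_something",
      "context[ip]" -> "24.5.68.47",
      "anotherProperty1" -> "100",
      "timestamp" -> "2015-01-02T00:30:12.984Z")

    // Builds the Event API JSON and deserializes it into an Event with
    // entityType "user" and entityId "as34smg4"; a malformed map raises
    // ConnectorException instead.
    val event = ConnectorUtil.toEvent(ExampleFormConnector, formData)
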
diff --git a/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala
new file mode 100644
index 0000000..6fdf6f3
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala
@@ -0,0 +1,153 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.examplejson
+
+import io.prediction.data.webhooks.JsonConnector
+import io.prediction.data.webhooks.ConnectorException
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+
+/** Example JsonConnector with the following types of webhooks JSON input:
+  *
+  * UserAction
+  *
+  * {
+  *   "type": "userAction"
+  *   "userId": "as34smg4",
+  *   "event": "do_something",
+  *   "context": {
+  *     "ip": "24.5.68.47",
+  *     "prop1": 2.345,
+  *     "prop2": "value1"
+  *   },
+  *   "anotherProperty1": 100,
+  *   "anotherProperty2": "optional1",
+  *   "timestamp": "2015-01-02T00:30:12.984Z"
+  * }
+  *
+  * UserActionItem
+  *
+  * {
+  *   "type": "userActionItem"
+  *   "userId": "as34smg4",
+  *   "event": "do_something_on",
+  *   "itemId": "kfjd312bc",
+  *   "context": {
+  *     "ip": "1.23.4.56",
+  *     "prop1": 2.345,
+  *     "prop2": "value1"
+  *   },
+  *   "anotherPropertyA": 4.567,
+  *   "anotherPropertyB": false,
+  *   "timestamp": "2015-01-15T04:20:23.567Z"
+  * }
+  */
+private[prediction] object ExampleJsonConnector extends JsonConnector {
+
+  implicit val json4sFormats: Formats = DefaultFormats
+
+  override def toEventJson(data: JObject): JObject = {
+    val common = try {
+      data.extract[Common]
+    } catch {
+      case e: Exception => throw new ConnectorException(
+        s"Cannot extract Common field from ${data}. ${e.getMessage()}", e)
+    }
+
+    val json = try {
+      common.`type` match {
+        case "userAction" =>
+          toEventJson(common = common, userAction = data.extract[UserAction])
+        case "userActionItem" =>
+          toEventJson(common = common, userActionItem = data.extract[UserActionItem])
+        case x: String =>
+          throw new ConnectorException(
+            s"Cannot convert unknown type '${x}' to Event JSON.")
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception => throw new ConnectorException(
+        s"Cannot convert ${data} to eventJson. ${e.getMessage()}", e)
+    }
+
+    json
+  }
+
+  def toEventJson(common: Common, userAction: UserAction): JObject = {
+    import org.json4s.JsonDSL._
+
+    // map to EventAPI JSON
+    val json =
+      ("event" -> userAction.event) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> userAction.userId) ~
+      ("eventTime" -> userAction.timestamp) ~
+      ("properties" -> (
+        ("context" -> userAction.context) ~
+        ("anotherProperty1" -> userAction.anotherProperty1) ~
+        ("anotherProperty2" -> userAction.anotherProperty2)
+      ))
+    json
+  }
+
+  def toEventJson(common: Common, userActionItem: UserActionItem): JObject = {
+    import org.json4s.JsonDSL._
+
+    // map to EventAPI JSON
+    val json =
+      ("event" -> userActionItem.event) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> userActionItem.userId) ~
+      ("targetEntityType" -> "item") ~
+      ("targetEntityId" -> userActionItem.itemId) ~
+      ("eventTime" -> userActionItem.timestamp) ~
+      ("properties" -> (
+        ("context" -> userActionItem.context) ~
+        ("anotherPropertyA" -> userActionItem.anotherPropertyA) ~
+        ("anotherPropertyB" -> userActionItem.anotherPropertyB)
+      ))
+    json
+  }
+
+  // Common required fields
+  case class Common(
+    `type`: String
+  )
+
+  // User Actions fields
+  case class UserAction (
+    userId: String,
+    event: String,
+    context: Option[JObject],
+    anotherProperty1: Int,
+    anotherProperty2: Option[String],
+    timestamp: String
+  )
+
+  // UserActionItem fields
+  case class UserActionItem (
+    userId: String,
+    event: String,
+    itemId: String,
+    context: JObject,
+    anotherPropertyA: Option[Double],
+    anotherPropertyB: Option[Boolean],
+    timestamp: String
+  )
+
+}
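Editor's note: a sketch of converting a raw webhooks JSON payload into an Event via ConnectorUtil, assuming the payload follows the UserAction shape documented above (values are illustrative):

    import io.prediction.data.webhooks.ConnectorUtil
    import org.json4s.JObject
    import org.json4s.native.JsonMethods.parse

    val payload = parse("""
      {
        "type": "userAction",
        "userId": "as34smg4",
        "event": "do_something",
        "anotherProperty1": 100,
        "timestamp": "2015-01-02T00:30:12.984Z"
      }""").asInstanceOf[JObject]

    // Dispatches on the "type" field, maps to Event API JSON, then reads it
    // back as an Event; unknown types raise ConnectorException.
    val event = ConnectorUtil.toEvent(ExampleJsonConnector, payload)
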
diff --git a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
new file mode 100644
index 0000000..657f556
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
@@ -0,0 +1,100 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+
+package io.prediction.data.webhooks.mailchimp
+
+import io.prediction.data.webhooks.FormConnector
+import io.prediction.data.webhooks.ConnectorException
+import io.prediction.data.storage.EventValidation
+import io.prediction.data.Utils
+
+import org.json4s.JObject
+
+import org.joda.time.DateTime
+import org.joda.time.format.DateTimeFormat
+
+private[prediction] object MailChimpConnector extends FormConnector {
+
+  override
+  def toEventJson(data: Map[String, String]): JObject = {
+
+    val json = data.get("type") match {
+      case Some("subscribe") => subscribeToEventJson(data)
+      // TODO: support other events
+      case Some(x) => throw new ConnectorException(
+        s"Cannot convert unknown MailChimp data type ${x} to event JSON")
+      case None => throw new ConnectorException(
+        s"The field 'type' is required for MailChimp data.")
+    }
+    json
+  }
+
+
+  val mailChimpDateTimeFormat = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
+    .withZone(EventValidation.defaultTimeZone)
+
+  def parseMailChimpDateTime(s: String): DateTime = {
+    mailChimpDateTimeFormat.parseDateTime(s)
+  }
+
+  def subscribeToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "type": "subscribe",
+    "fired_at": "2009-03-26 21:35:57",
+    "data[id]": "8a25ff1d98",
+    "data[list_id]": "a6b5da1054",
+    "data[email]": "api@mailchimp.com",
+    "data[email_type]": "html",
+    "data[merges][EMAIL]": "api@mailchimp.com",
+    "data[merges][FNAME]": "MailChimp",
+    "data[merges][LNAME]": "API",
+    "data[merges][INTERESTS]": "Group1,Group2",
+    "data[ip_opt]": "10.20.10.30",
+    "data[ip_signup]": "10.20.10.30"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    // TODO: handle optional fields
+    val json =
+      ("event" -> "subscribe") ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("data[id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("email" -> data("data[email]")) ~
+        ("email_type" -> data("data[email_type]")) ~
+        ("merges" -> (
+          ("EMAIL" -> data("data[merges][EMAIL]")) ~
+          ("FNAME" -> data("data[merges][FNAME]"))) ~
+          ("LNAME" -> data("data[merges][LNAME]")) ~
+          ("INTERESTS" -> data("data[merges][INTERESTS]"))
+        )) ~
+        ("ip_opt" -> data("data[ip_opt]")) ~
+        ("ip_signup" -> data("data[ip_signup]")
+      ))
+
+    json
+
+  }
+
+}
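Editor's note: a minimal sketch of converting a MailChimp "subscribe" webhook (form-encoded, mirroring the commented sample above) into an Event; all values are illustrative and the objects are package-private to io.prediction:

    import io.prediction.data.webhooks.ConnectorUtil

    val subscribeData = Map(
      "type" -> "subscribe",
      "fired_at" -> "2009-03-26 21:35:57",
      "data[id]" -> "8a25ff1d98",
      "data[list_id]" -> "a6b5da1054",
      "data[email]" -> "api@mailchimp.com",
      "data[email_type]" -> "html",
      "data[merges][EMAIL]" -> "api@mailchimp.com",
      "data[merges][FNAME]" -> "MailChimp",
      "data[merges][LNAME]" -> "API",
      "data[merges][INTERESTS]" -> "Group1,Group2",
      "data[ip_opt]" -> "10.20.10.30",
      "data[ip_signup]" -> "10.20.10.30")

    // Produces a "subscribe" event from user "8a25ff1d98" on list "a6b5da1054",
    // with fired_at converted from MailChimp's format to ISO 8601.
    val event = ConnectorUtil.toEvent(MailChimpConnector, subscribeData)
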
diff --git a/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
new file mode 100644
index 0000000..21bece9
--- /dev/null
+++ b/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
@@ -0,0 +1,85 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.segmentio
+
+import io.prediction.data.webhooks.JsonConnector
+import io.prediction.data.webhooks.ConnectorException
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+
+private[prediction] object SegmentIOConnector extends JsonConnector {
+
+  implicit val json4sFormats: Formats = DefaultFormats
+
+  override
+  def toEventJson(data: JObject): JObject = {
+    // TODO: check segmentio API version
+    val common = try {
+      data.extract[Common]
+    } catch {
+      case e: Exception => throw new ConnectorException(
+          s"Cannot extract Common field from ${data}. ${e.getMessage()}", e)
+    }
+
+    try {
+      common.`type` match {
+        case "identify" => toEventJson(
+          common = common,
+          identify = data.extract[Identify])
+
+        // TODO: support other events
+        case _ => throw new ConnectorException(
+          s"Cannot convert unknown type ${common.`type`} to event JSON.")
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception => throw new ConnectorException(
+        s"Cannot convert ${data} to event JSON. ${e.getMessage()}", e)
+    }
+  }
+
+  def toEventJson(common: Common, identify: Identify): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    val json =
+      ("event" -> common.`type`) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> identify.userId) ~
+      ("eventTime" -> common.timestamp) ~
+      ("properties" -> (
+        ("context" -> common.context) ~
+        ("traits" -> identify.traits)
+      ))
+    json
+  }
+
+}
+
+private[prediction] case class Common(
+  `type`: String,
+  context: Option[JObject],
+  timestamp: String
+  // TODO: add more fields
+)
+
+private[prediction] case class Identify(
+  userId: String,
+  traits: Option[JObject]
+  // TODO: add more fields
+)
diff --git a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala b/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
index 8daf1fb..fc8b6ce 100644
--- a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
+++ b/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
@@ -16,6 +16,8 @@
 package io.prediction.data.api
 
 import io.prediction.data.storage.Storage
+import io.prediction.data.webhooks.JsonConnector
+import io.prediction.data.webhooks.FormConnector
 
 import akka.testkit.TestProbe
 import akka.actor.ActorSystem
@@ -33,10 +35,15 @@
   val system = ActorSystem("EventServiceSpecSystem")
 
   val eventClient = Storage.getLEvents()
-  val accessKeysClient = Storage.getMetaDataAccessKeys
-
+  val accessKeysClient = Storage.getMetaDataAccessKeys()
+  val channelsClient = Storage.getMetaDataChannels()
+  
   val eventServiceActor = system.actorOf(
-    Props(classOf[EventServiceActor], eventClient, accessKeysClient, true))
+    Props(classOf[EventServiceActor],
+      eventClient,
+      accessKeysClient,
+      channelsClient,
+      true))
 
   "GET / request" should {
     "properly produce OK HttpResponses" in {
@@ -54,6 +61,6 @@
       success
     }
   }
-  
+
   step(system.shutdown())
 }
diff --git a/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala b/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala
new file mode 100644
index 0000000..fb702fc
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala
@@ -0,0 +1,127 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage
+
+import org.specs2.mutable._
+
+class DataMapSpec extends Specification {
+
+  "DataMap" should {
+
+    val properties = DataMap("""
+      {
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c", "c"],
+        "prop6" : 4.56
+      }
+      """)
+
+    "get Int data" in {
+      properties.get[Int]("prop1") must beEqualTo(1)
+      properties.getOpt[Int]("prop1") must beEqualTo(Some(1))
+    }
+
+    "get String data" in {
+      properties.get[String]("prop2") must beEqualTo("value2")
+      properties.getOpt[String]("prop2") must beEqualTo(Some("value2"))
+    }
+
+    "get List of Int data" in {
+      properties.get[List[Int]]("prop3") must beEqualTo(List(1,2,3))
+      properties.getOpt[List[Int]]("prop3") must beEqualTo(Some(List(1,2,3)))
+    }
+
+    "get Boolean data" in {
+      properties.get[Boolean]("prop4") must beEqualTo(true)
+      properties.getOpt[Boolean]("prop4") must beEqualTo(Some(true))
+    }
+
+    "get List of String data" in {
+      properties.get[List[String]]("prop5") must beEqualTo(List("a", "b", "c", "c"))
+      properties.getOpt[List[String]]("prop5") must beEqualTo(Some(List("a", "b", "c", "c")))
+    }
+
+    "get Set of String data" in {
+      properties.get[Set[String]]("prop5") must beEqualTo(Set("a", "b", "c"))
+      properties.getOpt[Set[String]]("prop5") must beEqualTo(Some(Set("a", "b", "c")))
+    }
+
+    "get Double data" in {
+      properties.get[Double]("prop6") must beEqualTo(4.56)
+      properties.getOpt[Double]("prop6") must beEqualTo(Some(4.56))
+    }
+
+    "get empty optional Int data" in {
+      properties.getOpt[Int]("prop9999") must beEqualTo(None)
+    }
+
+  }
+
+  "DataMap with multi-level data" should {
+    val properties = DataMap("""
+      {
+        "context": {
+          "ip": "1.23.4.56",
+          "prop1": 2.345
+          "prop2": "value1",
+          "prop4": [1, 2, 3]
+        },
+        "anotherPropertyA": 4.567,
+        "anotherPropertyB": false
+      }
+      """)
+
+    "get case class data" in {
+      val expected = DataMapSpec.Context(
+        ip = "1.23.4.56",
+        prop1 = Some(2.345),
+        prop2 = Some("value1"),
+        prop3 = None,
+        prop4 = List(1,2,3)
+      )
+
+      properties.get[DataMapSpec.Context]("context") must beEqualTo(expected)
+    }
+
+    "get empty optional case class data" in {
+      properties.getOpt[DataMapSpec.Context]("context999") must beEqualTo(None)
+    }
+
+    "get double data" in {
+      properties.get[Double]("anotherPropertyA") must beEqualTo(4.567)
+    }
+
+    "get boolean data" in {
+      properties.get[Boolean]("anotherPropertyB") must beEqualTo(false)
+    }
+  }
+
+}
+
+object DataMapSpec {
+
+  // Define this case class inside the companion object to avoid case class name conflicts with other tests
+  case class Context(
+    ip: String,
+    prop1: Option[Double],
+    prop2: Option[String],
+    prop3: Option[Int],
+    prop4: List[Int]
+  )
+}
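
As a quick illustration of the accessors the new DataMapSpec exercises, here is a minimal sketch of typed reads from a `DataMap`. It uses only calls shown in the spec above (`DataMap(jsonString)`, `get[T]`, `getOpt[T]`, and extraction of a nested object into a case class); the `Purchase` case class and all property names are hypothetical and not part of this change.

```
import io.prediction.data.storage.DataMap

object DataMapUsageSketch {
  // Hypothetical case class, mirroring the nested-object extraction in DataMapSpec.
  case class Purchase(item: String, quantity: Int)

  val props = DataMap("""
    {
      "purchase": {"item": "sku-123", "quantity": 2},
      "price": 9.99
    }
    """)

  val price: Double = props.get[Double]("price")                  // required key
  val discount: Option[Double] = props.getOpt[Double]("discount") // None when the key is absent
  val purchase: Purchase = props.get[Purchase]("purchase")        // nested object extracted to a case class
}
```
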
diff --git a/data/src/test/scala/io/prediction/data/storage/EventsSpec.scala b/data/src/test/scala/io/prediction/data/storage/EventsSpec.scala
deleted file mode 100644
index fa0cadb..0000000
--- a/data/src/test/scala/io/prediction/data/storage/EventsSpec.scala
+++ /dev/null
@@ -1,118 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2._
-import org.specs2.specification.Step
-
-import org.json4s.JObject
-import org.json4s.native.JsonMethods.parse
-
-import org.joda.time.DateTime
-
-class EventsSpec extends Specification {
-  def is = s2"""
-
-  PredictionIO Storage Events Specification
-
-    Events can be implemented by:
-    - HBEvents ${hbEvents}
-
-  """
-
-  def hbEvents = s2"""
-
-    HBEvents should" ^
-    - behave like any Events implementation ${events(hbDO)}
-    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
-
-  """
-
-  def events(eventClient: LEvents) = s2"""
-
-    inserting and getting 3 Events ${insertTest(eventClient)}
-
-  """
-
-  val dbName = "test_pio_storage_events_" + hashCode
-  def hbDO = Storage.getDataObject[LEvents](
-    StorageTestUtils.hbaseSourceName,
-    dbName
-  )
-
-  def insertTest(eventClient: LEvents) = {
-
-    val appId = 1
-
-    eventClient.init(appId)
-
-    val listOfEvents = List(
-      Event(
-        event = "my_event",
-        entityType = "my_entity_type",
-        entityId = "my_entity_id",
-        targetEntityType = Some("my_target_entity_type"),
-        targetEntityId = Some("my_target_entity_id"),
-        properties = DataMap(parse(
-          """{
-            "prop1" : 1,
-            "prop2" : "value2",
-            "prop3" : [1, 2, 3],
-            "prop4" : true,
-            "prop5" : ["a", "b", "c"],
-            "prop6" : 4.56
-          }"""
-          ).asInstanceOf[JObject]),
-        eventTime = DateTime.now,
-        prId = Some("my_prid")
-      ),
-      Event(
-        event = "my_event2",
-        entityType = "my_entity_type2",
-        entityId = "my_entity_id2"
-      ),
-      Event(
-        event = "my_event3",
-        entityType = "my_entity_type",
-        entityId = "my_entity_id",
-        targetEntityType = Some("my_target_entity_type"),
-        targetEntityId = Some("my_target_entity_id"),
-        properties = DataMap(parse(
-          """{
-            "propA" : 1.2345,
-            "propB" : "valueB",
-          }"""
-          ).asInstanceOf[JObject]),
-        prId = Some("my_prid")
-      )
-    )
-
-    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
-
-    val insertedEventId: List[String] = insertResp.map { resp =>
-      resp.right.get
-    }
-
-    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
-      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
-
-    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
-
-    val getEvents = getResp.map { resp => resp.right.get }
-
-    insertedEvent must containTheSameElementsAs(getEvents)
-  }
-}
diff --git a/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala b/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala
new file mode 100644
index 0000000..30bbeaa
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala
@@ -0,0 +1,218 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage
+
+import org.specs2._
+import org.specs2.specification.Step
+
+class LEventsSpec extends Specification with TestEvents {
+  def is = s2"""
+
+  PredictionIO Storage LEvents Specification
+
+    Events can be implemented by:
+    - HBEvents ${hbEvents}
+
+  """
+
+  def hbEvents = sequential ^ s2"""
+
+    HBEvents should
+    - behave like any Events implementation ${events(hbDO)}
+    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
+
+  """
+
+  val appId = 1
+
+  def events(eventClient: LEvents) = sequential ^ s2"""
+
+    init default ${initDefault(eventClient)}
+    insert 3 test events and get back by event ID ${insertAndGetEvents(eventClient)}
+    insert and delete by ID ${insertAndDelete(eventClient)}
+    insert test user events ${insertTestUserEvents(eventClient)}
+    find user events ${findUserEvents(eventClient)}
+    aggregate user properties ${aggregateUserProperties(eventClient)}
+    aggregate properties of one user ${aggregateOneUserProperties(eventClient)}
+    aggregate non-existent user properties ${aggregateNonExistentUserProperties(eventClient)}
+    init channel ${initChannel(eventClient)}
+    insert 2 events to channel ${insertChannel(eventClient)}
+    insert 1 event to channel and delete by ID  ${insertAndDeleteChannel(eventClient)}
+    find events from channel ${findChannel(eventClient)}
+    remove default ${removeDefault(eventClient)}
+    remove channel ${removeChannel(eventClient)}
+
+  """
+
+  val dbName = "test_pio_storage_events_" + hashCode
+  def hbDO = Storage.getDataObject[LEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+
+  def initDefault(eventClient: LEvents) = {
+    eventClient.init(appId)
+  }
+
+  def insertAndGetEvents(eventClient: LEvents) = {
+
+    // events from TestEvents trait
+    val listOfEvents = List(r1,r2,r3)
+
+    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
+
+    val insertedEventId: List[String] = insertResp
+
+    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
+      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
+
+    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
+
+    val getEvents = getResp
+
+    insertedEvent must containTheSameElementsAs(getEvents)
+  }
+
+  def insertAndDelete(eventClient: LEvents) = {
+    val eventId = eventClient.insert(r2, appId)
+
+    val resultBefore = eventClient.get(eventId, appId)
+
+    val expectedBefore = r2.copy(eventId = Some(eventId))
+
+    val deleteStatus = eventClient.delete(eventId, appId)
+
+    val resultAfter = eventClient.get(eventId, appId)
+
+    (resultBefore must beEqualTo(Some(expectedBefore))) and
+    (deleteStatus must beEqualTo(true)) and
+    (resultAfter must beEqualTo(None))
+  }
+
+  def insertTestUserEvents(eventClient: LEvents) = {
+    // events from TestEvents trait
+    val listOfEvents = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+
+    listOfEvents.map{ eventClient.insert(_, appId) }
+
+    success
+  }
+
+  def findUserEvents(eventClient: LEvents) = {
+
+    val results: List[Event] = eventClient.find(
+      appId = appId,
+      entityType = Some("user"))
+      .toList
+      .map(e => e.copy(eventId = None)) // ignore eventID
+
+    // same events in insertTestUserEvents
+    val expected = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def aggregateUserProperties(eventClient: LEvents) = {
+
+    val result: Map[String, PropertyMap] = eventClient.aggregateProperties(
+      appId = appId,
+      entityType = "user")
+
+    val expected = Map(
+      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateOneUserProperties(eventClient: LEvents) = {
+    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
+      appId = appId,
+      entityType = "user",
+      entityId = "u1")
+
+    val expected = Some(PropertyMap(u1, u1BaseTime, u1LastTime))
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateNonExistentUserProperties(eventClient: LEvents) = {
+    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
+      appId = appId,
+      entityType = "user",
+      entityId = "u999999")
+
+    result must beEqualTo(None)
+  }
+
+  val channelId = 12
+
+  def initChannel(eventClient: LEvents) = {
+    eventClient.init(appId, Some(channelId))
+  }
+
+  def insertChannel(eventClient: LEvents) = {
+
+    // events from TestEvents trait
+    val listOfEvents = List(r4,r5)
+
+    listOfEvents.map( eventClient.insert(_, appId, Some(channelId)) )
+
+    success
+  }
+
+  def insertAndDeleteChannel(eventClient: LEvents) = {
+
+    val eventId = eventClient.insert(r2, appId, Some(channelId))
+
+    val resultBefore = eventClient.get(eventId, appId, Some(channelId))
+
+    val expectedBefore = r2.copy(eventId = Some(eventId))
+
+    val deleteStatus = eventClient.delete(eventId, appId, Some(channelId))
+
+    val resultAfter = eventClient.get(eventId, appId, Some(channelId))
+
+    (resultBefore must beEqualTo(Some(expectedBefore))) and
+    (deleteStatus must beEqualTo(true)) and
+    (resultAfter must beEqualTo(None))
+  }
+
+  def findChannel(eventClient: LEvents) = {
+
+    val results: List[Event] = eventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )
+    .toList
+    .map(e => e.copy(eventId = None)) // ignore eventId
+
+    // same events in insertChannel
+    val expected = List(r4, r5)
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def removeDefault(eventClient: LEvents) = {
+    eventClient.remove(appId)
+  }
+
+  def removeChannel(eventClient: LEvents) = {
+    eventClient.remove(appId, Some(channelId))
+  }
+}
diff --git a/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala b/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala
new file mode 100644
index 0000000..4610e3f
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala
@@ -0,0 +1,190 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage
+
+import org.specs2._
+import org.specs2.specification.Step
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+class PEventsSpec extends Specification with TestEvents {
+
+  System.clearProperty("spark.driver.port")
+  System.clearProperty("spark.hostPort")
+  val sc = new SparkContext("local[4]", "PEventsSpec test")
+
+  val appId = 1
+  val channelId = 6
+  val dbName = "test_pio_storage_events_" + hashCode
+
+  def hbLocal = Storage.getDataObject[LEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+  def hbPar = Storage.getDataObject[PEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+  def stopSpark = {
+    sc.stop()
+  }
+
+  def is = s2"""
+
+  PredictionIO Storage PEvents Specification
+
+    Events can be implemented by:
+    - HBEvents ${hbPEvents}
+    - (stop Spark) ${Step(sc.stop())}
+
+  """
+
+  def hbPEvents = sequential ^ s2"""
+
+    HBPEvents should
+    - behave like any PEvents implementation ${events(hbLocal, hbPar)}
+    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
+
+  """
+
+  def events(localEventClient: LEvents, parEventClient: PEvents) = sequential ^ s2"""
+
+    - (init test) ${initTest(localEventClient)}
+    - (insert test events) ${insertTestEvents(localEventClient)}
+    find in default ${find(parEventClient)}
+    find in channel ${findChannel(parEventClient)}
+    aggregate user properties in default ${aggregateUserProperties(parEventClient)}
+    aggregate user properties in channel ${aggregateUserPropertiesChannel(parEventClient)}
+    write to default ${write(parEventClient)}
+    write to channel ${writeChannel(parEventClient)}
+
+  """
+
+  /* setup */
+
+  // events from TestEvents trait
+  val listOfEvents = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2, r1, r2)
+  val listOfEventsChannel = List(u3e1, u3e2, u3e3, r3, r4)
+
+  def initTest(localEventClient: LEvents) = {
+    localEventClient.init(appId)
+    localEventClient.init(appId, Some(channelId))
+  }
+
+  def insertTestEvents(localEventClient: LEvents) = {
+    listOfEvents.map( localEventClient.insert(_, appId) )
+    // insert to channel
+    listOfEventsChannel.map( localEventClient.insert(_, appId, Some(channelId)) )
+    success
+  }
+
+  /* following are tests */
+
+  def find(parEventClient: PEvents) = {
+    val resultRDD: RDD[Event] = parEventClient.find(
+      appId = appId
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map {_.copy(eventId = None)} // ignore eventId
+
+    results must containTheSameElementsAs(listOfEvents)
+  }
+
+  def findChannel(parEventClient: PEvents) = {
+    val resultRDD: RDD[Event] = parEventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map {_.copy(eventId = None)} // ignore eventId
+
+    results must containTheSameElementsAs(listOfEventsChannel)
+  }
+
+  def aggregateUserProperties(parEventClient: PEvents) = {
+    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
+      appId = appId,
+      entityType = "user"
+    )(sc)
+    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
+
+    val expected = Map(
+      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateUserPropertiesChannel(parEventClient: PEvents) = {
+    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
+      appId = appId,
+      channelId = Some(channelId),
+      entityType = "user"
+    )(sc)
+    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
+
+    val expected = Map(
+      "u3" -> PropertyMap(u3, u3BaseTime, u3LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def write(parEventClient: PEvents) = {
+    val written = List(r5, r6)
+    val writtenRDD = sc.parallelize(written)
+    parEventClient.write(writtenRDD, appId)(sc)
+
+    // read back
+    val resultRDD = parEventClient.find(
+      appId = appId
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map { _.copy(eventId = None)} // ignore eventId
+
+    val expected = listOfEvents ++ written
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def writeChannel(parEventClient: PEvents) = {
+    val written = List(r1, r5, r6)
+    val writtenRDD = sc.parallelize(written)
+    parEventClient.write(writtenRDD, appId, Some(channelId))(sc)
+
+    // read back
+    val resultRDD = parEventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map { _.copy(eventId = None)} // ignore eventId
+
+    val expected = listOfEventsChannel ++ written
+
+    results must containTheSameElementsAs(expected)
+  }
+
+}
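
For orientation, here is a rough sketch of what a caller might do with the `RDD[Event]` that `PEvents.find` returns, reusing only the wiring PEventsSpec itself uses (`Storage.getDataObject[PEvents]` with `StorageTestUtils.hbaseSourceName`, and `find(appId)(sc)`). The object name, method name, and `dbName` argument are illustrative, not part of this change.

```
package io.prediction.data.storage

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

// Sketch only: count stored events per event name from a PEvents client.
object PEventsUsageSketch {
  def countByEventName(sc: SparkContext, appId: Int, dbName: String): Map[String, Long] = {
    val pEvents = Storage.getDataObject[PEvents](StorageTestUtils.hbaseSourceName, dbName)
    val events: RDD[Event] = pEvents.find(appId = appId)(sc)
    events.map(e => (e.event, 1L)).reduceByKey(_ + _).collect().toMap
  }
}
```
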
diff --git a/data/src/test/scala/io/prediction/data/storage/TestEvents.scala b/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
index 5d1a9a7..bce1478 100644
--- a/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
+++ b/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
@@ -21,6 +21,7 @@
 
   val u1BaseTime = new DateTime(654321)
   val u2BaseTime = new DateTime(6543210)
+  val u3BaseTime = new DateTime(6543410)
 
   // u1 events
   val u1e1 = Event(
@@ -99,4 +100,126 @@
   val u2LastTime = u2BaseTime.plusDays(2)
   val u2 = """{"b": "value9", "d": [7, 5, 6], "g": "new11"}"""
 
+  // u3 events
+  val u3e1 = Event(
+    event = "$set",
+    entityType = "user",
+    entityId = "u3",
+    properties = DataMap(
+      """{
+        "a" : 22,
+        "b" : "value13",
+        "d" : [5, 6, 1],
+      }"""),
+    eventTime = u3BaseTime
+  )
+
+  val u3e2 = u3e1.copy(
+    event = "$unset",
+    properties = DataMap("""{"a" : null}"""),
+    eventTime = u3BaseTime.plusDays(1)
+  )
+
+  val u3e3 = u3e1.copy(
+    event = "$set",
+    properties = DataMap("""{"b" : "value10", "f": "new12", "d" : [1, 3, 2]}"""),
+    eventTime = u3BaseTime.plusDays(2)
+  )
+
+  val u3LastTime = u3BaseTime.plusDays(2)
+  val u3 = """{"b": "value10", "d": [1, 3, 2], "f": "new12"}"""
+
+  // some random events
+  val r1 = Event(
+    event = "my_event",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id",
+    targetEntityType = Some("my_target_entity_type"),
+    targetEntityId = Some("my_target_entity_id"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now,
+    prId = Some("my_prid")
+  )
+  val r2 = Event(
+    event = "my_event2",
+    entityType = "my_entity_type2",
+    entityId = "my_entity_id2"
+  )
+  val r3 = Event(
+    event = "my_event3",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id",
+    targetEntityType = Some("my_target_entity_type"),
+    targetEntityId = Some("my_target_entity_id"),
+    properties = DataMap(
+      """{
+        "propA" : 1.2345,
+        "propB" : "valueB",
+      }"""
+    ),
+    prId = Some("my_prid")
+  )
+  val r4 = Event(
+    event = "my_event4",
+    entityType = "my_entity_type4",
+    entityId = "my_entity_id4",
+    targetEntityType = Some("my_target_entity_type4"),
+    targetEntityId = Some("my_target_entity_id4"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""),
+    eventTime = DateTime.now
+  )
+  val r5 = Event(
+    event = "my_event5",
+    entityType = "my_entity_type5",
+    entityId = "my_entity_id5",
+    targetEntityType = Some("my_target_entity_type5"),
+    targetEntityId = Some("my_target_entity_id5"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now
+  )
+  val r6 = Event(
+    event = "my_event6",
+    entityType = "my_entity_type6",
+    entityId = "my_entity_id6",
+    targetEntityType = Some("my_target_entity_type6"),
+    targetEntityId = Some("my_target_entity_id6"),
+    properties = DataMap(
+      """{
+        "prop1" : 6,
+        "prop2" : "value2",
+        "prop3" : [6, 7, 8],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now
+  )
+
 }
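
The u3 fixtures above encode the `$set`/`$unset` aggregation semantics that the aggregation tests rely on: events are applied in `eventTime` order, `$set` merges keys, `$unset` removes them, so u3 ends with keys `b`, `d`, `f`. The sketch below is only a readable restatement of that expectation, not the storage-layer aggregation code; the object and value names are made up.

```
object AggregationSemanticsSketch {
  // Apply $set / $unset events in eventTime order; later events win.
  def aggregate(events: Seq[(String, Map[String, String])]): Map[String, String] =
    events.foldLeft(Map.empty[String, String]) {
      case (acc, ("$set", props))   => acc ++ props
      case (acc, ("$unset", props)) => acc -- props.keys
      case (acc, _)                 => acc
    }

  // For u3: $set {a, b, d} -> $unset {a} -> $set {b, f, d}
  // leaves the keys {b, d, f}, matching the expected `u3` JSON string above.
  val u3Keys = aggregate(Seq(
    ("$set",   Map("a" -> "22", "b" -> "value13", "d" -> "[5, 6, 1]")),
    ("$unset", Map("a" -> "")),
    ("$set",   Map("b" -> "value10", "f" -> "new12", "d" -> "[1, 3, 2]"))
  )).keySet // Set("b", "d", "f")
}
```
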
diff --git a/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala b/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala
new file mode 100644
index 0000000..9bf75dd
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala
@@ -0,0 +1,48 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks
+
+import org.specs2.mutable._
+
+import org.json4s.JObject
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.native.JsonMethods.parse
+import org.json4s.native.Serialization.write
+
+/** Test utility for JsonConnector and FormConnector specs */
+trait ConnectorTestUtil extends Specification {
+
+  implicit val formats = DefaultFormats
+
+  def check(connector: JsonConnector, original: String, event: String) = {
+    val originalJson = parse(original).asInstanceOf[JObject]
+    val eventJson = parse(event).asInstanceOf[JObject]
+    // write and parse back to discard any JNothing field
+    val result = parse(write(connector.toEventJson(originalJson))).asInstanceOf[JObject]
+
+    result.obj must containTheSameElementsAs(eventJson.obj)
+  }
+
+  def check(connector: FormConnector, original: Map[String, String], event: String) = {
+
+    val eventJson = parse(event).asInstanceOf[JObject]
+    // write and parse back to discard any JNothing field
+    val result = parse(write(connector.toEventJson(original))).asInstanceOf[JObject]
+
+    result.obj must containTheSameElementsAs(eventJson.obj)
+  }
+}
diff --git a/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
new file mode 100644
index 0000000..7f6ad8f
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
@@ -0,0 +1,164 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.exampleform
+
+import io.prediction.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+/** Test the ExampleFormConnector */
+class ExampleFormConnectorSpec extends Specification with ConnectorTestUtil {
+
+  "ExampleFormConnector" should {
+
+    "convert userAction to Event JSON" in {
+      // webhooks input
+      val userAction = Map(
+        "type" -> "userAction",
+        "userId" -> "as34smg4",
+        "event" -> "do_something",
+        "context[ip]" -> "24.5.68.47", // optional
+        "context[prop1]" -> "2.345", // optional
+        "context[prop2]" -> "value1", // optional
+        "anotherProperty1" -> "100",
+        "anotherProperty2"-> "optional1", // optional
+        "timestamp" -> "2015-01-02T00:30:12.984Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "context": {
+              "ip": "24.5.68.47",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherProperty1": 100,
+            "anotherProperty2": "optional1"
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleFormConnector, userAction, expected)
+    }
+
+    "convert userAction without optional fields to Event JSON" in {
+      // webhooks input
+      val userAction = Map(
+        "type" -> "userAction",
+        "userId" -> "as34smg4",
+        "event" -> "do_something",
+        "anotherProperty1" -> "100",
+        "timestamp" -> "2015-01-02T00:30:12.984Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "anotherProperty1": 100,
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleFormConnector, userAction, expected)
+    }
+
+    "convert userActionItem to Event JSON" in {
+      // webhooks input
+      val userActionItem = Map(
+        "type" -> "userActionItem",
+        "userId" -> "as34smg4",
+        "event" -> "do_something_on",
+        "itemId" -> "kfjd312bc",
+        "context[ip]" -> "1.23.4.56",
+        "context[prop1]" -> "2.345",
+        "context[prop2]" -> "value1",
+        "anotherPropertyA" -> "4.567", // optional
+        "anotherPropertyB" -> "false", // optional
+        "timestamp" -> "2015-01-15T04:20:23.567Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherPropertyA": 4.567
+            "anotherPropertyB": false
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleFormConnector, userActionItem, expected)
+    }
+
+    "convert userActionItem without optional fields to Event JSON" in {
+      // webhooks input
+      val userActionItem = Map(
+        "type" -> "userActionItem",
+        "userId" -> "as34smg4",
+        "event" -> "do_something_on",
+        "itemId" -> "kfjd312bc",
+        "context[ip]" -> "1.23.4.56",
+        "context[prop1]" -> "2.345",
+        "context[prop2]" -> "value1",
+        "timestamp" -> "2015-01-15T04:20:23.567Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            }
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleFormConnector, userActionItem, expected)
+    }
+
+  }
+}
diff --git a/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
new file mode 100644
index 0000000..bdf1cc4
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
@@ -0,0 +1,179 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.examplejson
+
+import io.prediction.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+/** Test the ExampleJsonConnector */
+class ExampleJsonConnectorSpec extends Specification with ConnectorTestUtil {
+
+  "ExampleJsonConnector" should {
+
+    "convert userAction to Event JSON" in {
+      // webhooks input
+      val userAction = """
+        {
+          "type": "userAction"
+          "userId": "as34smg4",
+          "event": "do_something",
+          "context": {
+            "ip": "24.5.68.47",
+            "prop1": 2.345
+            "prop2": "value1"
+          },
+          "anotherProperty1": 100,
+          "anotherProperty2": "optional1",
+          "timestamp": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "context": {
+              "ip": "24.5.68.47",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherProperty1": 100,
+            "anotherProperty2": "optional1"
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userAction, expected)
+    }
+
+    "convert userAction without optional field to Event JSON" in {
+      // webhooks input
+      val userAction = """
+        {
+          "type": "userAction"
+          "userId": "as34smg4",
+          "event": "do_something",
+          "anotherProperty1": 100,
+          "timestamp": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "anotherProperty1": 100,
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userAction, expected)
+    }
+
+    "convert userActionItem to Event JSON" in {
+      // webhooks input
+      val userActionItem = """
+        {
+          "type": "userActionItem"
+          "userId": "as34smg4",
+          "event": "do_something_on",
+          "itemId": "kfjd312bc",
+          "context": {
+            "ip": "1.23.4.56",
+            "prop1": 2.345
+            "prop2": "value1"
+          },
+          "anotherPropertyA": 4.567
+          "anotherPropertyB": false
+          "timestamp": "2015-01-15T04:20:23.567Z"
+      }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherPropertyA": 4.567
+            "anotherPropertyB": false
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userActionItem, expected)
+    }
+
+    "convert userActionItem without optional fields to Event JSON" in {
+      // webhooks input
+      val userActionItem = """
+        {
+          "type": "userActionItem"
+          "userId": "as34smg4",
+          "event": "do_something_on",
+          "itemId": "kfjd312bc",
+          "context": {
+            "ip": "1.23.4.56",
+            "prop1": 2.345
+            "prop2": "value1"
+          }
+          "timestamp": "2015-01-15T04:20:23.567Z"
+      }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            }
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userActionItem, expected)
+    }
+
+  }
+
+}
diff --git a/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
new file mode 100644
index 0000000..a2df508
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
@@ -0,0 +1,73 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.mailchimp
+
+import io.prediction.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+class MailChimpConnectorSpec extends Specification with ConnectorTestUtil {
+
+  // TODO: test other events
+  // TODO: test different optional fields
+
+  "MailChimpConnector" should {
+
+    "convert subscribe to event JSON" in {
+
+      val subscribe = Map(
+        "type" -> "subscribe",
+        "fired_at" -> "2009-03-26 21:35:57",
+        "data[id]" -> "8a25ff1d98",
+        "data[list_id]" -> "a6b5da1054",
+        "data[email]" -> "api@mailchimp.com",
+        "data[email_type]" -> "html",
+        "data[merges][EMAIL]" -> "api@mailchimp.com",
+        "data[merges][FNAME]" -> "MailChimp",
+        "data[merges][LNAME]" -> "API",
+        "data[merges][INTERESTS]" -> "Group1,Group2",
+        "data[ip_opt]" -> "10.20.10.30",
+        "data[ip_signup]" -> "10.20.10.30"
+      )
+
+      val expected = """
+        {
+          "event" : "subscribe",
+          "entityType" : "user",
+          "entityId" : "8a25ff1d98",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "email" : "api@mailchimp.com",
+            "email_type" : "html",
+            "merges" : {
+              "EMAIL" : "api@mailchimp.com",
+              "FNAME" : "MailChimp",
+              "LNAME" : "API"
+              "INTERESTS" : "Group1,Group2"
+            },
+            "ip_opt" : "10.20.10.30",
+            "ip_signup" : "10.20.10.30"
+          },
+          "eventTime" : "2009-03-26T21:35:57.000Z"
+        }
+      """
+
+      check(MailChimpConnector, subscribe, expected)
+    }
+
+  }
+}
diff --git a/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
new file mode 100644
index 0000000..e6fd83b
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
@@ -0,0 +1,68 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.webhooks.segmentio
+
+import io.prediction.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+class SegmentIOConnectorSpec extends Specification with ConnectorTestUtil {
+
+  // TODO: test other events
+  // TODO: test different optional fields
+
+  "SegmentIOConnector" should {
+
+    "convert identify to event JSON" in {
+      // simple format
+      val identify = """
+        {
+          "version"   : 1,
+          "type"      : "identify",
+          "userId"    : "019mr8mf4r",
+          "traits"    : {
+              "email"            : "achilles@segment.com",
+              "name"             : "Achilles",
+              "subscriptionPlan" : "Premium",
+              "friendCount"      : 29
+          },
+          "timestamp" : "2012-12-02T00:30:08.276Z"
+        }
+      """
+
+      val expected = """
+        {
+          "event" : "identify",
+          "entityType": "user",
+          "entityId" : "019mr8mf4r",
+          "properties" : {
+            "traits" : {
+              "email"            : "achilles@segment.com",
+              "name"             : "Achilles",
+              "subscriptionPlan" : "Premium",
+              "friendCount"      : 29
+            }
+          },
+          "eventTime" : "2012-12-02T00:30:08.276Z"
+        }
+      """
+
+      check(SegmentIOConnector, identify, expected)
+    }
+
+  }
+
+}
diff --git a/data/test-form.sh b/data/test-form.sh
new file mode 100755
index 0000000..db6e814
--- /dev/null
+++ b/data/test-form.sh
@@ -0,0 +1,79 @@
+#!/usr/bin/env bash
+accessKey=$1
+
+# normal
+curl -i -X POST http://localhost:7070/webhooks/mailchimp?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+--data-urlencode "type"="subscribe" \
+--data-urlencode "fired_at"="2009-03-26 21:35:57" \
+--data-urlencode "data[id]"="8a25ff1d98" \
+--data-urlencode "data[list_id]"="a6b5da1054" \
+--data-urlencode "data[email]"="api@mailchimp.com" \
+--data-urlencode "data[email_type]"="html" \
+--data-urlencode "data[merges][EMAIL]"="api@mailchimp.com" \
+--data-urlencode "data[merges][FNAME]"="MailChimp" \
+--data-urlencode "data[merges][LNAME]"="API" \
+--data-urlencode "data[merges][INTERESTS]"="Group1,Group2" \
+--data-urlencode "data[ip_opt]"="10.20.10.30" \
+--data-urlencode "data[ip_signup]"="10.20.10.30" \
+-w %{time_total}
+
+# invalid type
+curl -i -X POST http://localhost:7070/webhooks/mailchimp?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+--data-urlencode "type"="something_invalid" \
+--data-urlencode "fired_at"="2009-03-26 21:35:57" \
+--data-urlencode "data[id]"="8a25ff1d98" \
+--data-urlencode "data[list_id]"="a6b5da1054" \
+--data-urlencode "data[email]"="api@mailchimp.com" \
+--data-urlencode "data[email_type]"="html" \
+--data-urlencode "data[merges][EMAIL]"="api@mailchimp.com" \
+--data-urlencode "data[merges][FNAME]"="MailChimp" \
+--data-urlencode "data[merges][LNAME]"="API" \
+--data-urlencode "data[merges][INTERESTS]"="Group1,Group2" \
+--data-urlencode "data[ip_opt]"="10.20.10.30" \
+--data-urlencode "data[ip_signup]"="10.20.10.30" \
+-w %{time_total}
+
+# missing data (type)
+curl -i -X POST http://localhost:7070/webhooks/mailchimp?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+--data-urlencode "fired_at"="2009-03-26 21:35:57" \
+--data-urlencode "data[id]"="8a25ff1d98" \
+--data-urlencode "data[list_id]"="a6b5da1054" \
+--data-urlencode "data[email]"="api@mailchimp.com" \
+--data-urlencode "data[email_type]"="html" \
+--data-urlencode "data[merges][EMAIL]"="api@mailchimp.com" \
+--data-urlencode "data[merges][FNAME]"="MailChimp" \
+--data-urlencode "data[merges][LNAME]"="API" \
+--data-urlencode "data[merges][INTERESTS]"="Group1,Group2" \
+--data-urlencode "data[ip_opt]"="10.20.10.30" \
+--data-urlencode "data[ip_signup]"="10.20.10.30" \
+-w %{time_total}
+
+# invalid webhooks path
+curl -i -X POST http://localhost:7070/webhooks/invalid?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+--data-urlencode "type"="subscribe" \
+--data-urlencode "fired_at"="2009-03-26 21:35:57" \
+--data-urlencode "data[id]"="8a25ff1d98" \
+--data-urlencode "data[list_id]"="a6b5da1054" \
+--data-urlencode "data[email]"="api@mailchimp.com" \
+--data-urlencode "data[email_type]"="html" \
+--data-urlencode "data[merges][EMAIL]"="api@mailchimp.com" \
+--data-urlencode "data[merges][FNAME]"="MailChimp" \
+--data-urlencode "data[merges][LNAME]"="API" \
+--data-urlencode "data[merges][INTERESTS]"="Group1,Group2" \
+--data-urlencode "data[ip_opt]"="10.20.10.30" \
+--data-urlencode "data[ip_signup]"="10.20.10.30" \
+-w %{time_total}
+
+# get normal
+curl -i -X GET http://localhost:7070/webhooks/mailchimp?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+-w %{time_total}
+
+# get invalid
+curl -i -X GET http://localhost:7070/webhooks/invalid?accessKey=$accessKey \
+-H "Content-type: application/x-www-form-urlencoded" \
+-w %{time_total}
diff --git a/data/test-normal.sh b/data/test-normal.sh
new file mode 100755
index 0000000..0c17c82
--- /dev/null
+++ b/data/test-normal.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+accessKey=$1
+
+curl -i -X POST http://localhost:7070/events.json?accessKey=$1 \
+-H "Content-Type: application/json" \
+-d '{
+  "event" : "my_event1",
+  "entityType" : "user"
+  "entityId" : "uid",
+  "eventTime" : "2004-12-13T21:39:45.618-07:00"
+}' \
+-w %{time_total}
diff --git a/data/test-segmentio.sh b/data/test-segmentio.sh
new file mode 100755
index 0000000..4737edf
--- /dev/null
+++ b/data/test-segmentio.sh
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+accessKey=$1
+
+# normal case
+curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-d '{
+  "version"   : 1,
+  "type"      : "identify",
+  "userId"    : "019mr8mf4r",
+  "traits"    : {
+      "email"            : "achilles@segment.com",
+      "name"             : "Achilles",
+      "subscriptionPlan" : "Premium",
+      "friendCount"      : 29
+  },
+  "timestamp" : "2012-12-02T00:30:08.276Z"
+}' \
+-w %{time_total}
+
+
+# invalid type
+curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-d '{
+  "version"   : 1,
+  "type"      : "invalid_type",
+  "userId"    : "019mr8mf4r",
+  "traits"    : {
+      "email"            : "achilles@segment.com",
+      "name"             : "Achilles",
+      "subscriptionPlan" : "Premium",
+      "friendCount"      : 29
+  },
+  "timestamp" : "2012-12-02T00:30:08.276Z"
+}' \
+-w %{time_total}
+
+# invalid data format
+curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-d '{
+  "version"   : 1,
+  "userId"    : "019mr8mf4r",
+  "traits"    : {
+      "email"            : "achilles@segment.com",
+      "name"             : "Achilles",
+      "subscriptionPlan" : "Premium",
+      "friendCount"      : 29
+  },
+  "timestamp" : "2012-12-02T00:30:08.276Z"
+}' \
+-w %{time_total}
+
+
+# invalid webhooks path
+curl -i -X POST http://localhost:7070/webhooks/invalidpath.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-d '{
+  "version"   : 1,
+  "type"      : "identify",
+  "userId"    : "019mr8mf4r",
+  "traits"    : {
+      "email"            : "achilles@segment.com",
+      "name"             : "Achilles",
+      "subscriptionPlan" : "Premium",
+      "friendCount"      : 29
+  },
+  "timestamp" : "2012-12-02T00:30:08.276Z"
+}' \
+-w %{time_total}
+
+
+# get request
+curl -i -X GET http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-w %{time_total}
+
+# get invalid
+curl -i -X GET http://localhost:7070/webhooks/invalidpath.json?accessKey=$accessKey \
+-H "Content-Type: application/json" \
+-w %{time_total}
diff --git a/docs/manual/data/versions.yml b/docs/manual/data/versions.yml
index a007db0..0b1bf18 100644
--- a/docs/manual/data/versions.yml
+++ b/docs/manual/data/versions.yml
@@ -1,6 +1,5 @@
-pio: 0.9.1
-spark_download_filename: spark-1.2.1-bin-hadoop2.4
+pio: 0.9.2
+spark_download_filename: spark-1.3.0-bin-hadoop2.4
 elasticsearch_download_filename: elasticsearch-1.4.4
-hbase_basename: hbase-0.98.11
-hbase_variant: hadoop2-bin
-hbase_dir_suffix: hadoop2
+hbase_basename: hbase-1.0.0
+hbase_variant: bin
diff --git a/docs/manual/source/datacollection/eventapi.html.md b/docs/manual/source/datacollection/eventapi.html.md
index 7b14f17..f80f534 100644
--- a/docs/manual/source/datacollection/eventapi.html.md
+++ b/docs/manual/source/datacollection/eventapi.html.md
@@ -35,8 +35,9 @@
 $ pio eventserver
 ```
 
-INFO: By default, the Event Server is bound to localhost, which serves only local
-traffic. To serve global traffic, you can use $pio eventserver --ip 0.0.0.0
+INFO: By default, the Event Server is bound to 0.0.0.0, which serves global
+traffic. To tighten security, you may use `pio eventserver --ip 127.0.0.1` to
+serve only local traffic.
 
 ### Check Server Status
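
To make the revised INFO note above concrete, here is one way to run the Event Server bound to loopback and send it a local test event. The `--ip` flag is the one referenced in the doc change, 7070 is the port the test scripts in this change assume, and the payload mirrors `data/test-normal.sh` added earlier in this change; `$ACCESS_KEY` stands in for your app's access key.

```
$ pio eventserver --ip 127.0.0.1
$ curl -i -X POST "http://127.0.0.1:7070/events.json?accessKey=$ACCESS_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "event" : "my_event1",
    "entityType" : "user",
    "entityId" : "uid",
    "eventTime" : "2004-12-13T21:39:45.618-07:00"
  }'
```
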
 
diff --git a/docs/manual/source/evaluation/paramtuning.html.md b/docs/manual/source/evaluation/paramtuning.html.md
index 976bff1..0b437ce 100644
--- a/docs/manual/source/evaluation/paramtuning.html.md
+++ b/docs/manual/source/evaluation/paramtuning.html.md
@@ -31,7 +31,7 @@
 ```
 
 ### Build and run the evaluation
-To run evaluation, the command `pio eval` is used. It takes to
+To run evaluation, the command `pio eval` is used. It takes two
 mandatory parameters:
 1. the `Evaluation` object, which tells PredictionIO the engine and metric we use
    for the evaluation; and 
diff --git a/docs/manual/source/install/install-linux.html.md.erb b/docs/manual/source/install/install-linux.html.md.erb
index 6682d5b..0dd18fd 100644
--- a/docs/manual/source/install/install-linux.html.md.erb
+++ b/docs/manual/source/install/install-linux.html.md.erb
@@ -15,7 +15,7 @@
 
 If you do not want to use the install script above, you can follow the steps
 below to setup PredictionIO and its dependencies. In these instructions we will
-assume you are in your home directory. Whereever you see `/home/abc`, replace it
+assume you are in your home directory. Wherever you see `/home/abc`, replace it
 with your own home directory.
 
 
@@ -36,6 +36,9 @@
 Download PredictionIO and extract it.
 
 ```
+$ cd
+$ pwd
+/home/abc
 $ wget https://d8k1yxp8elc6b.cloudfront.net/PredictionIO-<%= data.versions.pio %>.tar.gz
 $ tar zxvf PredictionIO-<%= data.versions.pio %>.tar.gz
 ```
@@ -43,116 +46,38 @@
 
 ### Installing Dependencies
 
+Let us install dependencies inside a subdirectory of the PredictionIO
+installation. By following this convention, you can use PredictionIO's default
+configuration as is.
+
+```
+$ mkdir PredictionIO-<%= data.versions.pio %>/vendors
+```
+
 
 #### Spark Setup
 
-[Apache Spark](http://spark.apache.org) is the default processing engine for
-PredictionIO. Download and extract it.
-
-```
-$ wget http://d3kbcqa49mib13.cloudfront.net/<%= data.versions.spark_download_filename %>.tgz
-$ tar zxvf <%= data.versions.spark_download_filename %>.tgz
-```
-
-After that, edit `PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh`.
-
-```
-SPARK_HOME=/home/abc/<%= data.versions.spark_download_filename %>
-```
+<%= partial 'shared/install/spark' %>
 
 
 #### Elasticsearch Setup
 
-[Elasticsearch](http://www.elasticsearch.org/) is the default metadata store for
-PredictionIO. Download and extract it.
-
-```
-$ wget https://download.elasticsearch.org/elasticsearch/elasticsearch/<%= data.versions.elasticsearch_download_filename %>.tar.gz
-$ tar zxvf <%= data.versions.elasticsearch_download_filename %>.tar.gz
-```
-
-After that, edit `PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh`.
-
-```
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=/home/abc/<%= data.versions.elasticsearch_download_filename %>
-```
-
-If you are using a shared network, change the `network.host` line in
-`<%= data.versions.elasticsearch_download_filename %>/config/elasticsearch.yml` to `network.host: 127.0.0.1` because by default,
-Elasticsearch looks for other machines on the network upon setup and you may run
-into weird errors if there are other machines that is also running
-Elasticsearch.
-If you are not using the default setting at localhost. You may change the
-following in `PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh` to fit your setup.
-
-```
-PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
-PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
-```
+<%= partial 'shared/install/elasticsearch' %>
 
 
 #### HBase Setup<a class="anchor" name="hbase">&nbsp;</a>
 
-[HBase](http://hbase.apache.org) is the default event data store for
-PredictionIO. Download and extract it.
+<%= partial 'shared/install/hbase' %>
 
-```
-$ wget http://archive.apache.org/dist/hbase/<%= data.versions.hbase_basename %>/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz
-$ tar zxvf <%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz
-```
-
-After that, edit `PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh`.
-
-```
-PIO_STORAGE_SOURCES_HBASE_HOME=/home/abc/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>
-```
-
-You will need to at least add a minimal configuration to HBase to start it in
-standalone mode. Details can be found
-[here](http://hbase.apache.org/book/quickstart.html). Here, we are showing a
-sample minimal configuration.
-
-INFO: For production deployment, run a fully distributed HBase configuration.
-
-Edit `<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant
-%>/conf/hbase-site.xml`.
-
-```
-<configuration>
-  <property>
-    <name>hbase.rootdir</name>
-    <value>file:///home/abc/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>/data</value>
-  </property>
-  <property>
-    <name>hbase.zookeeper.property.dataDir</name>
-    <value>/home/abc/zookeeper</value>
-  </property>
-</configuration>
-```
-
-INFO: HBase will create `hbase.rootdir` automatically to store its data.
-
-Edit `<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant
-%>/conf/hbase-env.sh` to set `JAVA_HOME` for the cluster. For example:
-
-```
-export JAVA_HOME=`/usr/lib/jvm/java-8-oracle/jre`
-```
-
-For Mac users, use this instead:
-
-```
-export JAVA_HOME=`/usr/libexec/java_home -v 1.7`
-```
 
 In addition, you must set your environment variable `JAVA_HOME`. For example, in
 `/home/abc/.bashrc` add the following line:
 
 ```
-export JAVA_HOME=`/usr/lib/jvm/java-8-oracle`
+export JAVA_HOME=/usr/lib/jvm/java-8-oracle
 ```
 
+
 <%= partial 'shared/install/dependent_services' %>
 
 Now you have installed everything you need!
diff --git a/docs/manual/source/install/install-sourcecode.html.md.erb b/docs/manual/source/install/install-sourcecode.html.md.erb
index 25ffc76..cf019ab 100644
--- a/docs/manual/source/install/install-sourcecode.html.md.erb
+++ b/docs/manual/source/install/install-sourcecode.html.md.erb
@@ -2,6 +2,9 @@
 title: Installing PredictionIO from Source Code
 ---
 
+INFO: The following steps assume the directory structure shown below. Wherever
+you see `/home/abc`, replace it with your own home directory.
+
 ## Building
 
 Run the following to download and build PredictionIO from its source code.
@@ -24,110 +27,48 @@
 PredictionIO binary distribution created at PredictionIO-<%= data.versions.pio %>.tar.gz
 ```
 
+Extract the binary distribution you have just built.
+
+```
+$ tar zxvf PredictionIO-<%= data.versions.pio %>.tar.gz
+```
+
 
 ## Installing Dependencies
 
+Let us install dependencies inside a subdirectory of the PredictionIO
+installation. By following this convention, you can use PredictionIO's default
+configuration as is.
+
+```
+$ mkdir PredictionIO-<%= data.versions.pio %>/vendors
+```
+
+
 ### Spark Setup
 
-Apache Spark is the default processing engine for PredictionIO. Download [Apache
-Spark release 1.2.0 package hadoop2.4](http://spark.apache.org/downloads.html).
+<%= partial 'shared/install/spark' %>
 
 
-```
-$ wget http://d3kbcqa49mib13.cloudfront.net/<%= data.versions.spark_download_filename %>.tgz
-$ tar zxvf <%= data.versions.spark_download_filename %>.tgz
-```
-
-Copy the configuration template `conf/pio-env.sh.template` to `conf/pio-env.sh`
-in your PredictionIO installation directory. After that, edit `conf/pio-env.sh`
-and point `SPARK_HOME` to the location where you extracted Apache Spark.
-
-```
-SPARK_HOME=/home/abc/Downloads/<%= data.versions.spark_download_filename %>
-```
-
 ### Storage Setup
 
 #### Elasticsearch Setup
 
-By default, PredictionIO uses Elasticsearch at localhost as the data store to
-store its metadata. Simply download and install
-[Elasticsearch](http://www.elasticsearch.org/), which looks like this:
+<%= partial 'shared/install/elasticsearch' %>
 
-```
-$ wget https://download.elasticsearch.org/elasticsearch/elasticsearch/<%= data.versions.elasticsearch_download_filename %>.tar.gz
-$ tar zxvf <%= data.versions.elasticsearch_download_filename %>.tar.gz
-$ cd <%= data.versions.elasticsearch_download_filename %>
-```
-
-If you are using a shared network, change the `network.host` line in
-`config/elasticsearch.yml` to `network.host: 127.0.0.1` because by default,
-Elasticsearch looks for other machines on the network upon setup and you may run
-into weird errors if there are other machines that is also running
-Elasticsearch.
-
-If you are not using the default setting at localhost. You may change the following in ```conf/pio-env.sh``` to fit your setup.
-
-WARNING: Make sure to set `PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME` unless you do
-not plan to use `bin/pio-start-all`, e.g. you have a custom HBase setup.
-
-```
-PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
-PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=/path/to/elasticsearch
-```
 
 #### <a name="hbase"></a>HBase Setup
 
-By default, PredictionIO's Data API uses [HBase](http://hbase.apache.org/) at localhost as the data store
-for event data.
+<%= partial 'shared/install/hbase' %>
+
+
+In addition, you must set your environment variable `JAVA_HOME`. For example, in
+`/home/abc/.bashrc` add the following line:
 
 ```
-$ wget https://archive.apache.org/dist/hbase/<%= data.versions.hbase_basename %>/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz
-$ tar zxvf <%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz
-$ cd <%= data.versions.hbase_basename %>-<%= data.versions.hbase_dir_suffix %>
+export JAVA_HOME=/usr/lib/jvm/java-8-oracle
 ```
 
-You will need to at least add a minimal configuration to HBase to start it in
-standalone mode. Details can be found
-[here](http://hbase.apache.org/book/quickstart.html). Here, we are showing a
-sample minimal configuration.
-
-> For production deployment, run a fully distributed HBase configuration.
-
-Edit `/path/to/hbase/conf/hbase-site.xml` and configure the local **data**
-directories for HBase and ZooKeeper. These directories should be empty the first
-time you launch PredictionIO.
-
-```
-<configuration>
-  <property>
-    <name>hbase.rootdir</name>
-    <value>file:///path/to/hbase-data-dir</value>
-  </property>
-  <property>
-    <name>hbase.zookeeper.property.dataDir</name>
-    <value>/path/to/zookeeper-data-dir</value>
-  </property>
-</configuration>
-```
-
-Edit `/path/to/hbase/conf/hbase-env.sh` to set `JAVA_HOME` for the cluster. For
-Mac users it would be
-
-```
-export JAVA_HOME=`/usr/libexec/java_home -v 1.7`
-```
-
-Navigate back to your PredictionIO's installation directory and edit
-`conf/pio-env.sh` add the path to HBase's root and config directories:
-
-```
-HBASE_CONF_DIR=/path/to/hbase/conf
-...
-PIO_STORAGE_SOURCES_HBASE_HOME=/path/to/hbase
-```
 
 <%= partial 'shared/install/dependent_services' %>
 
diff --git a/docs/manual/source/install/install-vagrant.html.md.erb b/docs/manual/source/install/install-vagrant.html.md.erb
index 9947048..7d338c0 100644
--- a/docs/manual/source/install/install-vagrant.html.md.erb
+++ b/docs/manual/source/install/install-vagrant.html.md.erb
@@ -51,7 +51,7 @@
 ```
 ==> default: Installation done!
 ==> default: --------------------------------------------------------------------------------
-==> default: Installation of PredictionIO 0.9.1 complete!
+==> default: Installation of PredictionIO <%= data.versions.pio %> complete!
 ==> default: IMPORTANT: You still have to start PredictionIO and dependencies manually:
 ==> default: Run: 'pio-start-all'
 ==> default: Check the status with: 'pio status'
@@ -84,11 +84,6 @@
 Once you've logged into the VM, you can follow one of the template quickstart
 guides to get started.
 
-INFO: Note that when you run `pio deploy` engine inside the **VM**, you need to
-specify `--ip 0.0.0.0` in order to access the event server and the deployed
-engine from the host machine. Please remember to specify this --ip parameter
-when you follow the quick start.
-
 To run PredictionIO Event Server and dependent services:
 
 ```
@@ -98,7 +93,7 @@
 To deploy engine:
 
 ```
-vagrant@vagrant-ubuntu-trusty-64:~/MyRecommendation$ pio deploy --ip 0.0.0.0
+vagrant@vagrant-ubuntu-trusty-64:~/MyRecommendation$ pio deploy
 ```
 
 ### Shutdown and bring up PredictionIO VM again
@@ -127,6 +122,6 @@
 ==> default: PredictionIO VM is up!
 ==> default: You could run 'pio status' inside VM ('vagrant ssh' to VM first) to confirm if PredictionIO is ready.
 ==> default: IMPORTANT: You still have to start the eventserver manually (inside VM):
-==> default: Run: 'pio eventserver --ip 0.0.0.0'
+==> default: Run: 'pio eventserver'
 ==> default: --------------------------------------------------------------------------------
 ```
diff --git a/docs/manual/source/partials/shared/dase/_dase.html.md.erb b/docs/manual/source/partials/shared/dase/_dase.html.md.erb
index a1bf5f0..243cede 100644
--- a/docs/manual/source/partials/shared/dase/_dase.html.md.erb
+++ b/docs/manual/source/partials/shared/dase/_dase.html.md.erb
@@ -7,6 +7,12 @@
 * **S**erving
 * **E**valuator
 
-Let's look at the code and see how you can customize the engine you built from the <%= template_name %>.
+Let's look at the code and see how you can customize the engine you built from 
+the <%= template_name %>.
 
+<% if defined?(evaluation_link) %>
+NOTE: Evaluator will not be covered in this tutorial.
+Please visit [evaluation explained](<%= evaluation_link %>) to learn how to use evaluation.
+<% else %>
 NOTE: Evaluator will not be covered in this tutorial.
+<% end %>
diff --git a/docs/manual/source/partials/shared/install/_dependent_services.html.erb b/docs/manual/source/partials/shared/install/_dependent_services.html.erb
index 13c14f0..cf74429 100644
--- a/docs/manual/source/partials/shared/install/_dependent_services.html.erb
+++ b/docs/manual/source/partials/shared/install/_dependent_services.html.erb
@@ -1,13 +1,13 @@
 ### Start PredictionIO and Dependent Services
 
-Simply do `bin/pio-start-all` and you should see something similar to the
-following:
+Simply do `PredictionIO-<%= data.versions.pio %>/bin/pio-start-all` and you
+should see something similar to the following:
 
 ```
-$ bin/pio-start-all
+$ PredictionIO-<%= data.versions.pio %>/bin/pio-start-all
 Starting Elasticsearch...
 Starting HBase...
-starting master, logging to /home/abc/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_dir_suffix %>/bin/../logs/hbase-abc-master-yourhost.local.out
+starting master, logging to /home/abc/PredictionIO-<%= data.versions.pio %>/vendors/<%= data.versions.hbase_basename %>/bin/../logs/hbase-abc-master-yourhost.local.out
 Waiting 10 seconds for HBase to fully initialize...
 Starting PredictionIO Event Server...
 $
@@ -30,4 +30,5 @@
 - org.apache.hadoop.hbase.master.HMaster
 - org.elasticsearch.bootstrap.Elasticsearch
 
-At any time, you can run `pio status` to check the status of the dependencies.
+At any time, you can run `PredictionIO-<%= data.versions.pio %>/bin/pio status`
+to check the status of the dependencies.
diff --git a/docs/manual/source/partials/shared/install/_elasticsearch.html.erb b/docs/manual/source/partials/shared/install/_elasticsearch.html.erb
new file mode 100644
index 0000000..7b93cfe
--- /dev/null
+++ b/docs/manual/source/partials/shared/install/_elasticsearch.html.erb
@@ -0,0 +1,29 @@
+[Elasticsearch](http://www.elasticsearch.org/) is the default metadata store for
+PredictionIO. Download and extract it.
+
+```
+$ wget https://download.elasticsearch.org/elasticsearch/elasticsearch/<%= data.versions.elasticsearch_download_filename %>.tar.gz
+$ tar zxvfC <%= data.versions.elasticsearch_download_filename %>.tar.gz PredictionIO-<%= data.versions.pio %>/vendors
+```
+
+INFO: If you decide to install Elasticsearch to another location, you must edit
+`PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh` and change the
+`PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME` variable to point to your own
+Elasticsearch installation.
+
+INFO: If you are using a shared network, change the `network.host` line in
+`PredictionIO-<%= data.versions.pio %>/vendors/<%=
+data.versions.elasticsearch_download_filename %>/config/elasticsearch.yml` to
+`network.host: 127.0.0.1` because by default, Elasticsearch looks for other
+machines on the network upon setup and you may run into weird errors if there
+are other machines that are also running Elasticsearch.
+
+If you are not using the default setting at `localhost`, you may change the
+following in `PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh` to fit your
+setup.
+
+```
+PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
+PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
+PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
+```
diff --git a/docs/manual/source/partials/shared/install/_hbase.html.erb b/docs/manual/source/partials/shared/install/_hbase.html.erb
new file mode 100644
index 0000000..68737db
--- /dev/null
+++ b/docs/manual/source/partials/shared/install/_hbase.html.erb
@@ -0,0 +1,52 @@
+[HBase](http://hbase.apache.org) is the default event data store for
+PredictionIO. Download and extract it.
+
+```
+$ wget http://archive.apache.org/dist/hbase/<%= data.versions.hbase_basename %>/<%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz
+$ tar zxvfC <%= data.versions.hbase_basename %>-<%= data.versions.hbase_variant %>.tar.gz PredictionIO-<%= data.versions.pio %>/vendors
+```
+
+INFO: If you decide to install HBase to another location, you must edit
+`PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh` and change the
+`PIO_STORAGE_SOURCES_HBASE_HOME` variable to point to your own HBase
+installation.
+
+You will need to at least add a minimal configuration to HBase to start it in
+standalone mode. Details can be found
+[here](http://hbase.apache.org/book/quickstart.html). Below is a
+sample minimal configuration.
+
+INFO: For production deployment, run a fully distributed HBase configuration.
+
+Edit `PredictionIO-<%= data.versions.pio %>/vendors/<%=
+data.versions.hbase_basename %>/conf/hbase-site.xml`.
+
+```
+<configuration>
+  <property>
+    <name>hbase.rootdir</name>
+    <value>file:///home/abc/PredictionIO-<%= data.versions.pio %>/vendors/<%= data.versions.hbase_basename %>/data</value>
+  </property>
+  <property>
+    <name>hbase.zookeeper.property.dataDir</name>
+    <value>/home/abc/PredictionIO-<%= data.versions.pio %>/vendors/<%= data.versions.hbase_basename %>/zookeeper</value>
+  </property>
+</configuration>
+```
+
+INFO: HBase will create `hbase.rootdir` automatically to store its data.
+
+Edit `PredictionIO-<%= data.versions.pio %>/vendors/<%=
+data.versions.hbase_basename %>/conf/hbase-env.sh` to set `JAVA_HOME` for the
+cluster. For example:
+
+```
+export JAVA_HOME=/usr/lib/jvm/java-8-oracle/jre
+```
+
+For Mac users, use this instead (change `1.8` to `1.7` if you have Java 7
+installed):
+
+```
+export JAVA_HOME=`/usr/libexec/java_home -v 1.8`
+```
diff --git a/docs/manual/source/partials/shared/install/_spark.html.erb b/docs/manual/source/partials/shared/install/_spark.html.erb
new file mode 100644
index 0000000..9fb7d46
--- /dev/null
+++ b/docs/manual/source/partials/shared/install/_spark.html.erb
@@ -0,0 +1,11 @@
+[Apache Spark](http://spark.apache.org) is the default processing engine for
+PredictionIO. Download and extract it.
+
+```
+$ wget http://d3kbcqa49mib13.cloudfront.net/<%= data.versions.spark_download_filename %>.tgz
+$ tar zxvfC <%= data.versions.spark_download_filename %>.tgz PredictionIO-<%= data.versions.pio %>/vendors
+```
+
+INFO: If you decide to install Apache Spark to another location, you must edit
+`PredictionIO-<%= data.versions.pio %>/conf/pio-env.sh` and change the
+`SPARK_HOME` variable to point to your own Apache Spark installation.
diff --git a/docs/manual/source/resources/faq.html.md b/docs/manual/source/resources/faq.html.md
index 27ab6ad..ad4317d 100644
--- a/docs/manual/source/resources/faq.html.md
+++ b/docs/manual/source/resources/faq.html.md
@@ -103,8 +103,10 @@
 
 ### Q: How do I increase the JVM heap size of the Event Server?
 
+Add the `JAVA_OPTS` environment variable to supply JVM options, e.g.
+
 ```
-$ JAVA_OPTS=-Xmx16g bin/pio eventserver --ip 0.0.0.0 --port 7071
+$ JAVA_OPTS=-Xmx16g bin/pio eventserver ...
 ```
 
 ## Engine Training
diff --git a/docs/manual/source/samples/languages.html.md b/docs/manual/source/samples/languages.html.md
index e4c94ac..2b7c63b 100644
--- a/docs/manual/source/samples/languages.html.md
+++ b/docs/manual/source/samples/languages.html.md
@@ -8,10 +8,10 @@
 This is a sample code block with no language.
 
 ```
-$ $PIO_HOME/bin/pio eventserver --ip 0.0.0.0
+$ $PIO_HOME/bin/pio eventserver
 $ cd /path/to/engine
 $ ../bin/pio train
-$ ../bin/pio deploy --ip 0.0.0.0
+$ ../bin/pio deploy
 ```
 
 ## Scala
diff --git a/docs/manual/source/system/deploy-cloudformation.html.md b/docs/manual/source/system/deploy-cloudformation.html.md.erb
similarity index 98%
rename from docs/manual/source/system/deploy-cloudformation.html.md
rename to docs/manual/source/system/deploy-cloudformation.html.md.erb
index 86d1402..913ffd5 100644
--- a/docs/manual/source/system/deploy-cloudformation.html.md
+++ b/docs/manual/source/system/deploy-cloudformation.html.md.erb
@@ -75,7 +75,7 @@
 as shown above. This will bring up the **Select Template** screen. Name your
 stack as you like. Within the *Template* section, choose **Specify an Amazon S3
 template URL**, and put
-https://s3.amazonaws.com/cloudformation.prediction.io/0.9.1/pio.json as the
+https://s3.amazonaws.com/cloudformation.prediction.io/<%= data.versions.pio %>/pio.json as the
 value.
 
 ![CloudFormation Stack Template Selection](/images/cloudformation/cf-03.png)
diff --git a/docs/manual/source/templates/classification/dase.html.md.erb b/docs/manual/source/templates/classification/dase.html.md.erb
index 1dec520..47fb26b 100644
--- a/docs/manual/source/templates/classification/dase.html.md.erb
+++ b/docs/manual/source/templates/classification/dase.html.md.erb
@@ -2,7 +2,11 @@
 title: DASE Components Explained (Classification)
 ---
 
-<%= partial 'shared/dase/dase', locals: { template_name: 'Classification Engine Template' } %>
+<%= partial 'shared/dase/dase', 
+locals: { 
+  template_name: 'Classification Engine Template',
+  evaluation_link: '/evaluation/paramtuning/'
+} %>
 
 ## The Engine Design
 
diff --git a/docs/manual/source/templates/recommendation/dase.html.md.erb b/docs/manual/source/templates/recommendation/dase.html.md.erb
index 245db16..6cb87f5 100644
--- a/docs/manual/source/templates/recommendation/dase.html.md.erb
+++ b/docs/manual/source/templates/recommendation/dase.html.md.erb
@@ -2,7 +2,11 @@
 title: DASE Components Explained (Recommendation)
 ---
 
-<%= partial 'shared/dase/dase', locals: { template_name: 'Recommendation Engine Template' } %>
+<%= partial 'shared/dase/dase', 
+locals: { 
+  template_name: 'Recommendation Engine Template',
+  evaluation_link: '/templates/recommendation/evaluation/'
+} %>
 
 ## The Engine Design
 
diff --git a/docs/manual/source/templates/recommendation/evaluation.html.md.erb b/docs/manual/source/templates/recommendation/evaluation.html.md.erb
new file mode 100644
index 0000000..78fbc16
--- /dev/null
+++ b/docs/manual/source/templates/recommendation/evaluation.html.md.erb
@@ -0,0 +1,353 @@
+---
+title: Evaluation Explained (Recommendation)
+---
+
+A PredictionIO engine is instantiated by a set of parameters; these parameters
+determine which algorithm is used as well as the parameters for the algorithm.
+This naturally raises the question of how to choose the best set of parameters.
+The evaluation module streamlines the process of tuning the engine to the best
+parameter set and deploying it.
+
+## Evaluation Quick Start
+
+We assume you have run the [Recommendation Quick Start](/templates/recommendation/quickstart/)
+and will skip the data collection / import instructions.
+
+### Edit the AppId
+
+Edit MyRecommendation/src/main/scala/***Evaluation.scala*** to specify the
+*appId* you used to import the data.
+
+```scala
+object ParamsList extends EngineParamsGenerator {
+  private[this] val baseEP = EngineParams(
+    dataSourceParams = DataSourceParams(
+      appId = 21, 
+      ...
+      )
+  ...
+}
+```
+
+### Build and run the evaluation
+
+To run evaluation, the command `pio eval` is used. It takes two mandatory
+parameters:
+
+1. the `Evaluation` object, which tells PredictionIO the engine and metric we
+   use for the evaluation; and
+2. the `EngineParamsGenerator`, which contains a list of engine params to test
+   against.
+
+The following command kickstarts the evaluation workflow for the recommendation
+template.
+
+```
+$ pio build
+...
+$ pio eval org.template.recommendation.RecommendationEvaluation \
+    org.template.recommendation.EngineParamsList 
+```
+
+You will see the following output:
+
+```
+...
+[INFO 2015-03-31 00:31:53,934] [CoreWorkflow$] runEvaluation started
+...
+[INFO 2015-03-31 00:35:56,782] [CoreWorkflow$] Updating evaluation instance with result: MetricEvaluatorResult:
+  # engine params evaluated: 3
+Optimal Engine Params:
+  {
+  "dataSourceParams":{
+    "":{
+      "appId":19,
+      "evalParams":{
+        "kFold":5,
+        "queryNum":10
+      }
+    }
+  },
+  "preparatorParams":{
+    "":{
+      
+    }
+  },
+  "algorithmParamsList":[
+    {
+      "als":{
+        "rank":10,
+        "numIterations":40,
+        "lambda":0.01,
+        "seed":3
+      }
+    }
+  ],
+  "servingParams":{
+    "":{
+      
+    }
+  }
+}
+Metrics:
+  Precision@K (k=10, threshold=4.0): 0.15205820105820103
+  PositiveCount (threshold=4.0): 5.753333333333333
+  Precision@K (k=10, threshold=2.0): 0.1542777777777778
+  PositiveCount (threshold=2.0): 6.833333333333333
+  Precision@K (k=10, threshold=1.0): 0.15068518518518517
+  PositiveCount (threshold=1.0): 10.006666666666666
+[INFO 2015-03-31 00:36:01,516] [CoreWorkflow$] runEvaluation completed
+
+```
+
+The console prints out the evaluation metric score of each engine params, and
+finally pretty prints the optimal engine params. Among the 3 engine params we
+evaluated, the second yields the best Precision@K score of ~0.1521.
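+
+The `RecommendationEvaluation` object passed to `pio eval` above is typically
+defined alongside the `EngineParamsGenerator` in
+MyRecommendation/src/main/scala/***Evaluation.scala***. For reference, here is a
+minimal sketch of what such an object could look like. It follows the
+`MetricEvaluator` pattern shown later in this document and assumes the
+`PrecisionAtK` and `PositiveCount` metrics used in the sample output above; the
+exact contents of your copy of the template may differ.
+
+```scala
+// Sketch only: pair the engine with a MetricEvaluator so that `pio eval`
+// knows which metric to optimize and which additional metrics to report.
+object RecommendationEvaluation extends Evaluation {
+  engineEvaluator = (
+    RecommendationEngine(),
+    MetricEvaluator(
+      metric = PrecisionAtK(k = 10, ratingThreshold = 4.0),
+      otherMetrics = Seq(
+        PositiveCount(ratingThreshold = 4.0))))
+}
+```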
+
+
+## The Evaluation Design
+
+We assume you have read the [Tuning and Evaluation](/evaluation) section. We
+will cover the evaluation aspects which are specific to the recommendation
+engine.
+
+In recommendation evaluation, the raw data is a sequence of known ratings. A
+rating has 3 components: user, item, and a score. We use the $k$-fold method for
+evaluation: the raw data is sliced into a sequence of (training, validation)
+data tuples.
+
+In the validation data, we construct a query for *each user*, and get a list of
+recommended items from the engine. This is vastly different from the
+classification tutorial, where there is a one-to-one correspondence between
+training data points and validation data points. In this evaluation,
+our unit of evaluation is the *user*: we evaluate the quality of an engine
+using the known ratings of a user.
+
+### Key assumptions 
+
+There are multiple assumptions we have to make when we evaluate a 
+recommendation engine:
+
+- Definition of 'good'. We want to quantify whether the engine is able to
+recommend items which the user likes, so we need to define what is meant by
+'good'. In this example, we have two kinds of events: 'rate' and 'buy'. The
+'rate' event is associated with a rating value which ranges from 1 to 4, and
+the 'buy' event is mapped to a rating of 4. When we implement the metric, we
+have to specify a rating threshold; only ratings above the threshold are
+considered 'good'.
+
+- The absence of complete ratings. It is extremely unlikely that the training
+data contains ratings for all user-item tuples. For example, in a system
+containing 1000 items, a user may only have rated 20 of them, leaving 980 items
+unrated. There is no way for us to tell with certainty whether the user likes
+an unrated product. When we examine the evaluation result, it is important to
+keep in mind that the final metric is only an approximation of the actual
+result.
+
+- Recommendation affects user behavior. Suppose you are an e-commerce company
+and would like to use the recommendation engine to personalize the landing
+page; the items you show on the landing page directly impact what the user is
+going to purchase. This is different from weather prediction: whatever the
+weather forecast engine predicts, tomorrow's weather won't be affected.
+Therefore, when we conduct offline evaluation for recommendation engines, it is
+possible that the actual user behavior is dramatically different from the
+evaluation result. However, in the evaluation, for simplicity, we have to
+assume that user behavior is homogeneous.
+
+
+## Evaluation Data Generation
+
+### Actual Result
+
+In MyRecommendation/src/main/scala/***Engine.scala***,
+we define the `ActualResult` class, which represents the user ratings used for
+validation. It stores the list of ratings in the validation set for a user.
+
+```scala
+case class ActualResult(
+  ratings: Array[Rating]
+) extends Serializable
+```
+
+### Implement the Data Generation Method in DataSource
+
+In MyRecommendation/src/main/scala/***DataSource.scala***,
+the `readEval` method reads and selects data from the data store
+and returns a sequence of (training, validation) data.
+
+```scala
+case class DataSourceEvalParams(kFold: Int, queryNum: Int)
+
+case class DataSourceParams(
+  appId: Int, 
+  evalParams: Option[DataSourceEvalParams]) extends Params
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, ActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  def getRatings(sc: SparkContext): RDD[Rating] = {
+    val eventsDb = Storage.getPEvents()
+    val eventsRDD: RDD[Event] = eventsDb.find(
+      appId = dsp.appId,
+      entityType = Some("user"),
+      eventNames = Some(List("rate", "buy")), // read "rate" and "buy" event
+      // targetEntityType is optional field of an event.
+      targetEntityType = Some(Some("item")))(sc)
+
+    val ratingsRDD: RDD[Rating] = eventsRDD.map { event =>
+      val rating = try {
+        val ratingValue: Double = event.event match {
+          case "rate" => event.properties.get[Double]("rating")
+          case "buy" => 4.0 // map buy event to rating value of 4
+          case _ => throw new Exception(s"Unexpected event ${event} is read.")
+        }
+        // entityId and targetEntityId is String
+        Rating(event.entityId,
+          event.targetEntityId.get,
+          ratingValue)
+      } catch {
+        case e: Exception => {
+          logger.error(s"Cannot convert ${event} to Rating. Exception: ${e}.")
+          throw e
+        }
+      }
+      rating
+    }.cache()
+
+    ratingsRDD
+  }
+
+  ...
+
+  override
+  def readEval(sc: SparkContext)
+  : Seq[(TrainingData, EmptyEvaluationInfo, RDD[(Query, ActualResult)])] = {
+    require(!dsp.evalParams.isEmpty, "Must specify evalParams")
+    val evalParams = dsp.evalParams.get
+
+    val kFold = evalParams.kFold
+    val ratings: RDD[(Rating, Long)] = getRatings(sc).zipWithUniqueId
+    
+    (0 until kFold).map { idx => {
+      val trainingRatings = ratings.filter(_._2 % kFold != idx).map(_._1)
+      val testingRatings = ratings.filter(_._2 % kFold == idx).map(_._1)
+
+      val testingUsers: RDD[(String, Iterable[Rating])] = testingRatings.groupBy(_.user)
+
+      (new TrainingData(trainingRatings),
+        new EmptyEvaluationInfo(),
+        testingUsers.map { 
+          case (user, ratings) => (Query(user, evalParams.queryNum), ActualResult(ratings.toArray))
+        }
+      )
+    }}
+  }
+}
+```
+
+The evaluation data generation is controlled by two parameters
+in the `DataSourceEvalParams`. The first parameter `kFold` is the number of
+folds we use for evaluation; the second parameter `queryNum` is used for
+query construction.
+
+
+The `getRatings` method is factored out from the `readTraining` method, as
+they both serve the same function of reading from the data source and
+converting a user-item action into a rating (lines 22 - 40).
+
+The `readEval` method is a k-fold evaluation implementation.
+We annotate each rating in the raw data with an index (line 54), then
+in each fold, the rating goes to either the training or the testing set
+based on the modulus value.
+We group ratings by user, and one query is constructed *for each user* (line 60).
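+
+As a minimal illustration of the modulus-based split, here is a sketch using
+plain Scala collections in place of RDDs, with hypothetical rating labels:
+
+```scala
+val kFold = 3
+// Each rating is paired with an index, as zipWithUniqueId does for the RDD.
+val indexed = Seq("r0", "r1", "r2", "r3", "r4", "r5").zipWithIndex
+val folds = (0 until kFold).map { idx =>
+  val training = indexed.filter(_._2 % kFold != idx).map(_._1)
+  val testing  = indexed.filter(_._2 % kFold == idx).map(_._1)
+  (training, testing)
+}
+// For fold 1, the testing set holds the ratings whose index mod 3 == 1,
+// i.e. r1 and r4; every other rating goes to the training set of that fold.
+```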
+
+## Evaluation Metrics
+
+In the [evaluation and tuning tutorial](/evaluation/), we use ***Metric*** to
+compute the quality of an engine variant.
+However, in actual use cases like recommendation, as we have made many
+assumptions in our model, using a single metric may lead to a biased evaluation.
+We will discuss using multiple
+***Metrics*** to generate a comprehensive evaluation and a more global view
+of the engine.
+
+### Precision@K
+
+Precision@K measures the proportion of *relevant* items amongst the first *k*
+items. A recommendation engine usually wants to make sure the top few items
+recommended are appealing to the user. Think about Google search: we usually
+give up after looking at the first and second result pages.
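+
+As a minimal sketch of the idea (plain Scala, not the `PrecisionAtK` metric
+used by the template), precision@k is the fraction of the top *k* recommended
+items that are relevant:
+
+```scala
+// Note: the template's metric treats the no-relevant-data case as undefined;
+// this sketch ignores that subtlety.
+def precisionAtK(recommended: Seq[String], relevant: Set[String], k: Int): Double = {
+  val topK = recommended.take(k)
+  if (topK.isEmpty) 0.0 else topK.count(relevant.contains).toDouble / topK.size
+}
+
+// Hypothetical example: 2 of the top 3 recommendations are relevant => ~0.667
+precisionAtK(Seq("i1", "i2", "i3", "i4"), relevant = Set("i1", "i3"), k = 3)
+```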
+
+### Precision@K Parameters
+
+There are two questions associated with it. 
+
+1. How do we define *relevant*? 
+2. What is a good value of *k*?
+
+Before we answer these questions, we need to understand what constitutes a good
+metric. It is like exams: if everyone gets full scores, the exam fails its goal
+of determining what the candidates don't know; if everyone fails, the exam
+fails its goal of determining what the candidates know.
+A good metric should be able to distinguish the good from the bad.
+
+A way to define *relevant* is to use the notion of a rating threshold. If the
+user rating for an item is higher than a certain threshold, we say it is
+relevant. However, without looking at the data, it is hard to pick a reasonable
+threshold. We can set the threshold as high as the maximum rating of 4.0, but
+that may severely limit the relevant set size, and the precision scores will be
+close to zero or undefined (precision is undefined if there is no relevant
+data). On the other hand, we can set the threshold as low as the minimum
+rating, but that makes the precision metric uninformative as well, since all
+scores will be close to 1.
+A similar argument applies to picking a good value of *k*.
+
+A method to choose a good parameter is *not* to choose one, but instead to test
+out *a whole spectrum of parameters*. If an engine variant is good, it should
+robustly perform well across different metric parameters.
+The evaluation module supports multiple metrics. The following code
+snippet demonstrates a sample usage.
+
+```scala
+object ComprehensiveRecommendationEvaluation extends Evaluation {
+  val ratingThresholds = Seq(0.0, 2.0, 4.0)
+  val ks = Seq(1, 3, 10)
+
+  engineEvaluator = (
+    RecommendationEngine(),
+    MetricEvaluator(
+      metric = PrecisionAtK(k = 3, ratingThreshold = 2.0),
+      otherMetrics = (
+        (for (r <- ratingThresholds) yield PositiveCount(ratingThreshold = r)) ++
+        (for (r <- ratingThresholds; k <- ks) yield PrecisionAtK(k = k, ratingThreshold = r))
+      )))
+}
+```
+
+We have two types of `Metric`s.
+
+- `PositiveCount` is a helper metric that returns the average
+number of positive samples for a specific rating threshold, so we get some
+idea about the *demographics* of the data. If `PositiveCount` is too low or too
+high for a certain threshold, we know that it should not be used.
+We have three thresholds (line 2), and three instances of the
+`PositiveCount` metric are instantiated (line 10), one for each threshold.
+
+- `Precision@K` is the actual metric we use.
+We have two lists of parameters (lines 2 to 3): `ratingThreshold` defines what rating is good,
+and `k` defines how many items we evaluate in the `PredictedResult`.
+We generate a list of all combinations (line 11).
+
+These metrics are specified as `otherMetrics` (lines 9 to 11); they
+will be calculated and displayed on the evaluation UI.
+
+To run this evaluation, you can:
+
+```
+$ pio eval org.template.recommendation.ComprehensiveRecommendationEvaluation \
+  org.template.recommendation.EngineParamsList
+```
+
+
+
+
diff --git a/docs/scaladoc/rootdoc.txt b/docs/scaladoc/rootdoc.txt
index bf625e9..94c194a 100644
--- a/docs/scaladoc/rootdoc.txt
+++ b/docs/scaladoc/rootdoc.txt
@@ -1,8 +1,11 @@
 This is the API documentation of PredictionIO.
 
-If you are building a prediction engine, the most interesting package would be
-[[io.prediction.controller]]. If your prediction engine uses Event Store, you
-may also want to look at [[io.prediction.data.storage]].
+== Package Structure ==
+
+ - [[io.prediction.controller]] - The common starting point. Building blocks of a prediction engine.
+ - [[io.prediction.data.storage]] - Event Store API.
+
+== Experimental Features ==
 
 Classes and methods marked with <span class="experimental badge" style="float:
 none;">Experimental</span> are user-facing features which have not been
diff --git a/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala b/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
index 7bc52cb..985077f 100644
--- a/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
+++ b/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
@@ -163,4 +163,14 @@
 
     s"($label, $featuresString)"
   }
+
+  override def equals(other: Any) = other match {
+    case that: LabeledPoint => that.toString == this.toString
+    case _ => false
+  }
+
+  override def hashCode() = {
+    this.toString.hashCode
+  }
+
 }
diff --git a/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala b/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala
new file mode 100644
index 0000000..dff193b
--- /dev/null
+++ b/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala
@@ -0,0 +1,64 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package io.prediction.e2.evaluation
+
+import scala.reflect.ClassTag
+import org.apache.spark.rdd.RDD
+
+object CommonHelperFunctions {
+
+  /**
+   * Split a data set into evalK folds for cross-validation.
+   * Apply to data sets supplied to evaluation.
+   *
+   * @tparam D Data point class.
+   * @tparam TD Training data class.
+   * @tparam EI Evaluation Info class.
+   * @tparam Q Input query class.
+   * @tparam A Actual value class.
+   *
+   * @param evalK Number of folds to generate.
+   * @param dataset Full data set to split.
+   * @param evaluatorInfo Evaluation info attached to every generated fold.
+   * @param trainingDataCreator Builds training data from an RDD of data points.
+   * @param queryCreator Builds a query from a single data point.
+   * @param actualCreator Builds an actual value from a single data point.
+   */
+  def splitData[D: ClassTag, TD, EI, Q, A](
+     evalK: Int,
+     dataset: RDD[D],
+     evaluatorInfo: EI,
+     trainingDataCreator: RDD[D] => TD,
+     queryCreator: D => Q,
+     actualCreator: D => A): Seq[(TD, EI, RDD[(Q, A)])] = {
+
+    val indexedPoints = dataset.zipWithIndex
+
+    def selectPoint(foldIdx: Int, pt: D, idx: Long, k: Int, isTraining: Boolean): Option[D] = {
+      if ((idx % k == foldIdx) ^ isTraining) Some(pt)
+      else None
+    }
+
+    (0 until evalK).map { foldIdx =>
+      val trainingPoints = indexedPoints.flatMap { case(pt, idx) =>
+        selectPoint(foldIdx, pt, idx, evalK, true)
+      }
+      val testingPoints = indexedPoints.flatMap { case(pt, idx) =>
+        selectPoint(foldIdx, pt, idx, evalK, false)
+      }
+
+      (
+        trainingDataCreator(trainingPoints),
+        evaluatorInfo,
+        testingPoints.map { d => (queryCreator(d), actualCreator(d)) }
+      )
+    }
+  }
+}
diff --git a/e2/src/main/scala/io/prediction/e2/package.scala b/e2/src/main/scala/io/prediction/e2/package.scala
index 111e9bc..bb94503 100644
--- a/e2/src/main/scala/io/prediction/e2/package.scala
+++ b/e2/src/main/scala/io/prediction/e2/package.scala
@@ -19,3 +19,4 @@
   * PredictionIO.
   */
 package object engine {}
+package object evaluation {}
diff --git a/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala b/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala
new file mode 100644
index 0000000..ead51b2
--- /dev/null
+++ b/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala
@@ -0,0 +1,111 @@
+package io.prediction.e2.evaluation
+
+import org.scalatest.{Matchers, Inspectors, FlatSpec}
+import org.apache.spark.rdd.RDD
+import io.prediction.e2.fixture.SharedSparkContext
+import io.prediction.e2.engine.LabeledPoint
+
+object CrossValidationTest {
+  case class TrainingData(labeledPoints: Seq[LabeledPoint])
+  case class Query(features: Array[String])
+  case class ActualResult(label: String)
+
+  case class EmptyEvaluationParams()
+
+  def toTrainingData(labeledPoints: RDD[LabeledPoint]) = TrainingData(labeledPoints.collect().toSeq)
+  def toQuery(labeledPoint: LabeledPoint) = Query(labeledPoint.features)
+  def toActualResult(labeledPoint: LabeledPoint) = ActualResult(labeledPoint.label)
+
+}
+
+
+class CrossValidationTest extends FlatSpec with Matchers with Inspectors
+with SharedSparkContext{
+
+
+  val Label1 = "l1"
+  val Label2 = "l2"
+  val Label3 = "l3"
+  val Label4 = "l4"
+  val Attribute1 = "a1"
+  val NotAttribute1 = "na1"
+  val Attribute2 = "a2"
+  val NotAttribute2 = "na2"
+
+  val labeledPoints = Seq(
+    LabeledPoint(Label1, Array(Attribute1, Attribute2)),
+    LabeledPoint(Label2, Array(NotAttribute1, Attribute2)),
+    LabeledPoint(Label3, Array(Attribute1, NotAttribute2)),
+    LabeledPoint(Label4, Array(NotAttribute1, NotAttribute2))
+  )
+
+  val dataCount = labeledPoints.size
+  val evalKs = (1 to dataCount)
+  val emptyParams = new CrossValidationTest.EmptyEvaluationParams()
+  type Fold = (
+    CrossValidationTest.TrainingData,
+    CrossValidationTest.EmptyEvaluationParams,
+    RDD[(CrossValidationTest.Query, CrossValidationTest.ActualResult)])
+
+  def toTestTrain(dataSplit: Fold): (Seq[LabeledPoint], Seq[LabeledPoint]) = {
+    val trainingData = dataSplit._1.labeledPoints
+    val queryActual = dataSplit._3
+    val testingData = queryActual.map { case (query, actual) =>
+      LabeledPoint(actual.label, query.features)
+    }
+    (trainingData, testingData.collect().toSeq)
+  }
+
+  def splitData(k: Int, labeledPointsRDD: RDD[LabeledPoint]): Seq[Fold] = {
+    CommonHelperFunctions.splitData[
+      LabeledPoint,
+      CrossValidationTest.TrainingData,
+      CrossValidationTest.EmptyEvaluationParams,
+      CrossValidationTest.Query,
+      CrossValidationTest.ActualResult](
+        k,
+        labeledPointsRDD,
+        emptyParams,
+        CrossValidationTest.toTrainingData,
+        CrossValidationTest.toQuery,
+        CrossValidationTest.toActualResult)
+  }
+
+  "Fold count" should "equal evalK" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    val lengths = evalKs.map(k => splitData(k, labeledPointsRDD).length)
+    lengths should be(evalKs)
+  }
+
+  "Testing data size" should  "be within 1 of total / evalK" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    val splits = evalKs.map(k => k -> splitData(k, labeledPointsRDD))
+    val diffs = splits.map { case (k, folds) =>
+      folds.map(fold => fold._3.count() - dataCount / k)
+    }
+    forAll(diffs) {foldDiffs => foldDiffs.max should be <=  1L}
+    diffs.map(folds => folds.sum) should be(evalKs.map(k => dataCount % k))
+  }
+
+  "Training + testing" should "equal original dataset" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    forAll(evalKs) {k =>
+      val split = splitData(k, labeledPointsRDD)
+      forAll(split) {fold =>
+        val(training, testing) = toTestTrain(fold)
+        (training ++ testing).toSet should be(labeledPoints.toSet)
+      }
+    }
+  }
+
+  "Training and testing" should "be disjoint" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    forAll(evalKs) { k =>
+      val split = splitData(k, labeledPointsRDD)
+      forAll(split) { fold =>
+        val (training, testing) = toTestTrain(fold)
+        training.toSet.intersect(testing.toSet) should be('empty)
+      }
+    }
+  }
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/.gitignore b/examples/scala-parallel-ecommercerecommendation/weighted-items/.gitignore
new file mode 100644
index 0000000..57841c6
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/.gitignore
@@ -0,0 +1,4 @@
+manifest.json
+target/
+pio.log
+/pio.sbt
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/README.md b/examples/scala-parallel-ecommercerecommendation/weighted-items/README.md
new file mode 100644
index 0000000..68759c0
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/README.md
@@ -0,0 +1,212 @@
+# E-Commerce Recommendation Template With Weighted Items
+
+This engine template is based on the E-Commerce Recommendation Template v0.1.1. It has been modified so that
+each item can be given a different weight.
+
+By default, items have a weight of 1.0. Giving an item a weight greater than
+1.0 will make it appear more often and can be useful for e.g. promoted products. An item can also be given
+a weight smaller than 1.0 (but bigger than 0), in which case it will be recommended less often than before. Weight
+values smaller than 0.0 are invalid.
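+
+Concretely, the weight is multiplied into the item's predicted score (see the
+Development Notes below), so with hypothetical numbers an item's standing
+changes like this:
+
+```scala
+val rawScore = 0.50           // score produced by the ALS model
+val promoted = rawScore * 1.2 // 0.60 -> recommended more often
+val demoted  = rawScore * 0.5 // 0.25 -> recommended less often
+```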
+
+## Documentation
+
+Please refer to http://docs.prediction.io/templates/ecommercerecommendation/quickstart/
+
+## Development Notes
+
+### Weight constraint event
+
+Item weights are specified by means of a `weightedItems` constraint type event, which includes the weight for all items
+which don't have the default weight of 1.0. At any given time, only the last such `weightedItems` event is taken into
+account.
+
+The event design has been optimized for the use case where there may be many different items that are to be adjusted
+by the same percentage, and so it's based on a list of objects, each containing a list of item IDs and their weight:
+
+```
+{
+  "event" : "$set",
+  "entityType" : "constraint"
+  "entityId" : "weightedItems",
+  "properties" : {
+    weights: [
+      {  
+        "items": [ "i4", "i14"],
+        "weight" : 1.2,
+      },
+      {
+        "items": [ "i11"],
+        "weight" : 1.5,
+      }
+    ]
+  },
+  "eventTime" : "2015-02-17T02:11:21.934Z"
+}
+```
+
+### Changes to ALSAlgorithm.scala
+
+* Added a case class to represent each group of items which are given the same weight:
+
+```scala
+// Item weights are defined according to this structure so that groups of items can be easily changed together
+case class WeightsGroup(
+  items: Set[String],
+  weight: Double
+)
+```
+
+* Extract the sequence of `WeightsGroup`s defined in the last `weightedItems` event:
+
+```scala
+    // Get the latest constraint weightedItems. This comes in the form of a sequence of WeightsGroup
+    val groupedWeights = lEventsDb.findSingleEntity(
+      appId = ap.appId,
+      entityType = "constraint",
+      entityId = "weightedItems",
+      eventNames = Some(Seq("$set")),
+      limit = Some(1),
+      latest = true,
+      timeout = 200.millis
+    ) match {
+      case Right(x) =>
+        if (x.hasNext)
+          x.next().properties.get[Seq[WeightsGroup]]("weights")
+        else
+          Seq.empty
+      case Left(e) =>
+        logger.error(s"Error when reading set weightedItems event: ${e}")
+        Seq.empty
+    }
+```
+
+* Transform the sequence of `WeightsGroup`s into a `Map[Int, Double]` that we can easily query to extract the weight
+given to an item, using its `Int` index. For undefined items, their weight is 1.0.
+
+```scala
+    // Transform groupedWeights into a map of index -> weight that we can easily query
+    val weights: Map[Int, Double] = (for {
+      group <- groupedWeights
+      item <- group.items
+      index <- model.itemStringIntMap.get(item)
+    } yield (index, group.weight))
+      .toMap
+      .withDefaultValue(1.0)
+```
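+
+For instance, a small sketch (with hypothetical item indices) of how such a map
+behaves:
+
+```scala
+val weights = Map(4 -> 1.2, 14 -> 1.2, 11 -> 1.5).withDefaultValue(1.0)
+weights(11) // 1.5
+weights(7)  // 1.0 -- no explicit weight was set for this item index
+```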
+
+* Adjust scores according to item weights:
+
+```scala
+            val originalScore = dotProduct(uf, feature.get)
+            // Adjusting score according to given item weights
+            val adjustedScore = originalScore * weights(i)
+            (i, adjustedScore)
+```
+
+* Pass the map of weights to the `predictNewUser` function and adjust scores similarly:
+
+```scala
+  private
+  def predictNewUser(
+    model: ALSModel,
+    query: Query,
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int],
+    weights: Map[Int, Double]): Array[(Int, Double)] = {
+```
+
+```scala
+          val originalScore = recentFeatures.map { rf =>
+            cosine(rf, feature.get) // feature is defined
+          }.sum
+          // Adjusting score according to given item weights
+          val adjustedScore = originalScore * weights(i)
+          (i, adjustedScore)
+```
+
+```scala
+      predictNewUser(
+        model = model,
+        query = query,
+        whiteList = whiteList,
+        blackList = finalBlackList,
+        weights = weights
+      )
+```
+
+### Setting constraint "weightedItems"
+
+You can set the constraint *weightedItems* by simply sending an event to the Event Server.
+
+For example, say you want to adjust the score of items "i4" and "i14" with a weight of 1.2 and item "i11" with a weight of 1.5:
+
+```
+$ curl -i -X POST http://localhost:7070/events.json?accessKey=zPkr6sBwQoBwBjVHK2hsF9u26L38ARSe19QzkdYentuomCtYSuH0vXP5fq7advo4 \
+-H "Content-Type: application/json" \
+-d '{
+  "event" : "$set",
+  "entityType" : "constraint"
+  "entityId" : "weightedItems",
+  "properties" : {
+    "weights": [
+      {
+        "items": ["i4", "i14"],
+        "weight": 1.2
+      },
+      {
+        "items": ["i11"],
+        "weight": 1.5
+      }
+    ]
+  },
+  "eventTime" : "2015-02-17T02:11:21.934Z"
+}'
+```
+
+Note that only the latest set constraint is used (based on eventTime), which means that if you create another new constraint event, the previous constraint won't have any effect anymore. For example, after you send the following event, only the scores of items "i2" and "i10" will be adjusted by the weights, and the previous constraint for items "i4", "i14", "i11" won't be used anymore.
+
+```
+$ curl -i -X POST http://localhost:7070/events.json?accessKey=<ACCESS KEY> \
+-H "Content-Type: application/json" \
+-d '{
+  "event" : "$set",
+  "entityType" : "constraint"
+  "entityId" : "weightedItems",
+  "properties" : {
+    "weights": [
+      {
+        "items": ["i2", "i10"],
+        "weight": 1.5
+      }
+    ]
+  },
+  "eventTime" : "2015-02-20T04:56:58.123Z"
+}'
+```
+
+To clear the constraint, simply set an empty weights array, i.e.:
+
+```
+curl -i -X POST http://localhost:7070/events.json?accessKey=<ACCESS KEY> \
+-H "Content-Type: application/json" \
+-d '{
+  "event" : "$set",
+  "entityType" : "constraint"
+  "entityId" : "weightedItems",
+  "properties" : {
+    "weights": []
+  },
+  "eventTime" : "2015-02-20T04:56:58.123Z"
+}'
+```
+
+
+You can also use the SDK to send these events, as shown in the sample set_weights.py script.
+
+### set_weights.py script
+
+A `set_weights.py` script has been created to add weights to some of the items. Usage:
+
+```
+$ python data/set_weights.py --access_key <your_access_key>
+```
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt b/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
new file mode 100644
index 0000000..f4d98ee
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
@@ -0,0 +1,12 @@
+import AssemblyKeys._
+
+assemblySettings
+
+name := "template-scala-parallel-ecommercerecommendation"
+
+organization := "io.prediction"
+
+libraryDependencies ++= Seq(
+  "io.prediction"    %% "core"          % pioVersion.value  % "provided",
+  "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/data/import_eventserver.py b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/import_eventserver.py
new file mode 100644
index 0000000..5391756
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/import_eventserver.py
@@ -0,0 +1,84 @@
+"""
+Import sample data for E-Commerce Recommendation Engine Template
+"""
+
+import predictionio
+import argparse
+import random
+
+SEED = 3
+
+def import_events(client):
+  random.seed(SEED)
+  count = 0
+  print client.get_status()
+  print "Importing data..."
+
+  # generate 10 users, with user ids u1,u2,....,u10
+  user_ids = ["u%s" % i for i in range(1, 11)]
+  for user_id in user_ids:
+    print "Set user", user_id
+    client.create_event(
+      event="$set",
+      entity_type="user",
+      entity_id=user_id
+    )
+    count += 1
+
+  # generate 50 items, with item ids i1,i2,....,i50
+  # random assign 1 to 4 categories among c1-c6 to items
+  categories = ["c%s" % i for i in range(1, 7)]
+  item_ids = ["i%s" % i for i in range(1, 51)]
+  for item_id in item_ids:
+    print "Set item", item_id
+    client.create_event(
+      event="$set",
+      entity_type="item",
+      entity_id=item_id,
+      properties={
+        "categories" : random.sample(categories, random.randint(1, 4))
+      }
+    )
+    count += 1
+
+  # each user randomly viewed 10 items
+  for user_id in user_ids:
+    for viewed_item in random.sample(item_ids, 10):
+      print "User", user_id ,"views item", viewed_item
+      client.create_event(
+        event="view",
+        entity_type="user",
+        entity_id=user_id,
+        target_entity_type="item",
+        target_entity_id=viewed_item
+      )
+      count += 1
+      # randomly buy some of the viewed items
+      if random.choice([True, False]):
+        print "User", user_id ,"buys item", viewed_item
+        client.create_event(
+          event="buy",
+          entity_type="user",
+          entity_id=user_id,
+          target_entity_type="item",
+          target_entity_id=viewed_item
+        )
+        count += 1
+
+  print "%s events are imported." % count
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+    description="Import sample data for e-commerce recommendation engine")
+  parser.add_argument('--access_key', default='invalid_access_key')
+  parser.add_argument('--url', default="http://localhost:7070")
+
+  args = parser.parse_args()
+  print args
+
+  client = predictionio.EventClient(
+    access_key=args.access_key,
+    url=args.url,
+    threads=5,
+    qsize=500)
+  import_events(client)
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/data/send_query.py b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/send_query.py
new file mode 100644
index 0000000..b0eb651
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/send_query.py
@@ -0,0 +1,7 @@
+"""
+Send sample query to prediction engine
+"""
+
+import predictionio
+engine_client = predictionio.EngineClient(url="http://localhost:8000")
+print engine_client.send_query({"user": "u1", "num": 4})
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/data/set_weights.py b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/set_weights.py
new file mode 100644
index 0000000..f3ae6b4
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/data/set_weights.py
@@ -0,0 +1,39 @@
+"""
+Set weights for E-Commerce Recommendation Engine Template items
+"""
+
+import argparse
+import predictionio
+
+def set_weights(client):
+    client.create_event(
+        event="$set",
+        entity_type="constraint",
+        entity_id="weightedItems",
+        properties={
+            "weights": [
+                {
+                    "items": ["i4", "i14"],
+                    "weight": 1.2
+                },
+                {
+                    "items": ["i11"],
+                    "weight": 1.5
+                }
+            ]
+        }
+    )
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description="Set weights to items")
+    parser.add_argument('--access_key', default='invalid_access_key')
+    parser.add_argument('--url', default="http://localhost:7070")
+
+    args = parser.parse_args()
+
+    client = predictionio.EventClient(
+        access_key=args.access_key,
+        url=args.url,
+        threads=5,
+        qsize=500)
+    set_weights(client)
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/engine.json b/examples/scala-parallel-ecommercerecommendation/weighted-items/engine.json
new file mode 100644
index 0000000..2a11542
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/engine.json
@@ -0,0 +1,24 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.ecommercerecommendation.ECommerceRecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appId": 4
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "appId": 4,
+        "unseenOnly": true,
+        "seenEvents": ["buy", "view"],
+        "rank": 10,
+        "numIterations" : 20,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/project/assembly.sbt b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/assembly.sbt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
new file mode 100644
index 0000000..8346a96
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
new file mode 100644
index 0000000..019937b
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
@@ -0,0 +1,465 @@
+package org.template.ecommercerecommendation
+
+import io.prediction.controller.P2LAlgorithm
+import io.prediction.controller.Params
+import io.prediction.data.storage.BiMap
+import io.prediction.data.storage.Event
+import io.prediction.data.storage.Storage
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.recommendation.ALS
+import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
+
+import grizzled.slf4j.Logger
+
+import scala.collection.mutable.PriorityQueue
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+
+case class ALSAlgorithmParams(
+  appId: Int,
+  unseenOnly: Boolean,
+  seenEvents: List[String],
+  rank: Int,
+  numIterations: Int,
+  lambda: Double,
+  seed: Option[Long]
+) extends Params
+
+class ALSModel(
+  val rank: Int,
+  val userFeatures: Map[Int, Array[Double]],
+  val productFeatures: Map[Int, (Item, Option[Array[Double]])],
+  val userStringIntMap: BiMap[String, Int],
+  val itemStringIntMap: BiMap[String, Int]
+) extends Serializable {
+
+  @transient lazy val itemIntStringMap = itemStringIntMap.inverse
+
+  override def toString = {
+    s" rank: ${rank}" +
+    s" userFeatures: [${userFeatures.size}]" +
+    s"(${userFeatures.take(2).toList}...)" +
+    s" productFeatures: [${productFeatures.size}]" +
+    s"(${productFeatures.take(2).toList}...)" +
+    s" userStringIntMap: [${userStringIntMap.size}]" +
+    s"(${userStringIntMap.take(2).toString}...)]" +
+    s" itemStringIntMap: [${itemStringIntMap.size}]" +
+    s"(${itemStringIntMap.take(2).toString}...)]"
+  }
+}
+
+// Item weights are defined according to this structure so that groups of items can be easily changed together
+case class WeightsGroup(
+  items: Set[String],
+  weight: Double
+)
+
+/**
+  * Use ALS to build item x feature matrix
+  */
+class ALSAlgorithm(val ap: ALSAlgorithmParams)
+  extends P2LAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {
+
+  @transient lazy val logger = Logger[this.type]
+  // NOTE: use getLEvents() for local access
+  @transient lazy val lEventsDb = Storage.getLEvents()
+
+  def train(sc: SparkContext, data: PreparedData): ALSModel = {
+    require(!data.viewEvents.take(1).isEmpty,
+      s"viewEvents in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preprator generates PreparedData correctly.")
+    require(!data.users.take(1).isEmpty,
+      s"users in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preprator generates PreparedData correctly.")
+    require(!data.items.take(1).isEmpty,
+      s"items in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preprator generates PreparedData correctly.")
+    // create User and item's String ID to integer index BiMap
+    val userStringIntMap = BiMap.stringInt(data.users.keys)
+    val itemStringIntMap = BiMap.stringInt(data.items.keys)
+
+    val mllibRatings = data.viewEvents
+      .map { r =>
+        // Convert user and item String IDs to Int index for MLlib
+        val uindex = userStringIntMap.getOrElse(r.user, -1)
+        val iindex = itemStringIntMap.getOrElse(r.item, -1)
+
+        if (uindex == -1)
+          logger.info(s"Couldn't convert nonexistent user ID ${r.user}"
+            + " to Int index.")
+
+        if (iindex == -1)
+          logger.info(s"Couldn't convert nonexistent item ID ${r.item}"
+            + " to Int index.")
+
+        ((uindex, iindex), 1)
+      }.filter { case ((u, i), v) =>
+        // keep events with valid user and item index
+        (u != -1) && (i != -1)
+      }
+      .reduceByKey(_ + _) // aggregate all view events of same user-item pair
+      .map { case ((u, i), v) =>
+        // MLlibRating requires integer index for user and item
+        MLlibRating(u, i, v)
+      }.cache()
+
+    // MLLib ALS cannot handle empty training data.
+    require(!mllibRatings.take(1).isEmpty,
+      s"mllibRatings cannot be empty." +
+      " Please check if your events contain valid user and item ID.")
+
+    // seed for MLlib ALS
+    val seed = ap.seed.getOrElse(System.nanoTime)
+
+    val m = ALS.trainImplicit(
+      ratings = mllibRatings,
+      rank = ap.rank,
+      iterations = ap.numIterations,
+      lambda = ap.lambda,
+      blocks = -1,
+      alpha = 1.0,
+      seed = seed)
+
+    val userFeatures = m.userFeatures.collectAsMap.toMap
+
+    // convert ID to Int index
+    val items = data.items.map { case (id, item) =>
+      (itemStringIntMap(id), item)
+    }
+
+    // join item with the trained productFeatures
+    val productFeatures = items.leftOuterJoin(m.productFeatures)
+      .collectAsMap.toMap
+
+    new ALSModel(
+      rank = m.rank,
+      userFeatures = userFeatures,
+      productFeatures = productFeatures,
+      userStringIntMap = userStringIntMap,
+      itemStringIntMap = itemStringIntMap
+    )
+  }
+
+  def predict(model: ALSModel, query: Query): PredictedResult = {
+
+    val userFeatures = model.userFeatures
+    val productFeatures = model.productFeatures
+
+    // convert whiteList's string ID to integer index
+    val whiteList: Option[Set[Int]] = query.whiteList.map( set =>
+      set.map(model.itemStringIntMap.get).flatten
+    )
+
+    val blackList: Set[String] = query.blackList.getOrElse(Set[String]())
+
+    // if unseenOnly is True, get all seen items
+    val seenItems: Set[String] = if (ap.unseenOnly) {
+
+      // get all user item events which are considered as "seen" events
+      val seenEvents: Iterator[Event] = lEventsDb.findSingleEntity(
+        appId = ap.appId,
+        entityType = "user",
+        entityId = query.user,
+        eventNames = Some(ap.seenEvents),
+        targetEntityType = Some(Some("item")),
+        // set time limit to avoid super long DB access
+        timeout = 200.millis
+      ) match {
+        case Right(x) => x
+        case Left(e) => {
+          logger.error(s"Error when read seen events: ${e}")
+          Iterator[Event]()
+        }
+      }
+
+      seenEvents.map { event =>
+        try {
+          event.targetEntityId.get
+        } catch {
+          case e => {
+            logger.error(s"Can't get targetEntityId of event ${event}.")
+            throw e
+          }
+        }
+      }.toSet
+    } else {
+      Set[String]()
+    }
+
+    // get the latest constraint unavailableItems $set event
+    val unavailableItems: Set[String] = lEventsDb.findSingleEntity(
+      appId = ap.appId,
+      entityType = "constraint",
+      entityId = "unavailableItems",
+      eventNames = Some(Seq("$set")),
+      limit = Some(1),
+      latest = true,
+      timeout = 200.millis
+    ) match {
+      case Right(x) => {
+        if (x.hasNext) {
+          x.next.properties.get[Set[String]]("items")
+        } else {
+          Set[String]()
+        }
+      }
+      case Left(e) => {
+        logger.error(s"Error when read set unavailableItems event: ${e}")
+        Set[String]()
+      }
+    }
+
+    // Get the latest constraint weightedItems. This comes in the form of a sequence of WeightsGroup
+    val groupedWeights = lEventsDb.findSingleEntity(
+      appId = ap.appId,
+      entityType = "constraint",
+      entityId = "weightedItems",
+      eventNames = Some(Seq("$set")),
+      limit = Some(1),
+      latest = true,
+      timeout = 200.millis
+    ) match {
+      case Right(x) =>
+        if (x.hasNext)
+          x.next().properties.get[Seq[WeightsGroup]]("weights")
+        else
+          Seq.empty
+      case Left(e) =>
+        logger.error(s"Error when reading set weightedItems event: ${e}")
+        Seq.empty
+    }
+
+    // Transform groupedWeights into a map of index -> weight that we can easily query
+    val weights: Map[Int, Double] = (for {
+      group <- groupedWeights
+      item <- group.items
+      index <- model.itemStringIntMap.get(item)
+    } yield (index, group.weight))
+      .toMap
+      .withDefaultValue(1.0)
+
+    // combine query's blackList,seenItems and unavailableItems
+    // into final blackList.
+    // convert seen Items list from String ID to integer Index
+    val finalBlackList: Set[Int] = (blackList ++ seenItems ++ unavailableItems)
+      .map( x => model.itemStringIntMap.get(x)).flatten
+
+    val userFeature =
+      model.userStringIntMap.get(query.user).map { userIndex =>
+        userFeatures.get(userIndex)
+      }
+      // flatten Option[Option[Array[Double]]] to Option[Array[Double]]
+      .flatten
+
+    val topScores = if (userFeature.isDefined) {
+      // the user has feature vector
+      val uf = userFeature.get
+      val indexScores: Map[Int, Double] =
+        productFeatures.par // convert to parallel collection
+          .filter { case (i, (item, feature)) =>
+            feature.isDefined &&
+            isCandidateItem(
+              i = i,
+              item = item,
+              categories = query.categories,
+              whiteList = whiteList,
+              blackList = finalBlackList
+            )
+          }
+          .map { case (i, (item, feature)) =>
+            // NOTE: feature must be defined, so can call .get
+            val originalScore = dotProduct(uf, feature.get)
+            // Adjusting score according to given item weights
+            val adjustedScore = originalScore * weights(i)
+            (i, adjustedScore)
+          }
+          .filter(_._2 > 0) // only keep items with score > 0
+          .seq // convert back to sequential collection
+
+      val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+      val topScores = getTopN(indexScores, query.num)(ord).toArray
+
+      topScores
+
+    } else {
+      // the user doesn't have feature vector.
+      // For example, new user is created after model is trained.
+      logger.info(s"No userFeature found for user ${query.user}.")
+      predictNewUser(
+        model = model,
+        query = query,
+        whiteList = whiteList,
+        blackList = finalBlackList,
+        weights = weights
+      )
+    }
+
+    val itemScores = topScores.map { case (i, s) =>
+      new ItemScore(
+        // convert item int index back to string ID
+        item = model.itemIntStringMap(i),
+        score = s
+      )
+    }
+
+    new PredictedResult(itemScores)
+  }
+
+  /** Get the recently viewed items of the user and return top similar items */
+  private
+  def predictNewUser(
+    model: ALSModel,
+    query: Query,
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int],
+    weights: Map[Int, Double]): Array[(Int, Double)] = {
+
+    val productFeatures = model.productFeatures
+
+    // get latest 10 user view item events
+    val recentEvents = lEventsDb.findSingleEntity(
+      appId = ap.appId,
+      // entityType and entityId is specified for fast lookup
+      entityType = "user",
+      entityId = query.user,
+      eventNames = Some(Seq("view")),
+      targetEntityType = Some(Some("item")),
+      limit = Some(10),
+      latest = true,
+      // set time limit to avoid super long DB access
+      timeout = Duration(200, "millis")
+    ) match {
+      case Right(x) => x
+      case Left(e) => {
+        logger.error(s"Error when reading recent events: ${e}")
+        Iterator[Event]()
+      }
+    }
+
+    val recentItems: Set[String] = recentEvents.map { event =>
+      try {
+        event.targetEntityId.get
+      } catch {
+        case e: Throwable => {
+          logger.error(s"Can't get targetEntityId of event ${event}.")
+          throw e
+        }
+      }
+    }.toSet
+
+    val recentList: Set[Int] = recentItems.map (x =>
+      model.itemStringIntMap.get(x)).flatten
+
+    val recentFeatures: Vector[Array[Double]] = recentList.toVector
+      // productFeatures may not contain the requested item
+      .map { i =>
+        productFeatures.get(i).map { case (item, f) => f }.flatten
+      }.flatten
+
+    val indexScores: Map[Int, Double] = if (recentFeatures.isEmpty) {
+      logger.info(s"No productFeatures vector for recent items ${recentItems}.")
+      Map[Int, Double]()
+    } else {
+      productFeatures.par // convert to parallel collection
+        .filter { case (i, (item, feature)) =>
+          feature.isDefined &&
+          isCandidateItem(
+            i = i,
+            item = item,
+            categories = query.categories,
+            whiteList = whiteList,
+            blackList = blackList
+          )
+        }
+        .map { case (i, (item, feature)) =>
+          val originalScore = recentFeatures.map { rf =>
+            cosine(rf, feature.get) // feature is defined
+          }.sum
+          // Adjusting score according to given item weights
+          val adjustedScore = originalScore * weights(i)
+          (i, adjustedScore)
+        }
+        .filter(_._2 > 0) // keep items with score > 0
+        .seq // convert back to sequential collection
+    }
+
+    val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+    val topScores = getTopN(indexScores, query.num)(ord).toArray
+
+    topScores
+  }
+
+  private
+  def getTopN[T](s: Iterable[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+
+    val q = PriorityQueue[T]()
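+    // the caller passes a reversed (descending) ordering, so q.head is the
+    // lowest-scoring element kept so far; it is evicted whenever a better one arrives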
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+  private
+  def dotProduct(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.length
+    var i = 0
+    var d: Double = 0
+    while (i < size) {
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    d
+  }
+
+  private
+  def cosine(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.length
+    var i = 0
+    var n1: Double = 0
+    var n2: Double = 0
+    var d: Double = 0
+    while (i < size) {
+      n1 += v1(i) * v1(i)
+      n2 += v2(i) * v2(i)
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    val n1n2 = math.sqrt(n1) * math.sqrt(n2)
+    if (n1n2 == 0) 0 else d / n1n2
+  }
+
+  private
+  def isCandidateItem(
+    i: Int,
+    item: Item,
+    categories: Option[Set[String]],
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int]
+  ): Boolean = {
+    // can add other custom filtering here
+    whiteList.map(_.contains(i)).getOrElse(true) &&
+    !blackList.contains(i) &&
+    // filter categories
+    categories.map { cat =>
+      item.categories.exists { itemCat =>
+        // keep this item if its categories overlap with the query categories
+        itemCat.toSet.intersect(cat).nonEmpty
+      } // discard this item if it has no categories
+    }.getOrElse(true)
+  }
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..c102b72
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
@@ -0,0 +1,114 @@
+package org.template.ecommercerecommendation
+
+import io.prediction.controller.PDataSource
+import io.prediction.controller.EmptyEvaluationInfo
+import io.prediction.controller.EmptyActualResult
+import io.prediction.controller.Params
+import io.prediction.data.storage.Event
+import io.prediction.data.storage.Storage
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import grizzled.slf4j.Logger
+
+case class DataSourceParams(appId: Int) extends Params
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, EmptyActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  override
+  def readTraining(sc: SparkContext): TrainingData = {
+    val eventsDb = Storage.getPEvents()
+
+    // create a RDD of (entityID, User)
+    val usersRDD: RDD[(String, User)] = eventsDb.aggregateProperties(
+      appId = dsp.appId,
+      entityType = "user"
+    )(sc).map { case (entityId, properties) =>
+      val user = try {
+        User()
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get properties ${properties} of" +
+            s" user ${entityId}. Exception: ${e}.")
+          throw e
+        }
+      }
+      (entityId, user)
+    }.cache()
+
+    // create a RDD of (entityID, Item)
+    val itemsRDD: RDD[(String, Item)] = eventsDb.aggregateProperties(
+      appId = dsp.appId,
+      entityType = "item"
+    )(sc).map { case (entityId, properties) =>
+      val item = try {
+        // Assume categories is optional property of item.
+        Item(categories = properties.getOpt[List[String]]("categories"))
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get properties ${properties} of" +
+            s" item ${entityId}. Exception: ${e}.")
+          throw e
+        }
+      }
+      (entityId, item)
+    }.cache()
+
+    // get all "user" "view" "item" events
+    val viewEventsRDD: RDD[ViewEvent] = eventsDb.find(
+      appId = dsp.appId,
+      entityType = Some("user"),
+      eventNames = Some(List("view")),
+      // targetEntityType is optional field of an event.
+      targetEntityType = Some(Some("item")))(sc)
+      // eventsDb.find() returns RDD[Event]
+      .map { event =>
+        val viewEvent = try {
+          event.event match {
+            case "view" => ViewEvent(
+              user = event.entityId,
+              item = event.targetEntityId.get,
+              t = event.eventTime.getMillis)
+            case _ => throw new Exception(s"Unexpected event ${event} is read.")
+          }
+        } catch {
+          case e: Exception => {
+            logger.error(s"Cannot convert ${event} to ViewEvent." +
+              s" Exception: ${e}.")
+            throw e
+          }
+        }
+        viewEvent
+      }.cache()
+
+    new TrainingData(
+      users = usersRDD,
+      items = itemsRDD,
+      viewEvents = viewEventsRDD
+    )
+  }
+}
+
+case class User()
+
+case class Item(categories: Option[List[String]])
+
+case class ViewEvent(user: String, item: String, t: Long)
+
+class TrainingData(
+  val users: RDD[(String, User)],
+  val items: RDD[(String, Item)],
+  val viewEvents: RDD[ViewEvent]
+) extends Serializable {
+  override def toString = {
+    s"users: [${users.count()} (${users.take(2).toList}...)]" +
+    s"items: [${items.count()} (${items.take(2).toList}...)]" +
+    s"viewEvents: [${viewEvents.count()}] (${viewEvents.take(2).toList}...)"
+  }
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
new file mode 100644
index 0000000..42ec4d4
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
@@ -0,0 +1,31 @@
+package org.template.ecommercerecommendation
+
+import io.prediction.controller.IEngineFactory
+import io.prediction.controller.Engine
+
+case class Query(
+  user: String,
+  num: Int,
+  categories: Option[Set[String]],
+  whiteList: Option[Set[String]],
+  blackList: Option[Set[String]]
+) extends Serializable
+
+case class PredictedResult(
+  itemScores: Array[ItemScore]
+) extends Serializable
+
+case class ItemScore(
+  item: String,
+  score: Double
+) extends Serializable
+
+object ECommerceRecommendationEngine extends IEngineFactory {
+  def apply() = {
+    new Engine(
+      classOf[DataSource],
+      classOf[Preparator],
+      Map("als" -> classOf[ALSAlgorithm]),
+      classOf[Serving])
+  }
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..4dd45cf
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
@@ -0,0 +1,24 @@
+package org.template.ecommercerecommendation
+
+import io.prediction.controller.PPreparator
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+class Preparator
+  extends PPreparator[TrainingData, PreparedData] {
+
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
+    new PreparedData(
+      users = trainingData.users,
+      items = trainingData.items,
+      viewEvents = trainingData.viewEvents)
+  }
+}
+
+class PreparedData(
+  val users: RDD[(String, User)],
+  val items: RDD[(String, Item)],
+  val viewEvents: RDD[ViewEvent]
+) extends Serializable
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
new file mode 100644
index 0000000..21cf2df
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
@@ -0,0 +1,13 @@
+package org.template.ecommercerecommendation
+
+import io.prediction.controller.LServing
+
+class Serving
+  extends LServing[Query, PredictedResult] {
+
+  override
+  def serve(query: Query,
+    predictedResults: Seq[PredictedResult]): PredictedResult = {
+    predictedResults.head
+  }
+}
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/template.json b/examples/scala-parallel-ecommercerecommendation/weighted-items/template.json
new file mode 100644
index 0000000..932e603
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/template.json
@@ -0,0 +1 @@
+{"pio": {"version": { "min": "0.9.0" }}}
diff --git a/examples/scala-parallel-recommendation/custom-query/.gitignore b/examples/scala-parallel-recommendation/custom-query/.gitignore
new file mode 100644
index 0000000..295cafd
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/.gitignore
@@ -0,0 +1,4 @@
+data/sample_movielens_data.txt
+manifest.json
+target/
+pio.log
diff --git a/examples/scala-parallel-recommendation/custom-query/README.md b/examples/scala-parallel-recommendation/custom-query/README.md
new file mode 100644
index 0000000..39e12de
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/README.md
@@ -0,0 +1,144 @@
+# Filtering result set on custom item field (Recommendation)
+
+## Import data to pio engine
+
+By default, the recommendation template reads the "rate" and "buy" user events and the user entities. You can modify the default DataSource to read your custom items with a specified list of properties.
+
+First of all, you have to import your events into the pio event server.
+
+You can use ImportDataScript.scala to import users, movies, and rate events from the [MovieLens dataset](http://grouplens.org/datasets/movielens/).
+Make sure that the data files are in `UTF-8` encoding.
+
+This command-line tool accepts two arguments:
+
+ 1. the app access key (mandatory)
+ 2. the pio engine url (default is `http://localhost:7070`)
+
+For example, in the sbt console: `> runMain org.template.recommendation.ImportDataScript <access_key>`
+
+## Modify the engine.
+
+This example is based on v0.1.1 of [scala parallel recommendation template](https://github.com/PredictionIO/template-scala-parallel-recommendation/)
+
+In this example we modify the DataSource to read a custom item with one property.
+
+* Modify the Query data structure to your needs; in this example we added the item creation year as a query/filter param:
+
+`case class Query(user: String, num: Int, creationYear: Option[Int] = None)`
+
+* Define the Item case class which will describe your data:
+
+`case class Item(creationYear: Option[Int])`
+
+* Define the utility unmarshaller to read the list of properties into your structure:
+
+```
+object ItemMarshaller {
+ def unmarshall(properties: DataMap): Option[Item] =
+   Some(Item(properties.getOpt[Int]("creationYear")))
+}
+```
+
+* Modify the TrainingData class to hold your custom items:
+
+`class TrainingData(val ratings: RDD[Rating], val items: RDD[(String, Item)])`
+ 
+* Modify the readTraining method in the DataSource to read your custom item:
+
+```
+val itemsRDD = eventsDb.aggregateProperties(
+    appId = dsp.appId,
+    entityType = "item"
+  )(sc).flatMap { case (entityId, properties) ⇒
+    ItemMarshaller.unmarshall(properties).map(entityId → _)
+  }
+```
+
+* Modify the ALSModel to hold your custom items so that they can be filtered later.
+
+```
+class ALSModel(
+ val productFeatures: RDD[(Int, Array[Double])],
+ val itemStringIntMap: BiMap[String, Int],
+ val items: Map[Int, Item])
+```
+
+
+* Modify the train method in ALSAlgorithm to map your items to the numeric IDs needed by the algorithm.
+
+```
+val items: Map[Int, Item] = data.items.map { case (id, item) ⇒
+   (itemStringIntMap(id), item)
+}.collectAsMap.toMap
+```
+
+* Define the filterItems method in ALSAlgorithm to filter the predicted result set according to the query.
+
+```
+private def filterItems(selectedScores: Array[(Int, Double)],
+                       items: Map[Int, Item],
+                       query: Query) =
+ selectedScores.view.filter { case (iId, _) ⇒
+   items(iId).creationYear.map(icr ⇒ query.creationYear.forall(icr >= _))
+     .getOrElse(true)
+ }
+```
+
+* Modify the predict method in the ALSAlgorithm to filter the predicted result set:
+
+`val filteredScores = filterItems(indexScores, model.items, query)`
+
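+After filtering, the rest of `predict` stays close to the original template. A minimal sketch of how the filtered scores are ranked and returned (the complete version is in this example's ALSAlgorithm.scala):
+
+```
+// rank by score and convert item indices back to string IDs;
+// creationYear is carried into ItemScore so that Serving can sort by it later
+implicit val ord = Ordering.by[(Int, Double), Double](_._2)
+val topScores = getTopN(filteredScores, query.num).toArray
+
+val itemScores = topScores.map { case (i, s) ⇒
+  new ItemScore(item = model.itemIntStringMap(i), score = s,
+    creationYear = model.items(i).creationYear)
+}
+
+new PredictedResult(itemScores)
+```
+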
+* And the last step could be to modify the Serving component to sort recommended items by the year of movie creation (our custom property), as Hagay Gazuli mentioned in the [google group](https://groups.google.com/forum/#!searchin/predictionio-user/created$20%7Csort:date/predictionio-user/LEHxuc0Bu_0/W9RkAApvivsJ).
+
+```
+class Serving extends LServing[Query, PredictedResult] {
+  override def serve(query: Query,
+            predictedResults: Seq[PredictedResult]): PredictedResult =
+    predictedResults.headOption.map { result ⇒
+      val preparedItems = result.itemScores
+        .sortBy { case ItemScore(item, score, year) ⇒ year }(
+          Ordering.Option[Int].reverse)
+      new PredictedResult(preparedItems)
+    }.getOrElse(new PredictedResult(Array.empty[ItemScore]))
+}
+```
+
+* Now you can restrict your recommendations to items that were made in or after a certain year:
+
+```> curl -H 'Content-Type: application/json' '127.0.0.1:8000/queries.json' -d '{"user":"100", "num":5, "creationYear":1990}' | python -m json.tool```
+
+The result of curl is piped to the Python `json.tool` module just for convenience, to pretty-print the response from the engine:
+```
+    "itemScores": [
+        {
+            "creationYear": 1996,
+            "item": "831",
+            "score": 518.9319563470217
+        },
+        {
+            "creationYear": 1996,
+            "item": "1619",
+            "score": 15.321792791296401
+        },
+        {
+            "creationYear": 1994,
+            "item": "1554",
+            "score": 628.1994336041231
+        },
+        {
+            "creationYear": 1993,
+            "item": "736",
+            "score": 419.3508956666954
+        },
+        {
+            "creationYear": 1991,
+            "item": "627",
+            "score": 498.28818189885175
+        }
+    ]
+}
+```
+
+That's it! Now your recommendation engine filters the predicted result set on a custom item field.
+
+
diff --git a/examples/scala-parallel-recommendation/custom-query/build.sbt b/examples/scala-parallel-recommendation/custom-query/build.sbt
new file mode 100644
index 0000000..5ba1880
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/build.sbt
@@ -0,0 +1,14 @@
+import AssemblyKeys._
+
+assemblySettings
+
+name := "template-scala-parallel-recommendation-custom-query"
+
+organization := "io.prediction"
+
+def provided  (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
+
+libraryDependencies ++= provided(
+  "io.prediction"    %% "core"          % "0.8.6",
+  "org.apache.spark" %% "spark-core"    % "1.2.0",
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0")
diff --git a/examples/scala-parallel-recommendation/custom-query/data/build.sbt b/examples/scala-parallel-recommendation/custom-query/data/build.sbt
new file mode 100644
index 0000000..a4b18c9
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/data/build.sbt
@@ -0,0 +1,8 @@
+name := "import-movielenses"
+
+organization := "org.template.recommendation"
+
+def provided  (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
+
+libraryDependencies ++= provided(
+  "io.prediction" % "client" % "0.8.3" withSources() withJavadoc())
diff --git a/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala b/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
new file mode 100644
index 0000000..04abc2d
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
@@ -0,0 +1,94 @@
+package org.template.recommendation
+
+import io.prediction.{Event, EventClient}
+import scala.collection.JavaConverters._
+
+import scala.io.Source
+
+/**
+ * @author Maxim Korolyov.
+ */
+object ImportDataScript extends App {
+
+  /**
+   * Imports users, movies and rate events from the MovieLens dataset into pio.
+   * first arg is mandatory - it is app access key
+   * second arg is optional - it is pio engine url (default is `localhost:7070`)
+   * @param args
+   */
+  override def main(args: Array[String]): Unit = {
+    val accessKey = if (args.length == 0) {
+      throw new IllegalArgumentException("access key should be passed")
+    } else args(0)
+
+    val engineUrl = if (args.length > 1) args(1) else "http://localhost:7070"
+    implicit val client = new EventClient(accessKey, engineUrl)
+    println(s"imported ${importMovies.size} movies")
+    println(s"imported ${importUsers.size} users")
+    println(s"imported ${importRateEvents.size} events")
+  }
+
+  /**
+   * imports events to the pio server.
+   * @return the events id list.
+   */
+  def importRateEvents(implicit client: EventClient): Iterator[_] =
+    readCSV("data/u.data", "\t").flatMap { event =>
+      val eventObj = event.lift
+      (for {
+        entityId ← eventObj(0)
+        targetEntityId ← eventObj(1)
+        rating ← eventObj(2)
+      } yield new Event()
+          .event("rate")
+          .entityId(entityId)
+          .entityType("user")
+          .properties(javaMap("rating" → new java.lang.Double(rating)))
+          .targetEntityId(targetEntityId)
+          .targetEntityType("movie")
+        ).map(client.createEvent)
+    }
+
+  def importUsers(implicit ec: EventClient): Iterator[_] =
+    readCSV("data/u.user").flatMap { user ⇒
+      val userObj = user.lift
+      for {
+        uId ← userObj(0)
+        age ← userObj(1)
+      } yield ec.setUser(uId, javaMap("age" → age))
+    }
+
+  /**
+   * imports movies to the pio server
+   * @return an iterator over the imported movies (its size is the number of movies imported)
+   */
+  def importMovies(implicit client: EventClient): Iterator[Unit] = {
+    readCSV("data/u.item").map { movie ⇒
+      val movieObj = movie.lift
+      val releaseYearOpt = movieObj(2)
+        .flatMap(_.split("-").lift(2).map(_.toInt))
+        .map(releaseYear ⇒ "creationYear" → new Integer(releaseYear))
+      for {
+        id ← movieObj(0)
+        title ← movieObj(1).map(t ⇒ "title" → t)
+        releaseYear ← releaseYearOpt
+      } yield client.setItem(id, javaMap(title, releaseYear))
+    }
+  }
+
+  private def javaMap(pair: (String, AnyRef)*) = Map(pair: _*).asJava
+
+  /**
+   * reads a csv file into a list of string vectors, each representing a line
+   * split by the specified delimiter
+   * @param filename path to csv file
+   * @param delimiter delimiter of the properties in the file
+   * @return the list of string arrays made from every file line
+   */
+  private def readCSV(filename: String,
+                      delimiter: String = "\\|") =
+    Source.fromFile(filename, "UTF-8")
+      .getLines()
+      .map(_.split(delimiter).toVector)
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/engine.json b/examples/scala-parallel-recommendation/custom-query/engine.json
new file mode 100644
index 0000000..8f3ff21
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendation.RecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appId": 5
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations": 20,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/project/assembly.sbt b/examples/scala-parallel-recommendation/custom-query/project/assembly.sbt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
new file mode 100644
index 0000000..3a2e903
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
@@ -0,0 +1,150 @@
+package org.template.recommendation
+
+import io.prediction.controller.PAlgorithm
+import io.prediction.controller.Params
+import io.prediction.data.storage.BiMap
+
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.recommendation.ALS
+import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
+
+import grizzled.slf4j.Logger
+
+case class ALSAlgorithmParams(rank: Int, numIterations: Int, lambda: Double,
+                              seed: Option[Long]) extends Params
+
+/**
+ * Use ALS to build item x feature matrix
+ */
+class ALSAlgorithm(val ap: ALSAlgorithmParams)
+  extends PAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  def train(data: PreparedData): ALSModel = {
+    require(!data.ratings.take(1).isEmpty,
+      s"viewEvents in PreparedData cannot be empty." +
+        " Please check if DataSource generates TrainingData" +
+        " and Preprator generates PreparedData correctly.")
+    require(!data.items.take(1).isEmpty,
+      s"items in PreparedData cannot be empty." +
+        " Please check if DataSource generates TrainingData" +
+        " and Preprator generates PreparedData correctly.")
+    // create item's String ID to integer index BiMap
+    val itemStringIntMap = BiMap.stringInt(data.items.keys)
+    val userStringIntMap = BiMap.stringInt(data.ratings.map(_.user))
+
+    // HOWTO: collect Item as Map and convert ID to Int index
+    val items: Map[Int, Item] = data.items.map { case (id, item) ⇒
+      (itemStringIntMap(id), item)
+    }.collectAsMap.toMap
+
+    val mllibRatings = data.ratings.map { r =>
+      // Convert user and item String IDs to Int index for MLlib
+      val iindex = itemStringIntMap.getOrElse(r.item, -1)
+      val uindex = userStringIntMap.getOrElse(r.user, -1)
+
+      if (iindex == -1)
+        logger.info(s"Couldn't convert nonexistent item ID ${r.item}"
+          + " to Int index.")
+
+      (uindex -> iindex) -> 1
+    }.filter { case ((u, i), v) => (i != -1) && (u != -1) }
+    .reduceByKey(_ + _) // aggregate all view events of same item
+    .map { case ((u, i), v) =>  MLlibRating(u, i, v) }
+
+    // MLLib ALS cannot handle empty training data.
+    require(!mllibRatings.take(1).isEmpty,
+      s"mllibRatings cannot be empty." +
+        " Please check if your events contain valid user and item ID.")
+
+    // seed for MLlib ALS
+    val seed = ap.seed.getOrElse(System.nanoTime)
+
+    val m = ALS.trainImplicit(
+      ratings = mllibRatings,
+      rank = ap.rank,
+      iterations = ap.numIterations,
+      lambda = ap.lambda,
+      blocks = -1,
+      alpha = 1.0,
+      seed = seed)
+
+    new ALSModel(productFeatures = m.productFeatures,
+      itemStringIntMap = itemStringIntMap, items = items)
+  }
+
+  def predict(model: ALSModel, query: Query): PredictedResult = {
+    val queryFeatures =
+      model.items.keys.flatMap(model.productFeatures.lookup(_).headOption)
+
+    val indexScores = if (queryFeatures.isEmpty) {
+      logger.info(s"No productFeatures found for query ${query}.")
+      Array[(Int, Double)]()
+    } else {
+      model.productFeatures.mapValues { f ⇒
+        queryFeatures.map(cosine(_, f)).reduce(_ + _)
+      }.filter(_._2 > 0) // keep items with score > 0
+       .collect()
+    }
+
+    // HOWTO: filter predicted results by query.
+    val filteredScores = filterItems(indexScores, model.items, query)
+
+    implicit val ord = Ordering.by[(Int, Double), Double](_._2)
+    val topScores = getTopN(filteredScores, query.num).toArray
+
+    val itemScores = topScores.map { case (i, s) ⇒
+      new ItemScore(item = model.itemIntStringMap(i), score = s,
+        creationYear = model.items(i).creationYear)
+    }
+
+    new PredictedResult(itemScores)
+  }
+
+  private def getTopN[T](s: Seq[T], n: Int)
+                        (implicit ord: Ordering[T]): Iterable[T] = {
+
+    var result = List.empty[T]
+
+    for (x <- s) {
+      if (result.size < n)
+        result = x :: result
+      else {
+        val min = result.min
+        if (ord.compare(x, min) > 0) {
+          result = x :: result.filter(_ != min)
+        }
+      }
+    }
+
+    result.sorted.reverse
+  }
+
+  private def cosine(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.size
+    var i = 0
+    var n1: Double = 0
+    var n2: Double = 0
+    var d: Double = 0
+    while (i < size) {
+      n1 += v1(i) * v1(i)
+      n2 += v2(i) * v2(i)
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    val n1n2 = (math.sqrt(n1) * math.sqrt(n2))
+    if (n1n2 == 0) 0 else (d / n1n2)
+  }
+
+  // HOWTO: actual filter of predicted movie results.
+  // filter selects all movies
+  // that were made after the year specified in the query
+  private def filterItems(selectedScores: Array[(Int, Double)],
+                          items: Map[Int, Item],
+                          query: Query) =
+    selectedScores.view.filter { case (iId, _) ⇒
+      items(iId).creationYear.map(icr ⇒ query.creationYear.forall(icr >= _))
+        .getOrElse(true)
+    }
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
new file mode 100644
index 0000000..8e6201a
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
@@ -0,0 +1,50 @@
+package org.template.recommendation
+
+import io.prediction.controller.IPersistentModel
+import io.prediction.controller.IPersistentModelLoader
+import io.prediction.data.storage.BiMap
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+class ALSModel(
+  val productFeatures: RDD[(Int, Array[Double])],
+  val itemStringIntMap: BiMap[String, Int],
+  // HOWTO: added a map of `generatedItemIntId -> Item` to the algo data model.
+  val items: Map[Int, Item])
+  extends IPersistentModel[ALSAlgorithmParams] with Serializable {
+
+  @transient lazy val itemIntStringMap = itemStringIntMap.inverse
+
+  def save(id: String, params: ALSAlgorithmParams,
+           sc: SparkContext): Boolean = {
+
+    productFeatures.saveAsObjectFile(s"/tmp/${id}/productFeatures")
+    sc.parallelize(Seq(itemStringIntMap))
+      .saveAsObjectFile(s"/tmp/${id}/itemStringIntMap")
+    // HOWTO: save items too as part of algo model
+    sc.parallelize(Seq(items))
+      .saveAsObjectFile(s"/tmp/${id}/items")
+    true
+  }
+
+  override def toString = {
+    s" productFeatures: [${productFeatures.count()}]" +
+      s"(${productFeatures.take(2).toList}...)" +
+      s" itemStringIntMap: [${itemStringIntMap.size}]" +
+      s"(${itemStringIntMap.take(2).toString}...)]" +
+      s" items: [${items.size}]" +
+      s"(${items.take(2).toString}...)]"
+  }
+}
+
+object ALSModel extends IPersistentModelLoader[ALSAlgorithmParams, ALSModel] {
+  def apply(id: String, params: ALSAlgorithmParams, sc: Option[SparkContext]) =
+    new ALSModel(
+      productFeatures = sc.get.objectFile(s"/tmp/${id}/productFeatures"),
+      itemStringIntMap = sc.get
+        .objectFile[BiMap[String, Int]](s"/tmp/${id}/itemStringIntMap").first,
+    // HOWTO: read items too as part of algo model
+      items = sc.get
+        .objectFile[Map[Int, Item]](s"/tmp/${id}/items").first)
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..0221e25
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
@@ -0,0 +1,82 @@
+package org.template.recommendation
+
+import io.prediction.controller.PDataSource
+import io.prediction.controller.EmptyEvaluationInfo
+import io.prediction.controller.EmptyActualResult
+import io.prediction.controller.Params
+import io.prediction.data.storage.{DataMap, Event, Storage}
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import grizzled.slf4j.Logger
+
+case class DataSourceParams(appId: Int) extends Params
+
+case class Item(creationYear: Option[Int])
+object Item {
+  object Fields {
+    val CreationYear = "creationYear"
+  }
+}
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, EmptyActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+  private lazy val EntityType = "movie"
+
+  override
+  def readTraining(sc: SparkContext): TrainingData = {
+    val eventsDb = Storage.getPEvents()
+
+    // create a RDD of (entityID, Item)
+    // HOWTO: collecting items(movies)
+    val itemsRDD = eventsDb.aggregateProperties(
+      appId = dsp.appId,
+      entityType = "item"
+    )(sc).flatMap { case (entityId, properties) ⇒
+      ItemMarshaller.unmarshall(properties).map(entityId → _)
+    }
+
+    // get all user rate events
+    val rateEventsRDD: RDD[Event] = eventsDb.find(
+      appId = dsp.appId,
+      entityType = Some("user"),
+      eventNames = Some(List("rate")), // read "rate"
+      // targetEntityType is optional field of an event.
+      targetEntityType = Some(Some(EntityType)))(sc)
+
+    // collect ratings
+    val ratingsRDD = rateEventsRDD.flatMap { event ⇒
+      try {
+        (event.event match {
+          case "rate" => event.properties.getOpt[Double]("rating")
+          case _ ⇒ None
+        }).map(Rating(event.entityId, event.targetEntityId.get, _))
+      } catch { case e: Exception ⇒
+        logger.error(s"Cannot convert ${event} to Rating. Exception: ${e}.")
+        throw e
+      }
+    }.cache()
+
+    new TrainingData(ratingsRDD, itemsRDD)
+  }
+}
+
+object ItemMarshaller {
+  // HOWTO: implemented unmarshaller to collect properties for filtering.
+  def unmarshall(properties: DataMap): Option[Item] =
+    Some(Item(properties.getOpt[Int](Item.Fields.CreationYear)))
+}
+
+case class Rating(user: String, item: String, rating: Double)
+
+class TrainingData(val ratings: RDD[Rating], val items: RDD[(String, Item)])
+  extends Serializable {
+
+  override def toString =
+    s"ratings: [${ratings.count()}] (${ratings.take(2).toList}...)" +
+    s"items: [${items.count()} (${items.take(2).toList}...)]"
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
new file mode 100644
index 0000000..d1f6e88
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
@@ -0,0 +1,21 @@
+package org.template.recommendation
+
+import io.prediction.controller.IEngineFactory
+import io.prediction.controller.Engine
+
+case class Query(user: String, num: Int, creationYear: Option[Int] = None)
+  extends Serializable
+
+case class PredictedResult(itemScores: Array[ItemScore]) extends Serializable
+
+// HOWTO: added movie creation year to predicted result.
+case class ItemScore(item: String, score: Double, creationYear: Option[Int])
+  extends Serializable
+
+object RecommendationEngine extends IEngineFactory {
+  def apply() =
+    new Engine(classOf[DataSource],
+      classOf[Preparator],
+      Map("als" → classOf[ALSAlgorithm]),
+      classOf[Serving])
+}
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..02a9ce5
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
@@ -0,0 +1,16 @@
+package org.template.recommendation
+
+import io.prediction.controller.PPreparator
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+class Preparator extends PPreparator[TrainingData, PreparedData] {
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData =
+    new PreparedData(ratings = trainingData.ratings, items = trainingData.items)
+}
+
+// HOWTO: added the items (movies) list to the prepared data so that they can be
+// used (filtered and sorted) at the predict/serving stage.
+class PreparedData(val ratings: RDD[Rating], val items: RDD[(String, Item)])
+  extends Serializable
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
new file mode 100644
index 0000000..6b3df76
--- /dev/null
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
@@ -0,0 +1,15 @@
+package org.template.recommendation
+
+import io.prediction.controller.LServing
+
+class Serving extends LServing[Query, PredictedResult] {
+
+  override def serve(query: Query,
+            predictedResults: Seq[PredictedResult]): PredictedResult =
+    predictedResults.headOption.map { result ⇒
+      val preparedItems = result.itemScores
+        .sortBy { case ItemScore(item, score, year) ⇒ year }(
+          Ordering.Option[Int].reverse)
+      new PredictedResult(preparedItems)
+    }.getOrElse(new PredictedResult(Array.empty[ItemScore]))
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/.gitignore b/examples/scala-parallel-similarproduct/recommended-user/.gitignore
new file mode 100644
index 0000000..8241178
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/.gitignore
@@ -0,0 +1 @@
+/pio.sbt
diff --git a/examples/scala-parallel-similarproduct/recommended-user/README.md b/examples/scala-parallel-similarproduct/recommended-user/README.md
new file mode 100644
index 0000000..97aa3b0
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/README.md
@@ -0,0 +1,215 @@
+# Recommended User Template
+
+This example is based on version v0.1.3 of the Similar Product Engine Template. The Similar Product Engine Template has been customized to recommend users instead of items.
+
+The main difference from the original template is the following:
+
+Instead of using user-to-item events to find similar items, user-to-user events are used to find similar users you may also follow, like, etc (depending on which events are used in training and how the events are used). By default, "follow" events are used.
+
+## Overview
+
+This engine template recommends users that are "similar" to other users.
+Similarity is not defined by the user's attributes but by the user's previous actions. By default, it uses the 'follow' action, such that users A and B are considered similar if most users who follow A also follow B.
+
+This template is ideal for recommending users to other users based on their recent actions.
+Use the IDs of a customer's recently viewed users as the *Query*;
+the engine will predict other users that this customer may also follow or like.
+
+This approach works perfectly for customers who are **first-time visitors** or have not signed in.
+Recommendations are made dynamically in *real-time* based on the most recent user preference you provide in the *Query*.
+You can, therefore, recommend users to visitors without knowing a long history about them.
+
+One can also use this template to quickly build the popular "people you may also follow or like" feature by providing users similar to those you have just viewed or followed.
+
+
+## Highlights of the modification from original Similar Product Template
+
+### Engine.scala
+
+- Change the Query and PredictedResult from "items" to "users" and "similarUsers". The "categories" field is removed:
+
+```scala
+case class Query(
+  users: List[String], // MODIFIED
+  num: Int,
+  whiteList: Option[Set[String]],
+  blackList: Option[Set[String]]
+) extends Serializable
+
+case class PredictedResult(
+  similarUserScores: Array[similarUserScore] // MODIFIED
+) extends Serializable
+
+case class similarUserScore(
+  user: String, // MODIFIED
+  score: Double
+) extends Serializable
+```
+
+### DataSource.scala
+
+- Since user-to-user events will be used, itemsRDD is not needed and removed.
+- Change from ViewEvent to FollowEvent in TrainingData
+- Change to read "follow" events
+
+```scala
+    val followEventsRDD: RDD[FollowEvent] = eventsDb.find(
+      appId = dsp.appId,
+      entityType = Some("user"),
+      eventNames = Some(List("follow")),
+      // targetEntityType is optional field of an event.
+      targetEntityType = Some(Some("user")))(sc)
+      // eventsDb.find() returns RDD[Event]
+      .map { event =>
+        val followEvent = try {
+          event.event match {
+            case "follow" => FollowEvent(
+              user = event.entityId,
+              followedUser = event.targetEntityId.get,
+              t = event.eventTime.getMillis)
+            case _ => throw new Exception(s"Unexpected event $event is read.")
+          }
+        } catch {
+          case e: Exception => {
+            logger.error(s"Cannot convert $event to FollowEvent." +
+              s" Exception: $e.")
+            throw e
+          }
+        }
+        followEvent
+      }.cache()
+```
+
+### Preparator.scala
+
+Change to pass the followEvents to PreparedData, as sketched below.
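+
+A minimal sketch of the change, assuming the surrounding class names stay as in the original template:
+
+```scala
+class Preparator extends PPreparator[TrainingData, PreparedData] {
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
+    // pass follow events instead of view events
+    new PreparedData(
+      users = trainingData.users,
+      followEvents = trainingData.followEvents)
+  }
+}
+
+class PreparedData(
+  val users: RDD[(String, User)],
+  val followEvents: RDD[FollowEvent]
+) extends Serializable
+```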
+
+### Algorithm.scala
+
+Use the Spark MLlib ALS algorithm to train the productFeatures vectors by treating the followed user as the "product".
+
+Modify train to use "followEvents" to create the MLlibRating objects, and remove the code that aggregates the number of views:
+
+```scala
+    val mllibRatings = data.followEvents
+      .map { r =>
+        // Convert user and user String IDs to Int index for MLlib
+        val uindex = userStringIntMap.getOrElse(r.user, -1)
+        val iindex = similarUserStringIntMap.getOrElse(r.followedUser, -1)
+
+        if (uindex == -1)
+          logger.info(s"Couldn't convert nonexistent user ID ${r.user}"
+            + " to Int index.")
+
+        if (iindex == -1)
+          logger.info(s"Couldn't convert nonexistent followedUser ID ${r.followedUser}"
+            + " to Int index.")
+
+        ((uindex, iindex), 1)
+      }.filter { case ((u, i), v) =>
+        // keep events with valid user and user index
+        (u != -1) && (i != -1)
+      }
+      //.reduceByKey(_ + _) // aggregate all view events of same user-item pair // NOTE: REMOVED!!
+      .map { case ((u, i), v) =>
+        // MLlibRating requires integer index for user and user
+        MLlibRating(u, i, v)
+      }
+      .cache()
+```
+
+The ALSModel and the predict() function are also changed accordingly.
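+
+For reference, the modified model stores similar-user features instead of item features (the full class is in ALSAlgorithm.scala of this example):
+
+```scala
+class ALSModel(
+  val similarUserFeatures: Map[Int, Array[Double]],
+  val similarUserStringIntMap: BiMap[String, Int],
+  val similarUsers: Map[Int, User]
+) extends Serializable {
+  @transient lazy val similarUserIntStringMap = similarUserStringIntMap.inverse
+}
+```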
+
+
+## Usage
+
+### Event Data Requirements
+
+By default, this template takes the following data from Event Server as Training Data:
+
+- User *$set* events
+- User *follow* User events
+
+### Input Query
+
+- List of UserIDs, which are the targeted users
+- N (number of users to be recommended)
+- List of white-listed UserIds (optional)
+- List of black-listed UserIds (optional)
+
+The template also supports a black-list and a white-list. If a white-list is provided, the engine will include only those users in its recommendations.
+Likewise, if a black-list is provided, the engine will exclude those users from its recommendations.
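+
+Internally, both lists are applied in the algorithm's candidate check. A slightly simplified sketch of the logic (the full version is in ALSAlgorithm.scala of this example):
+
+```scala
+// a user index i is a valid candidate only if it passes the white-list,
+// is not black-listed, and is not one of the query users themselves
+def isCandidateSimilarUser(
+  i: Int,
+  queryList: Set[Int],
+  whiteList: Option[Set[Int]],
+  blackList: Option[Set[Int]]): Boolean = {
+  whiteList.map(_.contains(i)).getOrElse(true) &&
+  blackList.map(!_.contains(i)).getOrElse(true) &&
+  !queryList.contains(i)
+}
+```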
+
+## Documentation
+
+Refer to http://docs.prediction.io/templates/similarproduct/quickstart/, keeping in mind the differences mentioned above.
+
+## Development Notes
+
+### import sample data
+
+```
+$ python data/import_eventserver.py --access_key <your_access_key>
+```
+
+### sample queries
+
+normal:
+
+```
+curl -H "Content-Type: application/json" \
+-d '{ "users": ["u1", "u3", "u10", "u2", "u5", "u31", "u9"], "num": 10}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
+
+```
+curl -H "Content-Type: application/json" \
+-d '{
+  "users": ["u1", "u3", "u10", "u2", "u5", "u31", "u9"],
+  "num": 10
+}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
+
+```
+curl -H "Content-Type: application/json" \
+-d '{
+  "users": ["u1", "u3", "u10", "u2", "u5", "u31", "u9"],
+  "num": 10,
+  "whiteList": ["u21", "u26", "u40"]
+}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
+
+```
+curl -H "Content-Type: application/json" \
+-d '{
+  "users": ["u1", "u3", "u10", "u2", "u5", "u31", "u9"],
+  "num": 10,
+  "blackList": ["u21", "u26", "u40"]
+}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
+
+unknown user:
+
+```
+curl -H "Content-Type: application/json" \
+-d '{ "users": ["unk1", "u3", "u10", "u2", "u5", "u31", "u9"], "num": 10}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
+
+
+all unknown users:
+
+```
+curl -H "Content-Type: application/json" \
+-d '{ "users": ["unk1", "unk2", "unk3", "unk4"], "num": 10}' \
+http://localhost:8000/queries.json \
+-w %{time_connect}:%{time_starttransfer}:%{time_total}
+```
diff --git a/examples/scala-parallel-similarproduct/recommended-user/build.sbt b/examples/scala-parallel-similarproduct/recommended-user/build.sbt
new file mode 100644
index 0000000..9ba8a52
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/build.sbt
@@ -0,0 +1,10 @@
+assemblySettings
+
+name := "template-scala-parallel-recommendeduser"
+
+organization := "io.prediction"
+
+libraryDependencies ++= Seq(
+  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
diff --git a/examples/scala-parallel-similarproduct/recommended-user/data/import_eventserver.py b/examples/scala-parallel-similarproduct/recommended-user/data/import_eventserver.py
new file mode 100644
index 0000000..d6527c6
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/data/import_eventserver.py
@@ -0,0 +1,57 @@
+"""
+Import sample data for recommended user engine
+"""
+
+import predictionio
+import argparse
+import random
+
+SEED = 3
+
+def import_events(client):
+  random.seed(SEED)
+  count = 0
+  print client.get_status()
+  print "Importing data..."
+
+  # generate 50 users, with user ids u1,u2,....,u50
+  user_ids = ["u%s" % i for i in range(1, 51)]
+  for user_id in user_ids:
+    print "Set user", user_id
+    client.create_event(
+      event="$set",
+      entity_type="user",
+      entity_id=user_id
+    )
+    count += 1
+
+  # each user randomly follows 10 users
+  for user_id in user_ids:
+    for followed_user in random.sample(user_ids, 10):
+      print "User", user_id ,"follows User", followed_user
+      client.create_event(
+        event="follow",
+        entity_type="user",
+        entity_id=user_id,
+        target_entity_type="user",
+        target_entity_id=followed_user
+      )
+      count += 1
+
+  print "%s events are imported." % count
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+    description="Import sample data for recommended user engine")
+  parser.add_argument('--access_key', default='invalid_access_key')
+  parser.add_argument('--url', default="http://localhost:7070")
+
+  args = parser.parse_args()
+  print args
+
+  client = predictionio.EventClient(
+    access_key=args.access_key,
+    url=args.url,
+    threads=5,
+    qsize=500)
+  import_events(client)
diff --git a/examples/scala-parallel-similarproduct/recommended-user/data/send_query.py b/examples/scala-parallel-similarproduct/recommended-user/data/send_query.py
new file mode 100644
index 0000000..989f200
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/data/send_query.py
@@ -0,0 +1,7 @@
+"""
+Send sample query to prediction engine
+"""
+
+import predictionio
+engine_client = predictionio.EngineClient(url="http://localhost:8000")
+print engine_client.send_query({"users": ["u1", "u3"], "num": 10})
diff --git a/examples/scala-parallel-similarproduct/recommended-user/engine.json b/examples/scala-parallel-similarproduct/recommended-user/engine.json
new file mode 100644
index 0000000..8a9da33
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendeduser.RecommendedUserEngine",
+  "datasource": {
+    "params" : {
+      "appId": 5
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations" : 20,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/project/assembly.sbt b/examples/scala-parallel-similarproduct/recommended-user/project/assembly.sbt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
diff --git a/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt b/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
new file mode 100644
index 0000000..8346a96
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
new file mode 100644
index 0000000..c355bb6
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
@@ -0,0 +1,220 @@
+package org.template.recommendeduser
+
+import grizzled.slf4j.Logger
+import io.prediction.controller.{P2LAlgorithm, Params}
+import io.prediction.data.storage.BiMap
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.recommendation.{ALS, Rating => MLlibRating}
+
+import scala.collection.mutable
+
+case class ALSAlgorithmParams(
+  rank: Int,
+  numIterations: Int,
+  lambda: Double,
+  seed: Option[Long]) extends Params
+
+class ALSModel(
+  val similarUserFeatures: Map[Int, Array[Double]],
+  val similarUserStringIntMap: BiMap[String, Int],
+  val similarUsers: Map[Int, User]
+) extends Serializable {
+
+  @transient lazy val similarUserIntStringMap = similarUserStringIntMap.inverse
+
+  override def toString = {
+    s" similarUserFeatures: [${similarUserFeatures.size}]" +
+    s"(${similarUserFeatures.take(2).toList}...)" +
+    s" similarUserStringIntMap: [${similarUserStringIntMap.size}]" +
+    s"(${similarUserStringIntMap.take(2).toString()}...)]" +
+    s" users: [${similarUsers.size}]" +
+    s"(${similarUsers.take(2).toString()}...)]"
+  }
+}
+
+/**
+  * Use ALS to build user x feature matrix
+  */
+class ALSAlgorithm(val ap: ALSAlgorithmParams)
+  extends P2LAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  def train(sc: SparkContext, data: PreparedData): ALSModel = {
+    require(data.followEvents.take(1).nonEmpty,
+      s"followEvents in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preprator generates PreparedData correctly.")
+    require(data.users.take(1).nonEmpty,
+      s"users in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preprator generates PreparedData correctly.")
+    // create User String ID to integer index BiMap
+    val userStringIntMap = BiMap.stringInt(data.users.keys)
+    val similarUserStringIntMap = userStringIntMap
+
+    // collect SimilarUser as Map and convert ID to Int index
+    val similarUsers: Map[Int, User] = data.users.map { case (id, similarUser) =>
+      (similarUserStringIntMap(id), similarUser)
+    }.collectAsMap().toMap
+
+    val mllibRatings = data.followEvents
+      .map { r =>
+        // Convert user and user String IDs to Int index for MLlib
+        val uindex = userStringIntMap.getOrElse(r.user, -1)
+        val iindex = similarUserStringIntMap.getOrElse(r.followedUser, -1)
+
+        if (uindex == -1)
+          logger.info(s"Couldn't convert nonexistent user ID ${r.user}"
+            + " to Int index.")
+
+        if (iindex == -1)
+          logger.info(s"Couldn't convert nonexistent followedUser ID ${r.followedUser}"
+            + " to Int index.")
+
+        ((uindex, iindex), 1)
+      }.filter { case ((u, i), v) =>
+        // keep events with valid user and user index
+        (u != -1) && (i != -1)
+      }
+      .map { case ((u, i), v) =>
+        // MLlibRating requires integer index for user and user
+        MLlibRating(u, i, v)
+      }
+      .cache()
+
+    // MLLib ALS cannot handle empty training data.
+    require(mllibRatings.take(1).nonEmpty,
+      s"mllibRatings cannot be empty." +
+      " Please check if your events contain valid user and followedUser ID.")
+
+    // seed for MLlib ALS
+    val seed = ap.seed.getOrElse(System.nanoTime)
+
+    val m = ALS.trainImplicit(
+      ratings = mllibRatings,
+      rank = ap.rank,
+      iterations = ap.numIterations,
+      lambda = ap.lambda,
+      blocks = -1,
+      alpha = 1.0,
+      seed = seed)
+
+    new ALSModel(
+      similarUserFeatures = m.productFeatures.collectAsMap().toMap,
+      similarUserStringIntMap = similarUserStringIntMap,
+      similarUsers = similarUsers
+    )
+  }
+
+  def predict(model: ALSModel, query: Query): PredictedResult = {
+
+    val similarUserFeatures = model.similarUserFeatures
+
+    // convert similarUsers to Int index
+    val queryList: Set[Int] = query.users.map(model.similarUserStringIntMap.get)
+      .flatten.toSet
+
+    val queryFeatures: Vector[Array[Double]] = queryList.toVector
+      // similarUserFeatures may not contain the requested user
+      .map { similarUser => similarUserFeatures.get(similarUser) }
+      .flatten
+
+    val whiteList: Option[Set[Int]] = query.whiteList.map( set =>
+      set.map(model.similarUserStringIntMap.get).flatten
+    )
+    val blackList: Option[Set[Int]] = query.blackList.map ( set =>
+      set.map(model.similarUserStringIntMap.get).flatten
+    )
+
+    val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+
+    val indexScores: Array[(Int, Double)] = if (queryFeatures.isEmpty) {
+      logger.info(s"No similarUserFeatures vector for query users ${query.users}.")
+      Array[(Int, Double)]()
+    } else {
+      similarUserFeatures.par // convert to parallel collection
+        .mapValues { f =>
+          queryFeatures.map { qf =>
+            cosine(qf, f)
+          }.sum
+        }
+        .filter(_._2 > 0) // keep similarUsers with score > 0
+        .seq // convert back to sequential collection
+        .toArray
+    }
+
+    val filteredScore = indexScores.view.filter { case (i, v) =>
+      isCandidateSimilarUser(
+        i = i,
+        similarUsers = model.similarUsers,
+        queryList = queryList,
+        whiteList = whiteList,
+        blackList = blackList
+      )
+    }
+
+    val topScores = getTopN(filteredScore, query.num)(ord).toArray
+
+    val similarUserScores = topScores.map { case (i, s) =>
+      new similarUserScore(
+        user = model.similarUserIntStringMap(i),
+        score = s
+      )
+    }
+
+    new PredictedResult(similarUserScores)
+  }
+
+  private
+  def getTopN[T](s: Seq[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+    val q = mutable.PriorityQueue[T]()
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+  private
+  def cosine(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.length
+    var i = 0
+    var n1: Double = 0
+    var n2: Double = 0
+    var d: Double = 0
+    while (i < size) {
+      n1 += v1(i) * v1(i)
+      n2 += v2(i) * v2(i)
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    val n1n2 = math.sqrt(n1) * math.sqrt(n2)
+    if (n1n2 == 0) 0 else d / n1n2
+  }
+
+  private
+  def isCandidateSimilarUser(
+    i: Int,
+    similarUsers: Map[Int, User],
+    queryList: Set[Int],
+    whiteList: Option[Set[Int]],
+    blackList: Option[Set[Int]]
+  ): Boolean = {
+    whiteList.map(_.contains(i)).getOrElse(true) &&
+    blackList.map(!_.contains(i)).getOrElse(true) &&
+    // discard similarUsers in query as well
+    (!queryList.contains(i))
+  }
+
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..768d79b
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
@@ -0,0 +1,84 @@
+package org.template.recommendeduser
+
+import grizzled.slf4j.Logger
+import io.prediction.controller.{EmptyActualResult, EmptyEvaluationInfo, PDataSource, Params}
+import io.prediction.data.storage.Storage
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+case class DataSourceParams(appId: Int) extends Params
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, EmptyActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  override
+  def readTraining(sc: SparkContext): TrainingData = {
+    val eventsDb = Storage.getPEvents()
+
+    // create an RDD of (entityId, User)
+    val usersRDD: RDD[(String, User)] = eventsDb.aggregateProperties(
+      appId = dsp.appId,
+      entityType = "user"
+    )(sc).map { case (entityId, properties) =>
+      val user = try {
+        User()
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get properties $properties of" +
+            s" user $entityId. Exception: $e.")
+          throw e
+        }
+      }
+      (entityId, user)
+    }.cache()
+
+    // get all "user" "follow" "followedUser" events
+    val followEventsRDD: RDD[FollowEvent] = eventsDb.find(
+      appId = dsp.appId,
+      entityType = Some("user"),
+      eventNames = Some(List("follow")),
+      // targetEntityType is an optional field of an event.
+      targetEntityType = Some(Some("user")))(sc)
+      // eventsDb.find() returns RDD[Event]
+      .map { event =>
+        val followEvent = try {
+          event.event match {
+            case "follow" => FollowEvent(
+              user = event.entityId,
+              followedUser = event.targetEntityId.get,
+              t = event.eventTime.getMillis)
+            case _ => throw new Exception(s"Unexpected event $event was read.")
+          }
+        } catch {
+          case e: Exception => {
+            logger.error(s"Cannot convert $event to FollowEvent." +
+              s" Exception: $e.")
+            throw e
+          }
+        }
+        followEvent
+      }.cache()
+
+    new TrainingData(
+      users = usersRDD,
+      followEvents = followEventsRDD
+    )
+  }
+}
+
+case class User()
+
+case class FollowEvent(user: String, followedUser: String, t: Long)
+
+class TrainingData(
+  val users: RDD[(String, User)],
+  val followEvents: RDD[FollowEvent]
+) extends Serializable {
+  override def toString = {
+    s"users: [${users.count()} (${users.take(2).toList}...)]" +
+    s"followEvents: [${followEvents.count()}] (${followEvents.take(2).toList}...)"
+  }
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
new file mode 100644
index 0000000..d8cdc90
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
@@ -0,0 +1,30 @@
+package org.template.recommendeduser
+
+import io.prediction.controller.IEngineFactory
+import io.prediction.controller.Engine
+
+case class Query(
+  users: List[String],
+  num: Int,
+  whiteList: Option[Set[String]],
+  blackList: Option[Set[String]]
+) extends Serializable
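+
+// For illustration only (hypothetical values): a deployed engine built from this
+// template would accept a query like the following JSON, which is deserialized
+// into the Query case class above:
+//   {"users": ["u1", "u3"], "num": 10,
+//    "whiteList": ["u21", "u22"], "blackList": ["u11"]}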
+
+case class PredictedResult(
+  similarUserScores: Array[similarUserScore]
+) extends Serializable
+
+case class similarUserScore(
+  user: String,
+  score: Double
+) extends Serializable
+
+object RecommendedUserEngine extends IEngineFactory {
+  def apply() = {
+    new Engine(
+      classOf[DataSource],
+      classOf[Preparator],
+      Map("als" -> classOf[ALSAlgorithm]),
+      classOf[Serving])
+  }
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..26a4147
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
@@ -0,0 +1,20 @@
+package org.template.recommendeduser
+
+import io.prediction.controller.PPreparator
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+class Preparator
+  extends PPreparator[TrainingData, PreparedData] {
+
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
+    new PreparedData(
+      users = trainingData.users,
+      followEvents = trainingData.followEvents)
+  }
+}
+
+class PreparedData(
+  val users: RDD[(String, User)],
+  val followEvents: RDD[FollowEvent]
+) extends Serializable
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
new file mode 100644
index 0000000..89cd696
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
@@ -0,0 +1,13 @@
+package org.template.recommendeduser
+
+import io.prediction.controller.LServing
+
+class Serving
+  extends LServing[Query, PredictedResult] {
+
+  override
+  def serve(query: Query,
+    predictedResults: Seq[PredictedResult]): PredictedResult = {
+    predictedResults.head
+  }
+}
diff --git a/examples/scala-parallel-similarproduct/recommended-user/template.json b/examples/scala-parallel-similarproduct/recommended-user/template.json
new file mode 100644
index 0000000..932e603
--- /dev/null
+++ b/examples/scala-parallel-similarproduct/recommended-user/template.json
@@ -0,0 +1 @@
+{"pio": {"version": { "min": "0.9.0" }}}
diff --git a/make-distribution.sh b/make-distribution.sh
index 8ea2091..33e4783 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -17,42 +17,42 @@
 set -e
 
 FWDIR="$(cd `dirname $0`; pwd)"
-DISTDIR="$FWDIR/dist"
+DISTDIR="${FWDIR}/dist"
 
-VERSION=$(grep version $FWDIR/build.sbt | grep ThisBuild | grep -o '".*"' | sed 's/"//g')
+VERSION=$(grep version ${FWDIR}/build.sbt | grep ThisBuild | grep -o '".*"' | sed 's/"//g')
 
 echo "Building binary distribution for PredictionIO $VERSION..."
 
-cd $FWDIR
+cd ${FWDIR}
 sbt/sbt common/publishLocal core/publishLocal data/publishLocal e2/publishLocal tools/assembly
 
-cd $FWDIR
-rm -rf $DISTDIR
-mkdir -p $DISTDIR/bin
-mkdir -p $DISTDIR/conf
-mkdir -p $DISTDIR/lib
-mkdir -p $DISTDIR/project
-mkdir -p $DISTDIR/sbt
+cd ${FWDIR}
+rm -rf ${DISTDIR}
+mkdir -p ${DISTDIR}/bin
+mkdir -p ${DISTDIR}/conf
+mkdir -p ${DISTDIR}/lib
+mkdir -p ${DISTDIR}/project
+mkdir -p ${DISTDIR}/sbt
 
-cp $FWDIR/bin/* $DISTDIR/bin
-cp $FWDIR/conf/* $DISTDIR/conf
-cp $FWDIR/project/build.properties $DISTDIR/project
-cp $FWDIR/sbt/sbt $DISTDIR/sbt
-cp $FWDIR/sbt/sbt-launch-lib.bash $DISTDIR/sbt
-cp $FWDIR/assembly/*assembly*jar $DISTDIR/lib
+cp ${FWDIR}/bin/* ${DISTDIR}/bin
+cp ${FWDIR}/conf/* ${DISTDIR}/conf
+cp ${FWDIR}/project/build.properties ${DISTDIR}/project
+cp ${FWDIR}/sbt/sbt ${DISTDIR}/sbt
+cp ${FWDIR}/sbt/sbt-launch-lib.bash ${DISTDIR}/sbt
+cp ${FWDIR}/assembly/*assembly*jar ${DISTDIR}/lib
 
-rm -f $DISTDIR/lib/*javadoc.jar
-rm -f $DISTDIR/lib/*sources.jar
-rm -f $DISTDIR/conf/pio-env.sh
-mv $DISTDIR/conf/pio-env.sh.template $DISTDIR/conf/pio-env.sh
+rm -f ${DISTDIR}/lib/*javadoc.jar
+rm -f ${DISTDIR}/lib/*sources.jar
+rm -f ${DISTDIR}/conf/pio-env.sh
+mv ${DISTDIR}/conf/pio-env.sh.template ${DISTDIR}/conf/pio-env.sh
 
-touch $DISTDIR/RELEASE
+touch ${DISTDIR}/RELEASE
 
 TARNAME="PredictionIO-$VERSION.tar.gz"
 TARDIR="PredictionIO-$VERSION"
-cp -r $DISTDIR $TARDIR
+cp -r ${DISTDIR} ${TARDIR}
 
-tar zcvf $TARNAME $TARDIR
-rm -rf $TARDIR
+tar zcvf ${TARNAME} ${TARDIR}
+rm -rf ${TARDIR}
 
-echo "PredictionIO binary distribution created at $TARNAME"
+echo -e "\033[0;32mPredictionIO binary distribution created at $TARNAME\033[0m"
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 7125c87..8c42c91 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -4,8 +4,6 @@
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.0.3")
 
-addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
-
 addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "0.2.1")
 
 addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.6.0")
diff --git a/tools/build.sbt b/tools/build.sbt
index 01e23ff..4e330bb 100644
--- a/tools/build.sbt
+++ b/tools/build.sbt
@@ -32,6 +32,8 @@
   "org.json4s"             %% "json4s-ext"     % json4sVersion.value,
   "org.scalaj"             %% "scalaj-http"    % "1.1.0",
   "org.spark-project.akka" %% "akka-actor"     % "2.3.4-spark",
+  "io.spray" %% "spray-testkit" % "1.3.2" % "test",
+  "org.specs2" %% "specs2" % "2.3.13" % "test",
   "org.spark-project.akka" %% "akka-slf4j"     % "2.3.4-spark")
 
 excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
@@ -44,9 +46,10 @@
   }}
 }
 
+// skip test in assembly
+test in assembly := {}
+
 outputPath in assembly := baseDirectory.value.getAbsoluteFile.getParentFile /
   "assembly" / ("pio-assembly-" + version.value + ".jar")
 
 cleanFiles <+= baseDirectory { base => base.getParentFile / "assembly" }
-
-net.virtualvoid.sbt.graph.Plugin.graphSettings
diff --git a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
index 1a190bd..e30f4a9 100644
--- a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
+++ b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
@@ -162,8 +162,10 @@
       }) ++
       ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
         getOrElse(Seq()) ++
-      ca.common.engineParamsGenerator.map(x => Seq("--engine-params-generator-class", x)).
-        getOrElse(Seq()) ++
+      // Use engineParamsGenerator as the engine params generator class if it is
+      // specified; otherwise fall back to the evaluation class.
+      ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
+        .map(x => Seq("--engine-params-generator-class", x))
+        .getOrElse(Seq()) ++ 
       (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq())
     info(s"Submission command: ${sparkSubmit.mkString(" ")}")
     Process(sparkSubmit, None, "SPARK_YARN_USER_ENV" -> pioEnvVars).!
diff --git a/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala b/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala
new file mode 100644
index 0000000..d8d7930
--- /dev/null
+++ b/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala
@@ -0,0 +1,154 @@
+/** Copyright 2014 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.tools.admin
+
+import akka.actor.{Actor, ActorSystem, Props}
+import akka.event.Logging
+import akka.io.IO
+import akka.util.Timeout
+import io.prediction.data.api.StartServer
+import io.prediction.data.storage.Storage
+import org.json4s.DefaultFormats
+
+import java.util.concurrent.TimeUnit
+
+import spray.can.Http
+import spray.http.{MediaTypes, StatusCodes}
+import spray.httpx.Json4sSupport
+import spray.routing._
+
+class AdminServiceActor(val commandClient: CommandClient)
+  extends HttpServiceActor {
+
+  object Json4sProtocol extends Json4sSupport {
+    implicit def json4sFormats = DefaultFormats
+  }
+
+  import Json4sProtocol._
+
+  val log = Logging(context.system, this)
+
+  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
+  // Futures
+  implicit def executionContext = actorRefFactory.dispatcher
+  implicit val timeout = Timeout(5, TimeUnit.SECONDS)
+
+  // for better message response
+  val rejectionHandler = RejectionHandler {
+    case MalformedRequestContentRejection(msg, _) :: _ =>
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    case MissingQueryParamRejection(msg) :: _ =>
+      complete(StatusCodes.NotFound,
+        Map("message" -> s"missing required query parameter ${msg}."))
+    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ =>
+      complete(StatusCodes.Unauthorized, challengeHeaders,
+        Map("message" -> s"Invalid accessKey."))
+  }
+
+  val jsonPath = """(.+)\.json$""".r
+
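+  // Route summary: GET / returns a liveness status; GET /cmd/app lists apps;
+  // POST /cmd/app creates an app; DELETE /cmd/app/{name} deletes an app;
+  // DELETE /cmd/app/{name}/data deletes an app's data.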
+  val route: Route =
+    pathSingleSlash {
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete(Map("status" -> "alive"))
+        }
+      }
+    } ~
+      path("cmd" / "app" / Segment / "data") {
+        appName => {
+          delete {
+            respondWithMediaType(MediaTypes.`application/json`) {
+              complete(commandClient.futureAppDataDelete(appName))
+            }
+          }
+        }
+      } ~
+      path("cmd" / "app" / Segment) {
+        appName => {
+          delete {
+            respondWithMediaType(MediaTypes.`application/json`) {
+              complete(commandClient.futureAppDelete(appName))
+            }
+          }
+        }
+      } ~
+      path("cmd" / "app") {
+        get {
+          respondWithMediaType(MediaTypes.`application/json`) {
+            complete(commandClient.futureAppList())
+          }
+        } ~
+          post {
+            entity(as[AppRequest]) {
+              appArgs => respondWithMediaType(MediaTypes.`application/json`) {
+                complete(commandClient.futureAppNew(appArgs))
+              }
+            }
+          }
+      }
+  def receive = runRoute(route)
+}
+
+class AdminServerActor(val commandClient: CommandClient) extends Actor {
+  val log = Logging(context.system, this)
+  val child = context.actorOf(
+    Props(classOf[AdminServiceActor], commandClient),
+    "AdminServiceActor")
+
+  implicit val system = context.system
+
+  def receive = {
+    case StartServer(host, portNum) => {
+      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
+
+    }
+    case m: Http.Bound => log.info("Bound received. AdminServer is ready.")
+    case m: Http.CommandFailed => log.error("Command failed.")
+    case _ => log.error("Unknown message.")
+  }
+}
+
+case class AdminServerConfig(
+  ip: String = "localhost",
+  port: Int = 7071
+)
+
+object AdminServer {
+  def createAdminServer(config: AdminServerConfig) = {
+    implicit val system = ActorSystem("AdminServerSystem")
+
+    val commandClient = new CommandClient(
+      appClient = Storage.getMetaDataApps,
+      accessKeyClient = Storage.getMetaDataAccessKeys,
+      eventClient = Storage.getLEvents()
+    )
+
+    val serverActor = system.actorOf(
+      Props(classOf[AdminServerActor], commandClient),
+      "AdminServerActor")
+    serverActor ! StartServer(config.ip, config.port)
+    system.awaitTermination
+  }
+}
+
+object AdminRun {
+  def main (args: Array[String]) {
+    AdminServer.createAdminServer(AdminServerConfig(
+      ip = "localhost",
+      port = 7071))
+  }
+}
diff --git a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
new file mode 100644
index 0000000..61c5c61
--- /dev/null
+++ b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
@@ -0,0 +1,159 @@
+/** Copyright 2014 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  * http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.tools.admin
+
+import io.prediction.data.storage._
+
+import scala.concurrent.{ExecutionContext, Future}
+
+abstract class BaseResponse()
+
+case class GeneralResponse(
+  status: Int = 0,
+  message: String = ""
+) extends BaseResponse()
+
+case class AppRequest(
+  id: Int = 0,
+  name: String = "",
+  description: String = ""
+)
+
+case class TrainRequest(
+  enginePath: String = ""
+)
+case class AppResponse(
+  id: Int = 0,
+  name: String = "",
+  keys: Seq[AccessKey]
+) extends BaseResponse()
+
+case class AppNewResponse(
+  status: Int = 0,
+  message: String = "",
+  id: Int = 0,
+  name: String = "",
+  key: String
+) extends BaseResponse()
+
+case class AppListResponse(
+  status: Int = 0,
+  message: String = "",
+  apps: Seq[AppResponse]
+) extends BaseResponse()
+
+class CommandClient(
+  val appClient: Apps,
+  val accessKeyClient: AccessKeys,
+  val eventClient: LEvents
+) {
+
+  def futureAppNew(req: AppRequest)(implicit ec: ExecutionContext): Future[BaseResponse] = Future {
+    val response = appClient.getByName(req.name) map { app =>
+      GeneralResponse(0, s"App ${req.name} already exists. Aborting.")
+    } getOrElse {
+      appClient.get(req.id) map {
+        app2 =>
+          GeneralResponse(0, s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
+            "Aborting.")
+      } getOrElse {
+        val appid = appClient.insert(App(
+          id = Option(req.id).getOrElse(0),
+          name = req.name,
+          description = Option(req.description)))
+        appid map { id =>
+          val dbInit = eventClient.init(id)
+          val r = if (dbInit) {
+            val accessKey = accessKeyClient.insert(AccessKey(
+              key = "",
+              appid = id,
+              events = Seq()))
+            accessKey map { k =>
+              new AppNewResponse(1, "App created successfully.", id, req.name, k)
+            } getOrElse {
+              GeneralResponse(0, s"Unable to create new access key.")
+            }
+          } else {
+            GeneralResponse(0, s"Unable to initialize Event Store for this app ID: ${id}.")
+          }
+          r
+        } getOrElse {
+          GeneralResponse(0, s"Unable to create new app.")
+        }
+      }
+    }
+    response
+  }
+
+  def futureAppList()(implicit ec: ExecutionContext): Future[AppListResponse] = Future {
+    val apps = appClient.getAll().sortBy(_.name)
+    val appsRes = apps.map {
+      app => {
+        new AppResponse(app.id, app.name, accessKeyClient.getByAppid(app.id))
+      }
+    }
+    new AppListResponse(1, "Successfully retrieved app list.", appsRes)
+  }
+
+  def futureAppDataDelete(appName: String)(implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+    val response = appClient.getByName(appName) map { app =>
+      val data = if (eventClient.remove(app.id)) {
+        GeneralResponse(1, s"Removed Event Store for this app ID: ${app.id}")
+      } else {
+        GeneralResponse(0, s"Error removing Event Store for this app.")
+      }
+
+      val dbInit = eventClient.init(app.id)
+      val data2 = if (dbInit) {
+        GeneralResponse(1, s"Initialized Event Store for this app ID: ${app.id}.")
+      } else {
+        GeneralResponse(0, s"Unable to initialize Event Store for this appId:" +
+          s" ${app.id}.")
+      }
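+      // The combined status is the product of the two, so it is 1 (success)
+      // only when both the removal and the re-initialization succeed.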
+      GeneralResponse(data.status * data2.status, data.message + data2.message)
+    } getOrElse {
+      GeneralResponse(0, s"App ${appName} does not exist.")
+    }
+    response
+  }
+
+  def futureAppDelete(appName: String)(implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+
+    val response = appClient.getByName(appName) map { app =>
+      val data = if (eventClient.remove(app.id)) {
+        if (Storage.getMetaDataApps.delete(app.id)) {
+          GeneralResponse(1, s"App successfully deleted")
+        } else {
+          GeneralResponse(0, s"Error deleting app ${app.name}.")
+        }
+      } else {
+        GeneralResponse(0, s"Error removing Event Store for app ${app.name}.");
+      }
+      data
+    } getOrElse {
+      GeneralResponse(0, s"App ${appName} does not exist.")
+    }
+    response
+  }
+
+  def futureTrain(req: TrainRequest)(implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+    null  // TODO: train command is not implemented yet
+  }
+}
diff --git a/tools/src/main/scala/io/prediction/tools/admin/README.md b/tools/src/main/scala/io/prediction/tools/admin/README.md
new file mode 100644
index 0000000..475a3de
--- /dev/null
+++ b/tools/src/main/scala/io/prediction/tools/admin/README.md
@@ -0,0 +1,161 @@
+## Admin API (under development)
+
+### Start Admin HTTP Server without bin/pio (for development)
+
+NOTE: Elasticsearch and HBase should be running first.
+
+```
+$ sbt/sbt "tools/compile"
+$ set -a
+$ source conf/pio-env.sh
+$ set +a
+$ sbt/sbt "tools/run-main io.prediction.tools.admin.AdminRun"
+```
+
+### Unit test (Very minimal)
+
+```
+$ set -a
+$ source conf/pio-env.sh
+$ set +a
+$ sbt/sbt "tools/test-only io.prediction.tools.admin.AdminAPISpec"
+```
+
+### Start with pio command adminserver
+
+```
+$ pio adminserver
+```
+
+The Admin Server URL defaults to `http://localhost:7071`.
+
+The host and port can be specified using the `--ip` and `--port` parameters:
+
+```
+$ pio adminserver --ip 127.0.0.1 --port 7080
+```
+
+### Current Supported Commands
+
+#### Check status
+
+```
+$ curl -i http://localhost:7071/
+
+{"status":"alive"}
+```
+
+#### Get list of apps
+
+```
+$ curl -i -X GET http://localhost:7071/cmd/app
+
+{"status":1,"message":"Successful retrieved app list.","apps":[{"id":12,"name":"scratch","keys":[{"key":"gtPgVMIr3uthus1QJWFBcIjNf6d1SNuhaOWQAgdLbOBP1eRWMNIJWl6SkHgI1OoN","appid":12,"events":[]}]},{"id":17,"name":"test-ecommercerec","keys":[{"key":"zPkr6sBwQoBwBjVHK2hsF9u26L38ARSe19QzkdYentuomCtYSuH0vXP5fq7advo4","appid":17,"events":[]}]}]}
+```
+
+#### Create a new app
+
+```
+$ curl -i -X POST http://localhost:7071/cmd/app \
+-H "Content-Type: application/json" \
+-d '{ "name" : "my_new_app" }'
+
+{"status":1,"message":"App created successfully.","id":19,"name":"my_new_app","keys":[{"key":"","appid":19,"events":[]}]}
+```
+
+#### Delete data of app
+
+```
+$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app/data
+```
+
+#### Delete app
+
+```
+$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app
+
+{"status":1,"message":"App successfully deleted"}
+```
+
+
+## API Doc (To be updated)
+
+### app list:
+GET http://localhost:7071/cmd/app
+
+OK Response:
+{
+  "status": <STATUS>,
+  "message": <MESSAGE>,
+  "apps": [
+    { "name": "<APP_NAME>",
+      "id": <APP_ID>,
+      "accessKey": "<ACCESS_KEY>" },
+    { "name": "<APP_NAME>",
+      "id": <APP_ID>,
+      "accessKey": "<ACCESS_KEY>" }, ... ]
+}
+
+Error Response:
+{ "status": <STATUS>, "message": "<MESSAGE>" }
+
+### app new
+POST http://localhost:7071/cmd/app
+Request Body:
+{ "name": "<APP_NAME>", // required
+  "id": <APP_ID>, // optional
+  "description": "<DESCRIPTION>" } // optional
+
+OK Response:
+{ "status": <STATUS>,
+  "message": <MESSAGE>,
+  "app": {
+    "name": "<APP_NAME>",
+    "id": <APP_ID>,
+    "accessKey": "<ACCESS_KEY>" }
+}
+
+Error Response:
+{ "status": <STATUS>, "message": "<MESSAGE>" }
+
+### app delete
+DELETE http://localhost:7071/cmd/app/{appName}
+
+OK Response:
+{ "status": <STATUS>, "message" : “<MESSAGE>”}
+
+Error Response:
+{ “status”: <STATUS>, “message” : “<MESSAGE>”}
+
+### app data-delete
+DELETE http://localhost:7071/cmd/app/{appName}/data
+
+OK Response:
+{ "status": <STATUS>, "message" : “<MESSAGE>”}
+
+Error Response:
+{ “status”: <STATUS>, “message” : “<MESSAGE>” }
+
+
+### train TBD
+
+#### Training request:
+POST http://localhost:7071/cmd/train
+Request body: TBD
+
+OK Response: TBD
+
+Error Response: TBD
+
+#### Get training status:
+GET http://localhost:7071/cmd/train/{engineInstanceId}
+
+OK Response: TBD
+INIT
+TRAINING
+DONE
+ERROR
+
+Error Response: TBD
+
+### deploy TBD
diff --git a/tools/src/main/scala/io/prediction/tools/console/App.scala b/tools/src/main/scala/io/prediction/tools/console/App.scala
index 4e8b63c..6e04ca0 100644
--- a/tools/src/main/scala/io/prediction/tools/console/App.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/App.scala
@@ -17,6 +17,7 @@
 
 import io.prediction.data.storage.{AccessKey => StorageAccessKey}
 import io.prediction.data.storage.{App => StorageApp}
+import io.prediction.data.storage.{Channel => StorageChannel}
 import io.prediction.data.storage.Storage
 
 import grizzled.slf4j.Logging
@@ -24,6 +25,9 @@
 case class AppArgs(
   id: Option[Int] = None,
   name: String = "",
+  channel: String = "",
+  dataDeleteChannel: Option[String] = None,
+  all: Boolean = false,
   description: Option[String] = None)
 
 object App extends Logging {
@@ -96,33 +100,105 @@
     0
   }
 
-  def delete(ca: ConsoleArgs): Int = {
+  def show(ca: ConsoleArgs): Int = {
     val apps = Storage.getMetaDataApps
+    val accessKeys = Storage.getMetaDataAccessKeys
+    val channels = Storage.getMetaDataChannels
     apps.getByName(ca.app.name) map { app =>
-      info(s"The following app will be deleted. Are you sure?")
       info(s"    App Name: ${app.name}")
       info(s"      App ID: ${app.id}")
       info(s" Description: ${app.description}")
+      val keys = accessKeys.getByAppid(app.id)
+
+      var firstKey = true
+      keys foreach { k =>
+        val events =
+          if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
+        if (firstKey) {
+          info(f"  Access Key: ${k.key}%s | ${events}%s")
+          firstKey = false
+        } else {
+          info(f"              ${k.key}%s | ${events}%s")
+        }
+      }
+
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+      0
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+  }
+
+  def delete(ca: ConsoleArgs): Int = {
+    val apps = Storage.getMetaDataApps
+    val channels = Storage.getMetaDataChannels
+    val events = Storage.getLEvents()
+    val status = apps.getByName(ca.app.name) map { app =>
+      info(s"The following app (including all channels) will be deleted. Are you sure?")
+      info(s"    App Name: ${app.name}")
+      info(s"      App ID: ${app.id}")
+      info(s" Description: ${app.description}")
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+
       val choice = readLine("Enter 'YES' to proceed: ")
       choice match {
         case "YES" => {
-          val events = Storage.getLEvents()
-          val r = if (events.remove(app.id)) {
-            info(s"Removed Event Store for this app ID: ${app.id}")
-            if (Storage.getMetaDataApps.delete(app.id)) {
-              info(s"Deleted app ${app.name}.")
-              0
+          // delete channels
+          val delChannelStatus: Seq[Int] = chans.map { ch =>
+            if (events.remove(app.id, Some(ch.id))) {
+              info(s"Removed Event Store of the channel ID: ${ch.id}")
+              if (channels.delete(ch.id)) {
+                info(s"Deleted channel ${ch.name}")
+                0
+              } else {
+                error(s"Error deleting channel ${ch.name}.")
+                1
+              }
             } else {
-              error(s"Error deleting app ${app.name}.")
+              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
               1
             }
-          } else {
-            error(s"Error removing Event Store for this app.")
-            1
           }
-          events.close()
-          info("Done.")
-          r
+
+          if (delChannelStatus.filter(_ != 0).isEmpty) {
+            val r = if (events.remove(app.id)) {
+              info(s"Removed Event Store for this app ID: ${app.id}")
+              if (apps.delete(app.id)) {
+                info(s"Deleted app ${app.name}.")
+                0
+              } else {
+                error(s"Error deleting app ${app.name}.")
+                1
+              }
+            } else {
+              error(s"Error removing Event Store for this app.")
+              1
+            }
+
+            info("Done.")
+            r
+          } else 1
         }
         case _ =>
           info("Aborted.")
@@ -132,35 +208,83 @@
       error(s"App ${ca.app.name} does not exist. Aborting.")
       1
     }
+    events.close()
+    status
   }
 
   def dataDelete(ca: ConsoleArgs): Int = {
+    if (ca.app.all) {
+      dataDeleteAll(ca)
+    } else {
+      dataDeleteOne(ca)
+    }
+  }
+
+  def dataDeleteOne(ca: ConsoleArgs): Int = {
     val apps = Storage.getMetaDataApps
+    val channels = Storage.getMetaDataChannels
     apps.getByName(ca.app.name) map { app =>
-      info(s"The data of the following app will be deleted. Are you sure?")
-      info(s"    App Name: ${app.name}")
-      info(s"      App ID: ${app.id}")
-      info(s" Description: ${app.description}")
+
+      val channelId = ca.app.dataDeleteChannel.map { ch =>
+        val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+        if (!channelMap.contains(ch)) {
+          error(s"Unable to delete data for channel.")
+          error(s"Channel ${ch} doesn't exist.")
+          return 1
+        }
+
+        channelMap(ch)
+      }
+
+      if (channelId.isDefined) {
+        info(s"Data of the following channel will be deleted. Are you sure?")
+        info(s"Channel Name: ${ca.app.dataDeleteChannel.get}")
+        info(s"  Channel ID: ${channelId.get}")
+        info(s"    App Name: ${app.name}")
+        info(s"      App ID: ${app.id}")
+        info(s" Description: ${app.description}")
+      } else {
+        info(s"Data of the following app (default channel only) will be deleted. Are you sure?")
+        info(s"    App Name: ${app.name}")
+        info(s"      App ID: ${app.id}")
+        info(s" Description: ${app.description}")
+      }
+
       val choice = readLine("Enter 'YES' to proceed: ")
+
       choice match {
         case "YES" => {
           val events = Storage.getLEvents()
           // remove table
-          val r1 = if (events.remove(app.id)) {
-            info(s"Removed Event Store for this app ID: ${app.id}")
+          val r1 = if (events.remove(app.id, channelId)) {
+            if (channelId.isDefined)
+              info(s"Removed Event Store for this channel ID: ${channelId.get}")
+            else
+              info(s"Removed Event Store for this app ID: ${app.id}")
             0
           } else {
-            error(s"Error removing Event Store for this app.")
+            if (channelId.isDefined)
+              error(s"Error removing Event Store for this channel.")
+            else
+              error(s"Error removing Event Store for this app.")
+
             1
           }
           // re-create table
-          val dbInit = events.init(app.id)
+          val dbInit = events.init(app.id, channelId)
           val r2 = if (dbInit) {
-            info(s"Initialized Event Store for this app ID: ${app.id}.")
+            if (channelId.isDefined)
+              info(s"Initialized Event Store for this channel ID: ${channelId.get}.")
+            else
+              info(s"Initialized Event Store for this app ID: ${app.id}.")
             0
           } else {
-            error(s"Unable to initialize Event Store for this appId:" +
-              s" ${app.id}.")
+            if (channelId.isDefined)
+              error(s"Unable to initialize Event Store for this channel ID:" +
+                s" ${channelId.get}.")
+            else
+              error(s"Unable to initialize Event Store for this appId:" +
+                s" ${app.id}.")
             1
           }
           events.close()
@@ -176,4 +300,200 @@
       1
     }
   }
+
+  def dataDeleteAll(ca: ConsoleArgs): Int = {
+    val apps = Storage.getMetaDataApps
+    val channels = Storage.getMetaDataChannels
+    val events = Storage.getLEvents()
+    val status = apps.getByName(ca.app.name) map { app =>
+      info(s"All data of the app (including default and all channels) will be deleted." +
+        " Are you sure?")
+      info(s"    App Name: ${app.name}")
+      info(s"      App ID: ${app.id}")
+      info(s" Description: ${app.description}")
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+
+      val choice = readLine("Enter 'YES' to proceed: ")
+      choice match {
+        case "YES" => {
+          // delete channels
+          val delChannelStatus: Seq[Int] = chans.map { ch =>
+            val r1 = if (events.remove(app.id, Some(ch.id))) {
+              info(s"Removed Event Store of the channel ID: ${ch.id}")
+              0
+            } else {
+              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
+              1
+            }
+            // re-create table
+            val dbInit = events.init(app.id, Some(ch.id))
+            val r2 = if (dbInit) {
+              info(s"Initialized Event Store of the channel ID: ${ch.id}")
+              0
+            } else {
+              error(s"Unable to initialize Event Store of the channel ID: ${ch.id}.")
+              1
+            }
+            r1 + r2
+          }
+
+          if (delChannelStatus.filter(_ != 0).isEmpty) {
+            val r1 = if (events.remove(app.id)) {
+              info(s"Removed Event Store for this app ID: ${app.id}")
+              0
+            } else {
+              error(s"Error removing Event Store for this app.")
+              1
+            }
+
+            val dbInit = events.init(app.id)
+            val r2 = if (dbInit) {
+              info(s"Initialized Event Store for this app ID: ${app.id}.")
+              0
+            } else {
+              error(s"Unable to initialize Event Store for this appId: ${app.id}.")
+              1
+            }
+            info("Done.")
+            r1 + r2
+          } else 1
+        }
+        case _ =>
+          info("Aborted.")
+          0
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+  def channelNew(ca: ConsoleArgs): Int = {
+    val apps = Storage.getMetaDataApps
+    val channels = Storage.getMetaDataChannels
+    val events = Storage.getLEvents()
+    val newChannel = ca.app.channel
+    val status = apps.getByName(ca.app.name) map { app =>
+      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+      if (channelMap.contains(newChannel)) {
+        error(s"Unable to create new channel.")
+        error(s"Channel ${newChannel} already exists.")
+        1
+      } else if (!StorageChannel.isValidName(newChannel)) {
+        error(s"Unable to create new channel.")
+        error(s"The channel name ${newChannel} is invalid.")
+        error(s"${StorageChannel.nameConstraint}")
+        1
+      } else {
+
+        val channelId = channels.insert(StorageChannel(
+          id = 0, // new id will be assigned
+          appid = app.id,
+          name = newChannel
+        ))
+        channelId.map { chanId =>
+          info(s"Updated Channel meta-data.")
+          // initialize storage
+          val dbInit = events.init(app.id, Some(chanId))
+          if (dbInit) {
+            info(s"Initialized Event Store for the channel: ${newChannel}.")
+            info(s"Created new channel:")
+            info(s"    Channel Name: ${newChannel}")
+            info(s"      Channel ID: ${chanId}")
+            info(s"          App ID: ${app.id}")
+            0
+          } else {
+            error(s"Unable to create new channel.")
+            error(s"Failed to initalize Event Store.")
+            // reverted back the meta data
+            val revertedStatus = channels.delete(chanId)
+            if (!revertedStatus) {
+              error(s"Failed to revert back the Channel meta-data change.")
+              error(s"The channel ${newChannel} CANNOT be used!")
+              error(s"Please run 'pio app channel-delete ${app.name} ${newChannel}' " +
+                "to delete this channel!")
+            }
+            1
+          }
+        }.getOrElse {
+          error(s"Unable to create new channel.")
+          error(s"Failed to update Channel meta-data.")
+          1
+        }
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+  def channelDelete(ca: ConsoleArgs): Int = {
+    val apps = Storage.getMetaDataApps
+    val channels = Storage.getMetaDataChannels
+    val events = Storage.getLEvents()
+    val deleteChannel = ca.app.channel
+    val status = apps.getByName(ca.app.name) map { app =>
+      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+      if (!channelMap.contains(deleteChannel)) {
+        error(s"Unable to delete channel.")
+        error(s"Channel ${deleteChannel} doesn't exist.")
+        1
+      } else {
+        info(s"The following channel will be deleted. Are you sure?")
+        info(s"    Channel Name: ${deleteChannel}")
+        info(s"      Channel ID: ${channelMap(deleteChannel)}")
+        info(s"        App Name: ${app.name}")
+        info(s"          App ID: ${app.id}")
+        val choice = readLine("Enter 'YES' to proceed: ")
+        choice match {
+          case "YES" => {
+            // NOTE: remove the storage first, before removing meta-data (in case the storage removal fails)
+            val dbRemoved = events.remove(app.id, Some(channelMap(deleteChannel)))
+            if (dbRemoved) {
+              info(s"Removed Event Store for this channel: ${deleteChannel}")
+              val metaStatus = channels.delete(channelMap(deleteChannel))
+              if (metaStatus) {
+                info(s"Deleted channel: ${deleteChannel}.")
+                0
+              } else {
+                error(s"Unable to delete channel.")
+                error(s"Failed to update Channel meta-data.")
+                error(s"The channel ${deleteChannel} CANNOT be used!")
+                error(s"Please run 'pio app channel-delete ${app.name} ${deleteChannel}' " +
+                  "to delete this channel again!")
+                1
+              }
+            } else {
+              error(s"Unable to delete channel.")
+              error(s"Error removing Event Store for this channel.")
+              1
+            }
+          }
+          case _ =>
+            info("Aborted.")
+            0
+        }
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
 }
diff --git a/tools/src/main/scala/io/prediction/tools/console/Console.scala b/tools/src/main/scala/io/prediction/tools/console/Console.scala
index 07a8fd7..6f8aedd 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Console.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Console.scala
@@ -15,36 +15,38 @@
 
 package io.prediction.tools.console
 
+import java.io.File
+
+import grizzled.slf4j.Logging
+
 import io.prediction.controller.Utils
 import io.prediction.core.BuildInfo
+import io.prediction.data.api.EventServer
+import io.prediction.data.api.EventServerConfig
 import io.prediction.data.storage.EngineManifest
 import io.prediction.data.storage.EngineManifestSerializer
 import io.prediction.data.storage.Storage
 import io.prediction.data.storage.hbase.upgrade.Upgrade_0_8_3
-import io.prediction.tools.dashboard.Dashboard
-import io.prediction.tools.dashboard.DashboardConfig
-import io.prediction.data.api.EventServer
-import io.prediction.data.api.EventServerConfig
+import io.prediction.tools.admin.{AdminServer, AdminServerConfig}
 import io.prediction.tools.RegisterEngine
 import io.prediction.tools.RunServer
 import io.prediction.tools.RunWorkflow
+import io.prediction.tools.dashboard.Dashboard
+import io.prediction.tools.dashboard.DashboardConfig
 import io.prediction.workflow.WorkflowUtils
-
-import grizzled.slf4j.Logging
 import org.apache.commons.io.FileUtils
 import org.apache.hadoop.fs.Path
 import org.json4s._
 import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.{read, write}
-import scalaj.http.Http
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
 import semverfi._
 
 import scala.collection.JavaConversions._
 import scala.io.Source
 import scala.sys.process._
 import scala.util.Random
-
-import java.io.File
+import scalaj.http.Http
 
 case class ConsoleArgs(
   common: CommonArgs = CommonArgs(),
@@ -53,6 +55,7 @@
   accessKey: AccessKeyArgs = AccessKeyArgs(),
   deploy: DeployArgs = DeployArgs(),
   eventServer: EventServerArgs = EventServerArgs(),
+  adminServer: AdminServerArgs = AdminServerArgs(),
   dashboard: DashboardArgs = DashboardArgs(),
   upgrade: UpgradeArgs = UpgradeArgs(),
   template: TemplateArgs = TemplateArgs(),
@@ -95,19 +98,23 @@
   forceGeneratePIOSbt: Boolean = false)
 
 case class DeployArgs(
-  ip: String = "localhost",
+  ip: String = "0.0.0.0",
   port: Int = 8000,
   logUrl: Option[String] = None,
   logPrefix: Option[String] = None)
 
 case class EventServerArgs(
   enabled: Boolean = false,
-  ip: String = "localhost",
+  ip: String = "0.0.0.0",
   port: Int = 7070,
   stats: Boolean = false)
 
+case class AdminServerArgs(
+  ip: String = "localhost",
+  port: Int = 7071)
+
 case class DashboardArgs(
-  ip: String = "localhost",
+  ip: String = "0.0.0.0",
   port: Int = 9000)
 
 case class UpgradeArgs(
@@ -274,9 +281,9 @@
           arg[String]("<evaluation-class>") action { (x, c) =>
             c.copy(common = c.common.copy(evaluation = Some(x)))
           },
-          arg[String]("<engine-parameters-generator-class>") action { (x, c) =>
+          arg[String]("[<engine-parameters-generator-class>]") optional() action { (x, c) =>
             c.copy(common = c.common.copy(engineParamsGenerator = Some(x)))
-          },
+          } text("Optional engine parameters generator class, overriding the first argument"),
           opt[String]("batch") action { (x, c) =>
             c.copy(common = c.common.copy(batch = x))
           } text("Batch label of the run.")
@@ -297,7 +304,7 @@
           } text("Engine instance ID."),
           opt[String]("ip") action { (x, c) =>
             c.copy(deploy = c.deploy.copy(ip = x))
-          } text("IP to bind to. Default: localhost"),
+          },
           opt[Int]("port") action { (x, c) =>
             c.copy(deploy = c.deploy.copy(port = x))
           } text("Port to bind to. Default: 8000"),
@@ -306,10 +313,16 @@
           } text("Enable feedback loop to event server."),
           opt[String]("event-server-ip") action { (x, c) =>
             c.copy(eventServer = c.eventServer.copy(ip = x))
-          } text("Event server IP. Default: localhost"),
+          },
           opt[Int]("event-server-port") action { (x, c) =>
             c.copy(eventServer = c.eventServer.copy(port = x))
           } text("Event server port. Default: 7070"),
+          opt[Int]("admin-server-port") action { (x, c) =>
+            c.copy(adminServer = c.adminServer.copy(port = x))
+          } text("Admin server port. Default: 7071"),
+          opt[String]("admin-server-port") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(ip = x))
+          } text("Admin server IP. Default: localhost"),
           opt[String]("accesskey") action { (x, c) =>
             c.copy(accessKey = c.accessKey.copy(accessKey = x))
           } text("Access key of the App where feedback data will be stored."),
@@ -331,7 +344,7 @@
         } children(
           opt[String]("ip") action { (x, c) =>
             c.copy(deploy = c.deploy.copy(ip = x))
-          } text("IP to unbind from. Default: localhost"),
+          },
           opt[Int]("port") action { (x, c) =>
             c.copy(deploy = c.deploy.copy(port = x))
           } text("Port to unbind from. Default: 8000")
@@ -344,7 +357,7 @@
         } children(
           opt[String]("ip") action { (x, c) =>
             c.copy(dashboard = c.dashboard.copy(ip = x))
-          } text("IP to bind to. Default: localhost"),
+          },
           opt[Int]("port") action { (x, c) =>
             c.copy(dashboard = c.dashboard.copy(port = x))
           } text("Port to bind to. Default: 9000")
@@ -357,7 +370,7 @@
         } children(
           opt[String]("ip") action { (x, c) =>
             c.copy(eventServer = c.eventServer.copy(ip = x))
-          } text("IP to bind to. Default: localhost"),
+          },
           opt[Int]("port") action { (x, c) =>
             c.copy(eventServer = c.eventServer.copy(port = x))
           } text("Port to bind to. Default: 7070"),
@@ -365,6 +378,18 @@
             c.copy(eventServer = c.eventServer.copy(stats = true))
           }
         )
+      cmd("adminserver").
+        text("Launch an Admin Server at the specific IP and port.").
+        action { (_, c) =>
+        c.copy(commands = c.commands :+ "adminserver")
+      } children(
+        opt[String]("ip") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(ip = x))
+        } text("IP to bind to. Default: localhost"),
+        opt[Int]("port") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(port = x))
+        } text("Port to bind to. Default: 7071")
+        )
       note("")
       cmd("run").
         text("Launch a driver program. This command will pass all\n" +
@@ -441,6 +466,16 @@
               c.copy(commands = c.commands :+ "list")
             },
           note(""),
+          cmd("show").
+            text("Show details of an app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "show")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("Name of the app to be shown.")
+            ),
+          note(""),
           cmd("delete").
             text("Delete an app.").
             action { (_, c) =>
@@ -458,7 +493,39 @@
             } children(
               arg[String]("<name>") action { (x, c) =>
                 c.copy(app = c.app.copy(name = x))
-              } text("Name of the app whose data to be deleted.")
+              } text("Name of the app whose data to be deleted."),
+              opt[String]("channel") action { (x, c) =>
+                c.copy(app = c.app.copy(dataDeleteChannel = Some(x)))
+              } text("Name of channel whose data to be deleted."),
+              opt[Unit]("all") action { (x, c) =>
+                c.copy(app = c.app.copy(all = true))
+              } text("Delete data of all channels including default")
+            ),
+          note(""),
+          cmd("channel-new").
+            text("Create a new channel for the app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "channel-new")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("App name."),
+              arg[String]("<channel>") action { (x, c) =>
+                c.copy(app = c.app.copy(channel = x))
+              } text ("Channel name to be created.")
+            ),
+          note(""),
+          cmd("channel-delete").
+            text("Delete a channel of the app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "channel-delete")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("App name."),
+              arg[String]("<channel>") action { (x, c) =>
+                c.copy(app = c.app.copy(channel = x))
+              } text ("Channel name to be deleted.")
             )
         )
       note("")
@@ -515,6 +582,9 @@
               arg[String]("<new engine directory>") action { (x, c) =>
                 c.copy(template = c.template.copy(directory = x))
               },
+              opt[String]("version") action { (x, c) =>
+                c.copy(template = c.template.copy(version = Some(x)))
+              },
               opt[String]("name") action { (x, c) =>
                 c.copy(template = c.template.copy(name = Some(x)))
               },
@@ -542,6 +612,9 @@
           },
           opt[String]("format") action { (x, c) =>
             c.copy(export = c.export.copy(format = x))
+          },
+          opt[String]("channel") action { (x, c) =>
+            c.copy(export = c.export.copy(channel = Some(x)))
           }
         )
       cmd("import").
@@ -553,6 +626,9 @@
           },
           opt[String]("input") required() action { (x, c) =>
             c.copy(imprt = c.imprt.copy(inputPath = x))
+          },
+          opt[String]("channel") action { (x, c) =>
+            c.copy(imprt = c.imprt.copy(channel = Some(x)))
           }
         )
     }
@@ -608,6 +684,9 @@
         case Seq("eventserver") =>
           eventserver(ca)
           0
+        case Seq("adminserver") =>
+          adminserver(ca)
+          0
         case Seq("run") =>
           generateManifestJson(ca.common.manifestJson)
           run(ca)
@@ -620,10 +699,16 @@
           App.create(ca)
         case Seq("app", "list") =>
           App.list(ca)
+        case Seq("app", "show") =>
+          App.show(ca)
         case Seq("app", "delete") =>
           App.delete(ca)
         case Seq("app", "data-delete") =>
           App.dataDelete(ca)
+        case Seq("app", "channel-new") =>
+          App.channelNew(ca)
+        case Seq("app", "channel-delete") =>
+          App.channelDelete(ca)
         case Seq("accesskey", "new") =>
           AccessKey.create(ca)
         case Seq("accesskey", "list") =>
@@ -673,6 +758,7 @@
     "train" -> txt.train().toString,
     "deploy" -> txt.deploy().toString,
     "eventserver" -> txt.eventserver().toString,
+    "adminserver" -> txt.adminserver().toString,
     "app" -> txt.app().toString,
     "accesskey" -> txt.accesskey().toString,
     "import" -> txt.imprt().toString,
@@ -684,6 +770,7 @@
   def version(ca: ConsoleArgs): Unit = println(BuildInfo.version)
 
   def build(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
     compile(ca)
     info("Looking for an engine...")
     val jarFiles = jarFilesForScala
@@ -718,6 +805,7 @@
   }
 
   def train(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
     withRegisteredManifest(
       ca.common.manifestJson,
       ca.common.engineId,
@@ -740,6 +828,7 @@
   }
 
   def deploy(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
     withRegisteredManifest(
       ca.common.manifestJson,
       ca.common.engineId,
@@ -795,6 +884,15 @@
       stats = ca.eventServer.stats))
   }
 
+  def adminserver(ca: ConsoleArgs): Unit = {
+    info(
+      s"Creating Admin Server at ${ca.adminServer.ip}:${ca.adminServer.port}")
+    AdminServer.createAdminServer(AdminServerConfig(
+      ip = ca.adminServer.ip,
+      port = ca.adminServer.port
+    ))
+  }
+
   def undeploy(ca: ConsoleArgs): Int = {
     val serverUrl = s"http://${ca.deploy.ip}:${ca.deploy.port}"
     info(
@@ -941,7 +1039,7 @@
     if (new File(s"${sparkHome}/bin/spark-submit").exists) {
       println(s"Apache Spark")
       println(s"  Installed at: ${sparkHome}")
-      val sparkMinVersion = "1.2.0"
+      val sparkMinVersion = "1.3.0"
       val sparkReleaseFile = new File(s"${sparkHome}/RELEASE")
       if (sparkReleaseFile.exists) {
         val sparkReleaseStrings =
diff --git a/tools/src/main/scala/io/prediction/tools/console/Export.scala b/tools/src/main/scala/io/prediction/tools/console/Export.scala
index 45f07c1..ab04a5e 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Export.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Export.scala
@@ -21,11 +21,14 @@
 
 case class ExportArgs(
   appId: Int = 0,
+  channel: Option[String] = None,
   outputPath: String = "",
   format: String = "json")
 
 object Export {
   def eventsToFile(ca: ConsoleArgs, core: File): Int = {
+    val channelArg = ca.export.channel
+      .map(ch => Seq("--channel", ch)).getOrElse(Seq())
     Runner.runOnSpark(
       "io.prediction.tools.export.EventsToFile",
       Seq(
@@ -34,7 +37,7 @@
         "--output",
         ca.export.outputPath,
         "--format",
-        ca.export.format),
+        ca.export.format) ++ channelArg,
       ca,
       core)
   }
diff --git a/tools/src/main/scala/io/prediction/tools/console/Import.scala b/tools/src/main/scala/io/prediction/tools/console/Import.scala
index 419c698..c673e6a 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Import.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Import.scala
@@ -21,17 +21,20 @@
 
 case class ImportArgs(
   appId: Int = 0,
+  channel: Option[String] = None,
   inputPath: String = "")
 
 object Import {
   def fileToEvents(ca: ConsoleArgs, core: File): Int = {
+    val channelArg = ca.imprt.channel
+      .map(ch => Seq("--channel", ch)).getOrElse(Seq())
     Runner.runOnSpark(
       "io.prediction.tools.imprt.FileToEvents",
       Seq(
         "--appid",
         ca.imprt.appId.toString,
         "--input",
-        ca.imprt.inputPath),
+        ca.imprt.inputPath) ++ channelArg,
       ca,
       core)
   }
diff --git a/tools/src/main/scala/io/prediction/tools/console/Template.scala b/tools/src/main/scala/io/prediction/tools/console/Template.scala
index f82d039..4a293df 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Template.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Template.scala
@@ -15,18 +15,6 @@
 
 package io.prediction.tools.console
 
-import io.prediction.controller.Utils
-
-import grizzled.slf4j.Logging
-import org.apache.commons.io.FileUtils
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.{ read, write }
-import scalaj.http._
-
-import scala.io.Source
-import scala.sys.process._
-
 import java.io.BufferedInputStream
 import java.io.BufferedOutputStream
 import java.io.File
@@ -36,9 +24,24 @@
 import java.net.URI
 import java.util.zip.ZipInputStream
 
+import grizzled.slf4j.Logging
+import io.prediction.controller.Utils
+import io.prediction.core.BuildInfo
+import org.apache.commons.io.FileUtils
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+import semverfi._
+
+import scala.io.Source
+import scala.sys.process._
+import scalaj.http._
+
 case class TemplateArgs(
   directory: String = "",
   repository: String = "",
+  version: Option[String] = None,
   name: Option[String] = None,
   packageName: Option[String] = None,
   email: Option[String] = None)
@@ -60,9 +63,34 @@
 case class TemplateEntry(
   repo: String)
 
+case class TemplateMetaData(
+  pioVersionMin: Option[String] = None)
+
 object Template extends Logging {
   implicit val formats = Utils.json4sDefaultFormats
 
+  def templateMetaData(templateJson: File): TemplateMetaData = {
+    if (!templateJson.exists) {
+      warn(s"$templateJson does not exist. Template metadata will not be available. " +
+        "(This is safe to ignore if you are not working on a template.)")
+      TemplateMetaData()
+    } else {
+      val jsonString = Source.fromFile(templateJson)(scala.io.Codec.ISO8859).mkString
+      val json = try {
+        parse(jsonString)
+      } catch {
+        case e: org.json4s.ParserUtil.ParseException =>
+          warn(s"$templateJson cannot be parsed. Template metadata will not be available.")
+          return TemplateMetaData()
+      }
+      val pioVersionMin = json \ "pio" \ "version" \ "min"
+      pioVersionMin match {
+        case JString(s) => TemplateMetaData(pioVersionMin = Some(s))
+        case _ => TemplateMetaData()
+      }
+    }
+  }
+
   /** Creates a wrapper that provides the functionality of scalaj.http.Http()
     * with automatic proxy settings handling. The proxy settings will first
     * come from "git" followed by system properties "http.proxyHost" and
@@ -172,10 +200,10 @@
     try {
       val templatesJson = Source.fromURL(templatesUrl).mkString("")
       val templates = read[List[TemplateEntry]](templatesJson)
-      println("The following is a list of template IDs officially recognized " +
-        "by PredictionIO:")
+      println("The following is a list of template IDs registered on " +
+        "PredictionIO Template Gallery:")
       println()
-      templates.foreach { template =>
+      templates.sortBy(_.repo.toLowerCase).foreach { template =>
         println(template.repo)
       }
       println()
@@ -263,113 +291,137 @@
     println(s"Retrieving ${ca.template.repository}")
     val tags = read[List[GitHubTag]](repo.body)
     println(s"There are ${tags.size} tags")
-    tags.headOption.map { tag =>
-      println(s"Using the most recent tag ${tag.name}")
-      val url =
-        s"https://github.com/${ca.template.repository}/archive/${tag.name}.zip"
-      println(s"Going to download $url")
-      val trial = try {
-        httpOptionalProxy(url).asBytes
-      } catch {
-        case e: ConnectException =>
-          githubConnectErrorMessage(e)
-          return 1
-      }
-      val finalTrial = try {
-        trial.location.map { loc =>
-          println(s"Redirecting to $loc")
-          httpOptionalProxy(loc).asBytes
-        } getOrElse trial
-      } catch {
-        case e: ConnectException =>
-          githubConnectErrorMessage(e)
-          return 1
-      }
-      val zipFilename =
-        s"${ca.template.repository.replace('/', '-')}-${tag.name}.zip"
-      FileUtils.writeByteArrayToFile(
-        new File(zipFilename),
-        finalTrial.body)
-      val zis = new ZipInputStream(
-        new BufferedInputStream(new FileInputStream(zipFilename)))
-      val bufferSize = 4096
-      val filesToModify = collection.mutable.ListBuffer[String]()
-      var ze = zis.getNextEntry
-      while (ze != null) {
-        val filenameSegments = ze.getName.split(File.separatorChar)
-        val destFilename = (ca.template.directory +: filenameSegments.tail).
-          mkString(File.separator)
-        if (ze.isDirectory) {
-          new File(destFilename).mkdirs
-        } else {
-          val os = new BufferedOutputStream(
-            new FileOutputStream(destFilename),
-            bufferSize)
-          val data = Array.ofDim[Byte](bufferSize)
-          var count = zis.read(data, 0, bufferSize)
-          while (count != -1) {
-            os.write(data, 0, count)
-            count = zis.read(data, 0, bufferSize)
-          }
-          os.flush()
-          os.close()
 
-          val nameOnly = new File(destFilename).getName
-
-          if (organization != "" &&
-            (nameOnly.endsWith(".scala") ||
-              nameOnly == "build.sbt" ||
-              nameOnly == "engine.json")) {
-            filesToModify += destFilename
-          }
-        }
-        ze = zis.getNextEntry
-      }
-      zis.close()
-      new File(zipFilename).delete
-
-      val engineJsonFile =
-        new File(ca.template.directory + File.separator + "engine.json")
-
-      val engineJson = try {
-        Some(parse(Source.fromFile(engineJsonFile).mkString))
-      } catch {
-        case e: java.io.IOException =>
-          error("Unable to read engine.json. Skipping automatic package " +
-            "name replacement.")
-          None
-        case e: MappingException =>
-          error("Unable to parse engine.json. Skipping automatic package " +
-            "name replacement.")
-          None
-      }
-
-      val engineFactory = engineJson.map { ej =>
-        (ej \ "engineFactory").extractOpt[String]
-      } getOrElse None
-
-      engineFactory.map { ef =>
-        val pkgName = ef.split('.').dropRight(1).mkString(".")
-        println(s"Replacing $pkgName with $organization...")
-
-        filesToModify.foreach { ftm =>
-          println(s"Processing $ftm...")
-          val fileContent = Source.fromFile(ftm).getLines()
-          val processedLines =
-            fileContent.map(_.replaceAllLiterally(pkgName, organization))
-          FileUtils.writeStringToFile(
-            new File(ftm),
-            processedLines.mkString("\n"))
-        }
-      } getOrElse {
-        error("engineFactory is not found in engine.json. Skipping automatic " +
-          "package name replacement.")
-      }
-
-      println(s"Engine template ${ca.template.repository} is now ready at " +
-        ca.template.directory)
+    if (tags.size == 0) {
+      println(s"${ca.template.repository} does not have any tag. Aborting.")
+      return 1
     }
 
+    val tag = ca.template.version.map { v =>
+      tags.find(_.name == v).getOrElse {
+        println(s"${ca.template.repository} does not have tag $v. Aborting.")
+        return 1
+      }
+    } getOrElse tags.head
+
+    println(s"Using tag ${tag.name}")
+    val url =
+      s"https://github.com/${ca.template.repository}/archive/${tag.name}.zip"
+    println(s"Going to download $url")
+    val trial = try {
+      httpOptionalProxy(url).asBytes
+    } catch {
+      case e: ConnectException =>
+        githubConnectErrorMessage(e)
+        return 1
+    }
+    val finalTrial = try {
+      trial.location.map { loc =>
+        println(s"Redirecting to $loc")
+        httpOptionalProxy(loc).asBytes
+      } getOrElse trial
+    } catch {
+      case e: ConnectException =>
+        githubConnectErrorMessage(e)
+        return 1
+    }
+    val zipFilename =
+      s"${ca.template.repository.replace('/', '-')}-${tag.name}.zip"
+    FileUtils.writeByteArrayToFile(
+      new File(zipFilename),
+      finalTrial.body)
+    val zis = new ZipInputStream(
+      new BufferedInputStream(new FileInputStream(zipFilename)))
+    val bufferSize = 4096
+    val filesToModify = collection.mutable.ListBuffer[String]()
+    var ze = zis.getNextEntry
+    while (ze != null) {
+      val filenameSegments = ze.getName.split(File.separatorChar)
+      val destFilename = (ca.template.directory +: filenameSegments.tail).
+        mkString(File.separator)
+      if (ze.isDirectory) {
+        new File(destFilename).mkdirs
+      } else {
+        val os = new BufferedOutputStream(
+          new FileOutputStream(destFilename),
+          bufferSize)
+        val data = Array.ofDim[Byte](bufferSize)
+        var count = zis.read(data, 0, bufferSize)
+        while (count != -1) {
+          os.write(data, 0, count)
+          count = zis.read(data, 0, bufferSize)
+        }
+        os.flush()
+        os.close()
+
+        val nameOnly = new File(destFilename).getName
+
+        if (organization != "" &&
+          (nameOnly.endsWith(".scala") ||
+            nameOnly == "build.sbt" ||
+            nameOnly == "engine.json")) {
+          filesToModify += destFilename
+        }
+      }
+      ze = zis.getNextEntry
+    }
+    zis.close()
+    new File(zipFilename).delete
+
+    val engineJsonFile =
+      new File(ca.template.directory, "engine.json")
+
+    val engineJson = try {
+      Some(parse(Source.fromFile(engineJsonFile).mkString))
+    } catch {
+      case e: java.io.IOException =>
+        error("Unable to read engine.json. Skipping automatic package " +
+          "name replacement.")
+        None
+      case e: MappingException =>
+        error("Unable to parse engine.json. Skipping automatic package " +
+          "name replacement.")
+        None
+    }
+
+    val engineFactory = engineJson.map { ej =>
+      (ej \ "engineFactory").extractOpt[String]
+    } getOrElse None
+
+    engineFactory.map { ef =>
+      val pkgName = ef.split('.').dropRight(1).mkString(".")
+      println(s"Replacing $pkgName with $organization...")
+
+      filesToModify.foreach { ftm =>
+        println(s"Processing $ftm...")
+        val fileContent = Source.fromFile(ftm).getLines()
+        val processedLines =
+          fileContent.map(_.replaceAllLiterally(pkgName, organization))
+        FileUtils.writeStringToFile(
+          new File(ftm),
+          processedLines.mkString("\n"))
+      }
+    } getOrElse {
+      error("engineFactory is not found in engine.json. Skipping automatic " +
+        "package name replacement.")
+    }
+
+    verifyTemplateMinVersion(new File(ca.template.directory, "template.json"))
+
+    println(s"Engine template ${ca.template.repository} is now ready at " +
+      ca.template.directory)
+
     0
   }
+
+  def verifyTemplateMinVersion(templateJsonFile: File): Unit = {
+    val metadata = templateMetaData(templateJsonFile)
+
+    metadata.pioVersionMin.foreach { pvm =>
+      if (Version(BuildInfo.version) < Version(pvm))
+        warn(s"This engine template requires at least PredictionIO $pvm. " +
+          s"The template may not work with PredictionIO ${BuildInfo.version}.")
+    }
+  }
+
 }
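verifyTemplateMinVersion ties together templateMetaData and the semverfi comparison above. Below is an illustrative template.json it understands, plus a standalone sketch of the same check; the file contents and the "0.9.3" value are hypothetical examples, not taken from any real template:

    // template.json
    //   {"pio": {"version": {"min": "0.9.2"}}}

    import semverfi._

    object MinVersionCheck extends App {
      val installed = Version("0.9.2")  // stands in for BuildInfo.version
      val required  = Version("0.9.3")  // stands in for pioVersionMin
      if (installed < required)
        println(s"This engine template requires at least PredictionIO $required.")
    }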
diff --git a/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala b/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
index a8fa746..743d57a 100644
--- a/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
+++ b/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
@@ -30,6 +30,7 @@
   env: String = "",
   logFile: String = "",
   appId: Int = 0,
+  channel: Option[String] = None,
   outputPath: String = "",
   format: String = "parquet",
   verbose: Boolean = false,
@@ -47,6 +48,9 @@
       opt[Int]("appid") action { (x, c) =>
         c.copy(appId = x)
       }
+      opt[String]("channel") action { (x, c) =>
+        c.copy(channel = Some(x))
+      }
       opt[String]("format") action { (x, c) =>
         c.copy(format = x)
       }
@@ -61,24 +65,40 @@
       }
     }
     parser.parse(args, EventsToFileArgs()) map { args =>
+      // get channelId
+      val channels = Storage.getMetaDataChannels
+      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = args.channel.map { ch =>
+        if (!channelMap.contains(ch)) {
+          error(s"Channel ${ch} doesn't exist in this app.")
+          sys.exit(1)
+        }
+
+        channelMap(ch)
+      }
+
+      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
+
       WorkflowUtils.modifyLogging(verbose = args.verbose)
       @transient lazy implicit val formats = Utils.json4sDefaultFormats +
         new EventJson4sSupport.APISerializer
       val sc = WorkflowContext(
         mode = "Export",
-        batch = "App ID " + args.appId,
+        batch = "App ID " + args.appId + channelStr,
         executorEnv = Runner.envStringToMap(args.env))
       val sqlContext = new SQLContext(sc)
       val events = Storage.getPEvents()
-      val eventsRdd = events.find(appId = args.appId)(sc)
+      val eventsRdd = events.find(appId = args.appId, channelId = channelId)(sc)
       val jsonStringRdd = eventsRdd.map(write(_))
       if (args.format == "json") {
         jsonStringRdd.saveAsTextFile(args.outputPath)
       } else {
         val jsonRdd = sqlContext.jsonRDD(jsonStringRdd)
-        info(jsonRdd.schemaString)
         jsonRdd.saveAsParquetFile(args.outputPath)
       }
+      info(s"Events are exported to ${args.outputPath}/.")
+      info("Done.")
     }
   }
 }
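Both EventsToFile and FileToEvents resolve a channel name to its numeric ID through Storage.getMetaDataChannels before touching any data. The lookup boils down to the sketch below, with a plain Map standing in for the metadata client (names and IDs are invented for illustration):

    val channelMap = Map("test" -> 2, "backtest" -> 3)   // channel name -> channel ID
    val requested: Option[String] = Some("test")
    val channelId: Option[Int] = requested.map { ch =>
      channelMap.getOrElse(ch, sys.error(s"Channel $ch doesn't exist in this app."))
    }
    println(channelId)  // Some(2); None means the default channel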
diff --git a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala b/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
index 5f346bb..f3c98d6 100644
--- a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
+++ b/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
@@ -30,6 +30,7 @@
   env: String = "",
   logFile: String = "",
   appId: Int = 0,
+  channel: Option[String] = None,
   inputPath: String = "",
   verbose: Boolean = false,
   debug: Boolean = false)
@@ -46,6 +47,9 @@
       opt[Int]("appid") action { (x, c) =>
         c.copy(appId = x)
       }
+      opt[String]("channel") action { (x, c) =>
+        c.copy(channel = Some(x))
+      }
       opt[String]("input") action { (x, c) =>
         c.copy(inputPath = x)
       }
@@ -57,16 +61,35 @@
       }
     }
     parser.parse(args, FileToEventsArgs()) map { args =>
+      // get channelId
+      val channels = Storage.getMetaDataChannels
+      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = args.channel.map { ch =>
+        if (!channelMap.contains(ch)) {
+          error(s"Channel ${ch} doesn't exist in this app.")
+          sys.exit(1)
+        }
+
+        channelMap(ch)
+      }
+
+      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
+
       WorkflowUtils.modifyLogging(verbose = args.verbose)
       @transient lazy implicit val formats = Utils.json4sDefaultFormats +
         new EventJson4sSupport.APISerializer
       val sc = WorkflowContext(
         mode = "Import",
-        batch = "App ID " + args.appId,
+        batch = "App ID " + args.appId + channelStr,
         executorEnv = Runner.envStringToMap(args.env))
       val rdd = sc.textFile(args.inputPath)
       val events = Storage.getPEvents()
-      events.write(rdd.map(read[Event](_)), args.appId)(sc)
+      events.write(events = rdd.map(read[Event](_)),
+        appId = args.appId,
+        channelId = channelId)(sc)
+      info("Events are imported.")
+      info("Done.")
     }
   }
 }
diff --git a/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt
new file mode 100644
index 0000000..4ec0237
--- /dev/null
+++ b/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt
@@ -0,0 +1,6 @@
+(Experimental Only!) Usage: pio adminserver [--ip <value>] [--port <value>]
+
+  --ip <value>
+      IP to bind to. Default: localhost
+  --port <value>
+      Port to bind to. Default: 7071
diff --git a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
index 045c298..1a1ec32 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
@@ -9,10 +9,20 @@
   <name>
       App name.
 
+
 Usage: pio app list
 
 List all apps.
 
+
+Usage: pio app show <name>
+
+Show details of an app.
+
+  <name>
+      App name.
+
+
 Usage: pio app delete <name>
 
 Name of the app to be deleted.
@@ -20,9 +30,36 @@
   <name>
       App name.
 
-Usage: pio app data-delete <name>
+
+Usage: pio app data-delete <name> [--channel <name>] [--all]
 
 Delete data of an app.
 
   <name>
       App name.
+  --channel <name>
+      Delete data of the specified channel (default channel if not specified)
+  --all
+      Delete all data of this app (including both default and all channels)
+
+
+Usage: pio app channel-new <name> <channel>
+
+Create a new channel for the app.
+
+  <name>
+      App name.
+
+  <channel>
+      Channel name to be created.
+
+
+Usage: pio app channel-delete <name> <channel>
+
+Delete a channel for the app.
+
+  <name>
+      App name.
+
+  <channel>
+      Channel name to be deleted.
diff --git a/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
index fa95e47..c188d6f 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
@@ -8,7 +8,7 @@
 pass-through arguments to its underlying spark-submit command.
 
   --ip <value>
-      IP to bind to. Default: localhost
+      IP to bind to. Default: 0.0.0.0
   --port <value>
       Port to bind to. Default: 8000
   --engine-instance-id <value>
@@ -18,7 +18,7 @@
   --accesskey <value>
       Access key of the App where feedback data will be stored.
   --event-server-ip <value>
-      Event server IP. Default: localhost
+      Event server IP. Default: 0.0.0.0
   --event-server-port <value>
       Event server port. Default: 7070
   --batch <value>
diff --git a/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
index 2491c54..beda337 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
@@ -1,7 +1,7 @@
 Usage: pio eventserver [--ip <value>] [--port <value>] [--stats]
 
   --ip <value>
-      IP to bind to. Default: localhost
+      IP to bind to. Default: 0.0.0.0
   --port <value>
       Port to bind to. Default: 7070
   --stats
diff --git a/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
index c1ba460..28eb665 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
@@ -1,12 +1,14 @@
-Usage: pio export --appid <value> --output <value> [--format <value>]
+Usage: pio export --appid <value> --output <value> [--format <value>] [--channel <value>]
 
 Exports all events of an app to a file. If Hadoop configuration is present, the
 file will be exported to HDFS instead of local filesystem.
 
   --appid <value>
       App ID of events to be exported.
+  --channel <value>
+      Channel name (the default channel is used if not specified)
   --output <value>
       Output path of the exported file.
   --format <value>
       The format of the exported file. Valid values are "json" and "parquet".
-      The default format is "json". 
+      The default format is "json".
diff --git a/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
index 3cd0475..245d749 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
@@ -1,4 +1,4 @@
-Usage: pio import --appid <value> --input <value>
+Usage: pio import --appid <value> --input <value> [--channel <value>]
 
 Imports all events from a file to an app. Each line of the file should be a JSON
 object that represents a single event. If Hadoop configuration is present, the
@@ -6,5 +6,7 @@
 
   --appid <value>
       App ID of events to be imported.
+  --channel <value>
+      Channel name (the default channel is used if not specified)
   --input <value>
       Input path of the import file.
diff --git a/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
index 370822c..a97ecb3 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
@@ -47,5 +47,6 @@
     run           Launch a driver program
     eval          Kick off an evaluation using an engine
     dashboard     Launch an evaluation dashboard
+    adminserver   Launch an Admin Server
 
 See 'pio help <command>' to read about a specific subcommand.
diff --git a/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
index 1648b16..f97c8ce 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
@@ -3,6 +3,7 @@
 Retrieves a list of available template IDs.
 
 Usage: pio template get <template ID> <new engine directory>
+                        [--version <version>]
                         [--name <value>] [--package <value>] [--email <value>]
 
 Seeds a directory with an engine template.
@@ -11,6 +12,9 @@
       Engine template ID.
   <new engine directory>
       Location of the new engine.
+  --version <value>
+      The template version to get. By default, the most recently tagged version
+      will be downloaded.
   --name <value>
       Name of the author of the new engine.
   --package <value>
diff --git a/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala b/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala
new file mode 100644
index 0000000..93af23f
--- /dev/null
+++ b/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala
@@ -0,0 +1,66 @@
+package io.prediction.tools.admin
+
+import akka.actor.{ActorSystem, Props}
+import akka.testkit.TestProbe
+import io.prediction.data.storage.Storage
+import org.specs2.mutable.Specification
+import spray.http._
+import spray.httpx.RequestBuilding._
+import spray.util._
+
+
+class AdminAPISpec extends Specification {
+
+  val system = ActorSystem(Utils.actorSystemNameFrom(getClass))
+  val config = AdminServerConfig(
+    ip = "localhost",
+    port = 7071)
+
+  val commandClient = new CommandClient(
+    appClient = Storage.getMetaDataApps,
+    accessKeyClient = Storage.getMetaDataAccessKeys,
+    eventClient = Storage.getLEvents()
+  )
+
+  val adminActor = system.actorOf(Props(classOf[AdminServiceActor], commandClient))
+
+  "GET / request" should {
+    "properly produce OK HttpResponses" in {
+      val probe = TestProbe()(system)
+      probe.send(adminActor, Get("/"))
+
+      probe.expectMsg(
+        HttpResponse(
+          200,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"status":"alive"}"""
+          )
+        )
+      )
+      success
+    }
+  }
+
+  "GET /cmd/app request" should {
+    "properly produce OK HttpResponses" in {
+      /*
+      val probe = TestProbe()(system)
+      probe.send(adminActor, Get("/cmd/app"))
+
+      //TODO: Need to convert the response string to the corresponding case object to assert some properties on the object
+      probe.expectMsg(
+        HttpResponse(
+          200,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"status":1}"""
+          )
+        )
+      )*/
+      pending
+    }
+  }
+
+  step(system.shutdown())
+}
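One hedged way to finish the pending /cmd/app test would be to parse the response body with json4s (already used throughout the tools module) and assert on individual fields rather than comparing raw strings. The "status" field name follows the GET / response above; everything else in this sketch is an assumption:

    import org.json4s._
    import org.json4s.native.JsonMethods._

    // Extract the numeric "status" field from a JSON response body, if present.
    def statusOf(body: String): Option[BigInt] =
      (parse(body) \ "status") match {
        case JInt(n) => Some(n)
        case _ => None
      }
    // statusOf("""{"status":1}""") == Some(1)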