Merge branch 'livedoc' into develop
diff --git a/.travis.yml b/.travis.yml
index 06c6b9a..ecc13f4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,44 +1,73 @@
+##########
+# This is the .travis.yml configuration file specifically for the master and develop branches.
+# The Travis job should contain only unit and integration tests.
+#
+# To prevent this file from being overwritten by the .travis.yml of other branches,
+# please add the following to your local git config:
+#   git config merge.ours.driver true
+##########
+
 branches:
   only:
     - master
-    - livedoc
+    - develop
 
-language: ruby
-rvm:
-  - 2.2.1
+language: scala
 
-gemfile: docs/manual/Gemfile
+scala:
+  - 2.10.5
+
+jdk:
+  - oraclejdk8
+
+addons:
+  postgresql: "9.3"
 
 sudo: false
 
+env:
+  global:
+    - PIO_HOME=`pwd`
+
+before_script:
+# Download Spark and HBase
+  - mkdir vendors
+  - wget http://d3kbcqa49mib13.cloudfront.net/spark-1.3.0-bin-hadoop2.4.tgz
+  - tar zxfC spark-1.3.0-bin-hadoop2.4.tgz vendors
+  - wget http://archive.apache.org/dist/hbase/hbase-1.0.0/hbase-1.0.0-bin.tar.gz
+  - tar zxfC hbase-1.0.0-bin.tar.gz vendors
+
+# Prepare pio environment variables
+  - set -a
+  - source conf/pio-env.sh.travis
+  - set +a
+
+# Create PostgreSQL database for PredictionIO
+  - psql -c 'create database predictionio;' -U postgres
+  - ./bin/travis/pio-start-travis
+
 script:
-  - cd docs/manual
-  - bundle exec middleman build
+# Run Scalastyle check
+  - sbt scalastyle
+
+# Run all unit tests
+  - sbt test
+
+after_script:
+  - ./bin/travis/pio-stop-travis
+
 deploy:
   - provider: s3
     access_key_id:
       secure: "PxUW6LxwsJ2UlakxsPtkgIwbE949QyJbDF31IdjDIVX5H7KLWA0xkpECj+DXW7/lODsuGLxi02w4Y+KE0Ujo27ovnQAu2F1D6NDGn+D/JIu/wqjRJ4OOOg65j/06zyj5jzWMckLh6wYKABMiInsWiXtr4ehLS6pHVyJVXaLuBPE="
     secret_access_key:
       secure: "L3TXxQJZutXrXROf89dTiMuvcnsQ88F+cSqpddhafy4O3agyTMyJcHozizCaKI+VuJVZnUrip/joo4b85PSC8xjU4G7/lOIAoiSyQZU/f4RqUTFyl9ppTHQq2CyZKZy1qhqmBegRcWTy6TLV3JNXzKq+0fgl6/HekUMBuNX98yM="
-    bucket: docs.prediction.io
-    region: us-east-1
-    skip_cleanup: true
-    endpoint: docs.prediction.io.s3-website-us-east-1.amazonaws.com
-    local-dir: build
-    detect_encoding: true
-    on:
-      branch: livedoc
-  - provider: s3
-    access_key_id:
-      secure: "PxUW6LxwsJ2UlakxsPtkgIwbE949QyJbDF31IdjDIVX5H7KLWA0xkpECj+DXW7/lODsuGLxi02w4Y+KE0Ujo27ovnQAu2F1D6NDGn+D/JIu/wqjRJ4OOOg65j/06zyj5jzWMckLh6wYKABMiInsWiXtr4ehLS6pHVyJVXaLuBPE="
-    secret_access_key:
-      secure: "L3TXxQJZutXrXROf89dTiMuvcnsQ88F+cSqpddhafy4O3agyTMyJcHozizCaKI+VuJVZnUrip/joo4b85PSC8xjU4G7/lOIAoiSyQZU/f4RqUTFyl9ppTHQq2CyZKZy1qhqmBegRcWTy6TLV3JNXzKq+0fgl6/HekUMBuNX98yM="
     bucket: install.prediction.io
     region: us-east-1
     skip_cleanup: true
     acl: public_read
     endpoint: install.prediction.io.s3-website-us-east-1.amazonaws.com
-    local-dir: ../../bin
+    local-dir: bin
     detect_encoding: true
     on:
       branch: master
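
One note on the merge-driver hint in the .travis.yml header above: git config merge.ours.driver true only takes effect for paths marked with the ours merge driver in .gitattributes (assumed to exist; it is not part of this diff), e.g.:

    .travis.yml merge=ours

Without that attribute, the config setting alone changes nothing.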
diff --git a/RELEASE.md b/RELEASE.md
index 7a74cb2..65470af 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -2,9 +2,17 @@
 
 **Note:** For upgrade instructions please refer to [this page](/resources/upgrade/).
 
+###v0.9.6
+
+November 2015 | [Release Notes](https://github.com/PredictionIO/PredictionIO/blob/master/RELEASE.md) have been moved to Github and you are reading them. For a detailed list of commits check [this page](https://github.com/PredictionIO/PredictionIO/commits/master)
+
+- Upgraded install/runtime components to HBase 1 and Spark 1.5.2. PIO still runs on older HBase, and on Spark back to 1.3.1. Also upgraded the installed Elasticsearch to 1.5.2, since PIO runs well on it but also runs on older versions.
+- Support for maintaining a moving window of events by discarding old events from the EventStore
+- Support for doing a deploy without creating a Spark Context
+
 ###v0.9.5 
 
-October 14th, 2015 | [Release Notes](https://github.com/PredictionIO/PredictionIO/blob/master/RELEASE.md) have been moved to Github and you are reading them. For a detailed list of commits check [this page](https://github.com/PredictionIO/PredictionIO/commits/master)
+October 14th, 2015 | [Release Notes](https://github.com/PredictionIO/PredictionIO/blob/master/RELEASE.md) have been moved to Github and you are reading them. For a detailed list of commits check [this page](https://github.com/PredictionIO/PredictionIO/commits/v0.9.5)
 
 - Support batches of events sent to the EventServer as json arrays
 - Support creating an Elasticsearch StorageClient created for an Elasticsearch cluster from variables in pio-env.sh
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index 8ea9eba..fbf65fe 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -60,5 +60,12 @@
 if [ -n "$ES_CONF_DIR" ]; then
   CLASSPATH="$CLASSPATH:$ES_CONF_DIR"
 fi
+if [ -n "$POSTGRES_JDBC_DRIVER" ]; then
+  CLASSPATH="$CLASSPATH:$POSTGRES_JDBC_DRIVER"
+fi
+if [ -n "$MYSQL_JDBC_DRIVER" ]; then
+  CLASSPATH="$CLASSPATH:$MYSQL_JDBC_DRIVER"
+fi
+
 
 echo "$CLASSPATH"
diff --git a/bin/install.sh b/bin/install.sh
index 0fdd388..61febe0 100755
--- a/bin/install.sh
+++ b/bin/install.sh
@@ -9,10 +9,13 @@
 # License: http://www.apache.org/licenses/LICENSE-2.0
 
 OS=`uname`
-PIO_VERSION=0.9.5
-SPARK_VERSION=1.5.1
-ELASTICSEARCH_VERSION=1.4.4
-HBASE_VERSION=1.0.0
+PIO_VERSION=0.9.6
+SPARK_VERSION=1.6.0
+# Looks like support for Elasticsearch 2.0 will require 2.0, so deferring
+ELASTICSEARCH_VERSION=1.7.3
+HBASE_VERSION=1.1.2
+POSTGRES_VERSION=9.4-1204.jdbc41
+MYSQL_VERSION=5.1.37
 PIO_DIR=$HOME/PredictionIO
 USER_PROFILE=$HOME/.profile
 PIO_FILE=PredictionIO-${PIO_VERSION}.tar.gz
@@ -71,7 +74,6 @@
   echo -e "\033[1;33mForcing Docker defaults!\033[0m"
   pio_dir=${PIO_DIR}
   vendors_dir=${pio_dir}/vendors
-  source_setup=${ES_HB}
 
   spark_dir=${vendors_dir}/spark-${SPARK_VERSION}
   elasticsearch_dir=${vendors_dir}/elasticsearch-${ELASTICSEARCH_VERSION}
@@ -172,7 +174,7 @@
       fi
       email=${email:-$guess_email}
 
-      url="http://direct.prediction.io/$PIO_VERSION/install.json/install/install/$email/"
+      url="https://direct.prediction.io/$PIO_VERSION/install.json/install/install/$email/"
       curl --silent ${url} > /dev/null
     fi
 
@@ -239,7 +241,7 @@
         break
         ;;
       "$DISTRO_OTHER")
-        echo -e "\033[1;31mYour disribution not yet supported for automatic install :(\033[0m"
+        echo -e "\033[1;31mYour distribution not yet supported for automatic install :(\033[0m"
         echo -e "\033[1;31mPlease install Java manually!\033[0m"
         exit 2
         ;;
@@ -274,14 +276,6 @@
 # PredictionIO
 echo -e "\033[1;36mStarting PredictionIO setup in:\033[0m $pio_dir"
 cd ${TEMP_DIR}
-
-# delete existing tmp file before download again
-if [[ -e  ${PIO_FILE} ]]; then
-  if confirm "Delete existing $PIO_FILE?"; then
-    rm ${PIO_FILE}
-  fi
-fi
-
 if [[ ! -e ${PIO_FILE} ]]; then
   echo "Downloading PredictionIO..."
   curl -O https://d8k1yxp8elc6b.cloudfront.net/${PIO_FILE}
@@ -304,11 +298,6 @@
 
 # Spark
 echo -e "\033[1;36mStarting Spark setup in:\033[0m $spark_dir"
-if [[ -e spark-${SPARK_VERSION}-bin-hadoop2.6.tgz ]]; then
-  if confirm "Delete existing spark-$SPARK_VERSION-bin-hadoop2.6.tgz?"; then
-    rm spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
-  fi
-fi
 if [[ ! -e spark-${SPARK_VERSION}-bin-hadoop2.6.tgz ]]; then
   echo "Downloading Spark..."
   curl -O http://d3kbcqa49mib13.cloudfront.net/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
@@ -332,10 +321,12 @@
       sudo -u postgres createuser -P pio
       echo -e "\033[1;36mPlease update $pio_dir/conf/pio-env.sh if you did not enter the default password\033[0m"
     else
-      echo -e "\033[1;31mYour disribution not yet supported for automatic install :(\033[0m"
+      echo -e "\033[1;31mYour distribution not yet supported for automatic install :(\033[0m"
       echo -e "\033[1;31mPlease install PostgreSQL manually!\033[0m"
       exit 3
     fi
+    curl -O https://jdbc.postgresql.org/download/postgresql-${POSTGRES_VERSION}.jar
+    mv postgresql-${POSTGRES_VERSION}.jar ${PIO_DIR}/lib/
     ;;
   "$MYSQL")
     if [[ ${distribution} = "$DISTRO_DEBIAN" ]]; then
@@ -350,19 +341,16 @@
       ${SED_CMD} "s|PIO_STORAGE_SOURCES_PGSQL|# PIO_STORAGE_SOURCES_PGSQL|" ${pio_dir}/conf/pio-env.sh
       ${SED_CMD} "s|# PIO_STORAGE_SOURCES_MYSQL|PIO_STORAGE_SOURCES_MYSQL|" ${pio_dir}/conf/pio-env.sh
     else
-      echo -e "\033[1;31mYour disribution not yet supported for automatic install :(\033[0m"
+      echo -e "\033[1;31mYour distribution not yet supported for automatic install :(\033[0m"
       echo -e "\033[1;31mPlease install MySQL manually!\033[0m"
       exit 4
     fi
+    curl -O http://central.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_VERSION}/mysql-connector-java-${MYSQL_VERSION}.jar
+    mv mysql-connector-java-${MYSQL_VERSION}.jar ${PIO_DIR}/lib/
     ;;
   "$ES_HB")
     # Elasticsearch
     echo -e "\033[1;36mStarting Elasticsearch setup in:\033[0m $elasticsearch_dir"
-    if [[ -e elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz ]]; then
-      if confirm "Delete existing elasticsearch-$ELASTICSEARCH_VERSION.tar.gz?"; then
-        rm elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
-      fi
-    fi
     if [[ ! -e elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz ]]; then
       echo "Downloading Elasticsearch..."
       curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
@@ -387,14 +375,9 @@
 
     # HBase
     echo -e "\033[1;36mStarting HBase setup in:\033[0m $hbase_dir"
-    if [[ -e hbase-${HBASE_VERSION}-bin.tar.gz ]]; then
-      if confirm "Delete existing hbase-$HBASE_VERSION-bin.tar.gz?"; then
-        rm hbase-${HBASE_VERSION}-bin.tar.gz
-      fi
-    fi
     if [[ ! -e hbase-${HBASE_VERSION}-bin.tar.gz ]]; then
       echo "Downloading HBase..."
-      curl -O http://archive.apache.org/dist/hbase/hbase-${HBASE_VERSION}/hbase-${HBASE_VERSION}-bin.tar.gz
+      curl -O http://archive.apache.org/dist/hbase/${HBASE_VERSION}/hbase-${HBASE_VERSION}-bin.tar.gz
     fi
     tar zxf hbase-${HBASE_VERSION}-bin.tar.gz
     rm -rf ${hbase_dir}
diff --git a/build.sbt b/build.sbt
index 25ccaa9..08d4e62 100644
--- a/build.sbt
+++ b/build.sbt
@@ -12,13 +12,11 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import SonatypeKeys._
-
 import UnidocKeys._
 
 name := "pio"
 
-version in ThisBuild := "0.9.5"
+version in ThisBuild := "0.9.6"
 
 organization in ThisBuild := "io.prediction"
 
@@ -37,7 +35,7 @@
 
 json4sVersion in ThisBuild := "3.2.10"
 
-sparkVersion in ThisBuild := "1.3.0"
+sparkVersion in ThisBuild := "1.4.0"
 
 lazy val pioBuildInfoSettings = buildInfoSettings ++ Seq(
   sourceGenerators in Compile <+= buildInfo,
@@ -59,21 +57,18 @@
   e2)
 
 lazy val common = (project in file("common")).
-  settings(sonatypeSettings: _*).
   settings(unmanagedClasspath in Test += conf)
 
 lazy val core = (project in file("core")).
   dependsOn(data).
   settings(genjavadocSettings: _*).
   settings(pioBuildInfoSettings: _*).
-  settings(sonatypeSettings: _*).
   enablePlugins(SbtTwirl).
   settings(unmanagedClasspath in Test += conf)
 
 lazy val data = (project in file("data")).
   dependsOn(common).
   settings(genjavadocSettings: _*).
-  settings(sonatypeSettings: _*).
   settings(unmanagedClasspath in Test += conf)
 
 lazy val tools = (project in file("tools")).
@@ -84,7 +79,6 @@
 
 lazy val e2 = (project in file("e2")).
   settings(genjavadocSettings: _*).
-  settings(sonatypeSettings: _*).
   settings(unmanagedClasspath in Test += conf)
 
 scalaJavaUnidocSettings
@@ -165,7 +159,7 @@
 }
 
 pomExtra in ThisBuild := {
-  <url>http://prediction.io</url>
+  <url>https://prediction.io</url>
   <licenses>
     <license>
       <name>Apache 2</name>
@@ -181,7 +175,15 @@
     <developer>
       <id>pio</id>
       <name>The PredictionIO Team</name>
-      <url>http://prediction.io</url>
+      <url>https://prediction.io</url>
     </developer>
   </developers>
 }
+
+concurrentRestrictions in Global := Seq(
+  Tags.limit(Tags.CPU, 1),
+  Tags.limit(Tags.Network, 1),
+  Tags.limit(Tags.Test, 1),
+  Tags.limitAll(1)
+)
+
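
The concurrentRestrictions block above serializes sbt task and test execution. A plausible motivation, though the diff does not state it, is that the new Travis job runs tests against shared PostgreSQL/HBase/Elasticsearch services, where concurrent suites would interfere. On an isolated development machine it could be relaxed with a local override such as this hypothetical local.sbt:

    // Hypothetical local.sbt: allow some test parallelism locally
    concurrentRestrictions in Global := Seq(
      Tags.limit(Tags.Test, 4)
    )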
diff --git a/common/build.sbt b/common/build.sbt
index 2ebc176..9bda9c5 100644
--- a/common/build.sbt
+++ b/common/build.sbt
@@ -13,3 +13,8 @@
 // limitations under the License.
 
 name := "common"
+libraryDependencies ++= Seq(
+  "io.spray"               %% "spray-can"        % "1.3.2",
+  "io.spray"               %% "spray-routing"    % "1.3.2",
+  "org.spark-project.akka" %% "akka-actor"     % "2.3.4-spark"
+)
\ No newline at end of file
diff --git a/common/src/main/resources/application.conf b/common/src/main/resources/application.conf
new file mode 100644
index 0000000..cccab96
--- /dev/null
+++ b/common/src/main/resources/application.conf
@@ -0,0 +1,11 @@
+akka {
+  log-config-on-start = false
+  loggers = ["akka.event.slf4j.Slf4jLogger"]
+  loglevel = "INFO"
+}
+
+spray.can {
+  server {
+    verbose-error-messages = "on"
+  }
+}
diff --git a/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala b/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala
new file mode 100644
index 0000000..752b5e1
--- /dev/null
+++ b/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala
@@ -0,0 +1,55 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.authentication
+
+/**
+  * This is a (very) simple authentication scheme for the dashboard and engine servers.
+  * It is highly recommended to implement a stronger authentication mechanism.
+  */
+
+import com.typesafe.config.ConfigFactory
+import spray.http.HttpRequest
+import spray.routing.authentication._
+import spray.routing.{AuthenticationFailedRejection, RequestContext}
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+
+trait KeyAuthentication {
+
+  object ServerKey {
+    private val config = ConfigFactory.load("server.conf")
+    val get = config.getString("io.prediction.server.accessKey")
+    val param = "accessKey"
+  }
+
+  def withAccessKeyFromFile: RequestContext => Future[Authentication[HttpRequest]] = {
+    ctx: RequestContext =>
+      val accessKeyParamOpt = ctx.request.uri.query.get(ServerKey.param)
+      Future {
+
+        val passedKey = accessKeyParamOpt.getOrElse {
+          Left(AuthenticationFailedRejection(
+            AuthenticationFailedRejection.CredentialsRejected, List()))
+        }
+
+        if (passedKey.equals(ServerKey.get)) Right(ctx.request)
+        else Left(AuthenticationFailedRejection(
+          AuthenticationFailedRejection.CredentialsRejected, List()))
+
+      }
+  }
+}
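
A minimal sketch (illustrative only; the actor name and route body are not part of this change) of how a spray-routing endpoint can be guarded with this trait, mirroring the /stop and /reload routes added to CreateServer.scala later in this diff:

    import akka.actor.Actor
    import spray.routing._
    import io.prediction.authentication.KeyAuthentication

    import scala.concurrent.ExecutionContext.Implicits.global

    class GuardedServiceActor extends Actor with HttpService with KeyAuthentication {
      def actorRefFactory = context
      def receive = runRoute(route)

      val route: Route =
        path("stop") {
          // Rejects the request unless ?accessKey=... matches
          // io.prediction.server.accessKey from conf/server.conf
          authenticate(withAccessKeyFromFile) { request =>
            post {
              complete("Shutting down...")
            }
          }
        }
    }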
diff --git a/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala b/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala
new file mode 100644
index 0000000..f784130
--- /dev/null
+++ b/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala
@@ -0,0 +1,71 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.configuration
+
+/**
+  * Created by ykhodorkovsky on 2/26/16.
+  */
+
+import java.io.FileInputStream
+import java.security.KeyStore
+import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
+
+import com.typesafe.config.ConfigFactory
+import spray.io.ServerSSLEngineProvider
+
+trait SSLConfiguration {
+
+  private val serverConfig = ConfigFactory.load("server.conf")
+
+  private val keyStoreResource =
+    serverConfig.getString("io.prediction.server.ssl-keystore-resource")
+  private val password = serverConfig.getString("io.prediction.server.ssl-keystore-pass")
+  private val keyAlias = serverConfig.getString("io.prediction.server.ssl-key-alias")
+
+  private val keyStore = {
+
+    // Loading keystore from specified file
+    val clientStore = KeyStore.getInstance("JKS")
+    val inputStream = new FileInputStream(
+      getClass().getClassLoader().getResource(keyStoreResource).getFile())
+    clientStore.load(inputStream, password.toCharArray)
+    inputStream.close()
+    clientStore
+  }
+
+  // Creating SSL context
+  implicit def sslContext: SSLContext = {
+    val context = SSLContext.getInstance("TLS")
+    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+    val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+    kmf.init(keyStore, password.toCharArray)
+    tmf.init(keyStore)
+    context.init(kmf.getKeyManagers, tmf.getTrustManagers, null)
+    context
+  }
+
+  // Provide an implicit SSLEngine with selected cipher suites and protocols
+  implicit def sslEngineProvider: ServerSSLEngineProvider = {
+    ServerSSLEngineProvider { engine =>
+      engine.setEnabledCipherSuites(Array(
+        "TLS_RSA_WITH_AES_256_CBC_SHA",
+        "TLS_ECDH_ECDSA_WITH_RC4_128_SHA",
+        "TLS_RSA_WITH_AES_128_CBC_SHA"))
+      engine.setEnabledProtocols(Array("TLSv1", "TLSv1.2", "TLSv1.1"))
+      engine
+    }
+  }
+}
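
The implicit SSLContext and ServerSSLEngineProvider above are picked up by spray-can when a server is bound with SSL encryption enabled, which is what MasterActor does later in this diff. A condensed sketch (the helper object is illustrative, not part of this change):

    import akka.actor.{ActorRef, ActorSystem}
    import akka.io.IO
    import spray.can.Http
    import spray.can.server.ServerSettings
    import io.prediction.configuration.SSLConfiguration

    object HttpsBinder extends SSLConfiguration {
      // Http.Bind resolves the implicit ServerSSLEngineProvider
      // supplied by the SSLConfiguration mixin.
      def bind(handler: ActorRef, ip: String, port: Int)(implicit system: ActorSystem): Unit = {
        val settings = ServerSettings(system)
        IO(Http) ! Http.Bind(
          handler,
          interface = ip,
          port = port,
          settings = Some(settings.copy(sslEncryption = true)))
      }
    }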
diff --git a/conf/keystore.jks b/conf/keystore.jks
new file mode 100644
index 0000000..64ec17b
--- /dev/null
+++ b/conf/keystore.jks
Binary files differ
diff --git a/conf/pio-env.sh.template b/conf/pio-env.sh.template
index f259b9e..f24c2d1 100644
--- a/conf/pio-env.sh.template
+++ b/conf/pio-env.sh.template
@@ -10,6 +10,9 @@
 # SPARK_HOME: Apache Spark is a hard dependency and must be configured.
 SPARK_HOME=$PIO_HOME/vendors/spark-1.5.1-bin-hadoop2.6
 
+POSTGRES_JDBC_DRIVER=$PIO_HOME/lib/postgresql-9.4-1204.jdbc41.jar
+MYSQL_JDBC_DRIVER=$PIO_HOME/lib/mysql-connector-java-5.1.37.jar
+
 # ES_CONF_DIR: You must configure this if you have advanced configuration for
 #              your Elasticsearch setup.
 # ES_CONF_DIR=/opt/elasticsearch
diff --git a/conf/server.conf b/conf/server.conf
new file mode 100644
index 0000000..2d2f628
--- /dev/null
+++ b/conf/server.conf
@@ -0,0 +1,13 @@
+# Engine and dashboard Server related configurations
+io.prediction.server {
+
+  # This access key is used by io.prediction.authentication.KeyAuthentication
+# to authenticate the Evaluation Dashboard and the Engine Server /stop and /reload endpoints.
+# The key should be passed as a query string parameter.
+  accessKey = ""
+
+  # Configs used by io.prediction.configuration.SSLConfiguration
+  ssl-keystore-resource = "keystore.jks"
+  ssl-keystore-pass = "pioserver"
+  ssl-key-alias = "selfsigned"
+}
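
These keys are read by KeyAuthentication and SSLConfiguration above. For illustration, a client calling the protected endpoints would look like the undeploy() logic added to CreateServer.scala below (a sketch; host, port and key are placeholders):

    import scalaj.http.{Http, HttpOptions}

    object StopEngine {
      def apply(ip: String, port: Int, accessKey: String): Int =
        Http(s"https://$ip:$port/stop")
          .option(HttpOptions.allowUnsafeSSL) // conf/keystore.jks ships a self-signed certificate
          .param("accessKey", accessKey)      // must match io.prediction.server.accessKey
          .method("POST")
          .asString
          .code
    }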
diff --git a/core/build.sbt b/core/build.sbt
index 53b98a6..6257c1e 100644
--- a/core/build.sbt
+++ b/core/build.sbt
@@ -15,16 +15,16 @@
 name := "core"
 
 libraryDependencies ++= Seq(
-  "com.github.scopt"       %% "scopt"            % "3.2.0",
-  "com.google.code.gson"    % "gson"             % "2.2.4",
+  "com.github.scopt"       %% "scopt"            % "3.3.0",
+  "com.google.code.gson"    % "gson"             % "2.5",
   "com.google.guava"        % "guava"            % "18.0",
-  "com.twitter"            %% "chill"            % "0.5.0"
+  "com.twitter"            %% "chill"            % "0.7.2"
     exclude("com.esotericsoftware.minlog", "minlog"),
-  "com.twitter"            %% "chill-bijection"  % "0.5.0",
-  "de.javakaffee"           % "kryo-serializers" % "0.28",
+  "com.twitter"            %% "chill-bijection"  % "0.7.2",
+  "de.javakaffee"           % "kryo-serializers" % "0.37",
   "commons-io"              % "commons-io"       % "2.4",
-  "io.spray"               %% "spray-can"        % "1.3.2",
-  "io.spray"               %% "spray-routing"    % "1.3.2",
+  "io.spray"               %% "spray-can"        % "1.3.3",
+  "io.spray"               %% "spray-routing"    % "1.3.3",
   "net.jodah"               % "typetools"        % "0.3.1",
   "org.apache.spark"       %% "spark-core"       % sparkVersion.value % "provided",
   "org.clapper"            %% "grizzled-slf4j"   % "1.0.2",
@@ -33,7 +33,7 @@
   "org.json4s"             %% "json4s-ext"       % json4sVersion.value,
   "org.scalaj"             %% "scalaj-http"      % "1.1.0",
   "org.scalatest"          %% "scalatest"        % "2.1.6" % "test",
-  "org.slf4j"               % "slf4j-log4j12"    % "1.7.7",
+  "org.slf4j"               % "slf4j-log4j12"    % "1.7.13",
   "org.specs2"             %% "specs2"           % "2.3.13" % "test")
 
 //testOptions := Seq(Tests.Filter(s => Seq("Dev").exists(s.contains(_))))
diff --git a/core/src/main/resources/application.conf b/core/src/main/resources/application.conf
deleted file mode 100644
index 93dd8d4..0000000
--- a/core/src/main/resources/application.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-akka {
-  loggers = ["akka.event.slf4j.Slf4jLogger"]
-  loglevel = "INFO"
-}
diff --git a/core/src/main/scala/io/prediction/controller/Deployment.scala b/core/src/main/scala/io/prediction/controller/Deployment.scala
index b9d7c08..49e14d5 100644
--- a/core/src/main/scala/io/prediction/controller/Deployment.scala
+++ b/core/src/main/scala/io/prediction/controller/Deployment.scala
@@ -34,7 +34,7 @@
   }
 
   /** Returns the [[Engine]] contained in this [[Deployment]]. */
-  private [prediction] 
+  private [prediction]
   def engine: BaseEngine[_, _, _, _] = {
     assert(engineSet, "Engine not set")
     _engine
diff --git a/core/src/main/scala/io/prediction/controller/Engine.scala b/core/src/main/scala/io/prediction/controller/Engine.scala
index 92d630f..5cc2e31 100644
--- a/core/src/main/scala/io/prediction/controller/Engine.scala
+++ b/core/src/main/scala/io/prediction/controller/Engine.scala
@@ -251,6 +251,7 @@
               logger.info(
                 s"Loaded model ${m.getClass.getName} for algorithm " +
                 s"${algo.getClass.getName}")
+              sc.stop // model loaded; stop the SparkContext as it is no longer needed
               m
             } catch {
               case e: NullPointerException =>
@@ -309,7 +310,7 @@
     *         result, and actual result tuple tuple.
     */
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams)
   : Seq[(EI, RDD[(Q, P, A)])] = {
@@ -762,7 +763,7 @@
       algoMap.mapValues(_.trainBase(sc,pd))
     }}
 
-    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas => 
+    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas =>
       qas.map { case (qx, (q, a)) => (qx, (serving.supplementBase(q), a)) }
     }
 
diff --git a/core/src/main/scala/io/prediction/controller/Evaluation.scala b/core/src/main/scala/io/prediction/controller/Evaluation.scala
index 8d22464..a6ee9a7 100644
--- a/core/src/main/scala/io/prediction/controller/Evaluation.scala
+++ b/core/src/main/scala/io/prediction/controller/Evaluation.scala
@@ -32,7 +32,7 @@
   protected [this] var _evaluatorSet: Boolean = false
   protected [this] var _evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = _
 
-  private [prediction] 
+  private [prediction]
   def evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = {
     assert(_evaluatorSet, "Evaluator not set")
     _evaluator
@@ -60,7 +60,7 @@
     */
   def engineEvaluator_=[EI, Q, P, A, R <: BaseEvaluatorResult](
     engineEvaluator: (
-      BaseEngine[EI, Q, P, A], 
+      BaseEngine[EI, Q, P, A],
       BaseEvaluator[EI, Q, P, A, R])) {
     assert(!_evaluatorSet, "Evaluator can be set at most once")
     engine = engineEvaluator._1
@@ -88,7 +88,7 @@
   def engineMetric_=[EI, Q, P, A](
     engineMetric: (BaseEngine[EI, Q, P, A], Metric[EI, Q, P, A, _])) {
     engineEvaluator = (
-      engineMetric._1, 
+      engineMetric._1,
       MetricEvaluator(
         metric = engineMetric._2,
         otherMetrics = Seq[Metric[EI, Q, P, A, _]](),
@@ -112,8 +112,8 @@
     */
   def engineMetrics_=[EI, Q, P, A](
     engineMetrics: (
-      BaseEngine[EI, Q, P, A], 
-      Metric[EI, Q, P, A, _], 
+      BaseEngine[EI, Q, P, A],
+      Metric[EI, Q, P, A, _],
       Seq[Metric[EI, Q, P, A, _]])) {
     engineEvaluator = (
       engineMetrics._1,
diff --git a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
index 5eb21b1..8e9727e 100644
--- a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
+++ b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
@@ -42,11 +42,11 @@
 @Experimental
 object FastEvalEngineWorkflow  {
   @transient lazy val logger = Logger[this.type]
-  
+
   type EX = Int
   type AX = Int
   type QX = Long
- 
+
   case class DataSourcePrefix(dataSourceParams: (String, Params)) {
     def this(pp: PreparatorPrefix) = this(pp.dataSourceParams)
     def this(ap: AlgorithmsPrefix) = this(ap.dataSourceParams)
@@ -60,7 +60,7 @@
       this(ap.dataSourceParams, ap.preparatorParams)
     }
   }
-  
+
   case class AlgorithmsPrefix(
     dataSourceParams: (String, Params),
     preparatorParams: (String, Params),
@@ -90,7 +90,7 @@
 
     if (!cache.contains(prefix)) {
       val dataSource = Doer(
-        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1), 
+        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1),
         prefix.dataSourceParams._2)
 
       val result = dataSource
@@ -130,7 +130,7 @@
   def computeAlgorithmsResult[TD, EI, PD, Q, P, A](
     workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
     prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
-    
+
     val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = prefix.algorithmParamsList
       .map { case (algoName, algoParams) => {
         try {
@@ -162,12 +162,12 @@
     val algoModelsMap: Map[EX, Map[AX, Any]] = getPreparatorResult(
       workflow,
       new PreparatorPrefix(prefix))
-    .mapValues { 
+    .mapValues {
       pd => algoMap.mapValues(_.trainBase(workflow.sc,pd))
     }
 
     // Predict
-    val dataSourceResult = 
+    val dataSourceResult =
       FastEvalEngineWorkflow.getDataSourceResult(
         workflow = workflow,
         prefix = new DataSourcePrefix(prefix))
@@ -177,22 +177,22 @@
     .map { case (ex, (td, ei, iqaRDD)) => {
       val modelsMap: Map[AX, Any] = algoModelsMap(ex)
       val qs: RDD[(QX, Q)] = iqaRDD.mapValues(_._1)
-  
+
       val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
       .map { ax => {
         val algo = algoMap(ax)
         val model = modelsMap(ax)
         val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(
-          workflow.sc, 
+          workflow.sc,
           model,
           qs)
-    
-        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map { 
+
+        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map {
           case (qx, p) => (qx, (ax, p))
         }
         predicts
       }}
-        
+
       val unionAlgoPredicts: RDD[(QX, Seq[P])] = workflow.sc
       .union(algoPredicts)
       .groupByKey
@@ -205,7 +205,7 @@
     }}
     .seq
     .toMap
-    
+
     algoResult
   }
 
@@ -262,13 +262,13 @@
     }
     cache(prefix)
   }
-  
+
   def get[TD, EI, PD, Q, P, A](
     workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
     engineParamsList: Seq[EngineParams])
   : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
     engineParamsList.map { engineParams => {
-      (engineParams, 
+      (engineParams,
         getServingResult(workflow, new ServingPrefix(engineParams)))
     }}
   }
@@ -286,12 +286,12 @@
   val workflowParams: WorkflowParams) extends Serializable {
 
   import io.prediction.controller.FastEvalEngineWorkflow._
-  
+
   type DataSourceResult = Map[EX, (TD, EI, RDD[(QX, (Q, A))])]
   type PreparatorResult = Map[EX, PD]
   type AlgorithmsResult = Map[EX, RDD[(QX, Seq[P])]]
   type ServingResult = Seq[(EI, RDD[(Q, P, A)])]
-  
+
   val dataSourceCache = MutableHashMap[DataSourcePrefix, DataSourceResult]()
   val preparatorCache = MutableHashMap[PreparatorPrefix, PreparatorResult]()
   val algorithmsCache = MutableHashMap[AlgorithmsPrefix, AlgorithmsResult]()
@@ -320,8 +320,8 @@
   @transient override lazy val logger = Logger[this.type]
 
   override def eval(
-    sc: SparkContext, 
-    engineParams: EngineParams, 
+    sc: SparkContext,
+    engineParams: EngineParams,
     params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])] = {
     logger.info("FastEvalEngine.eval")
     batchEval(sc, Seq(engineParams), params).head._2
diff --git a/core/src/main/scala/io/prediction/controller/LServing.scala b/core/src/main/scala/io/prediction/controller/LServing.scala
index c14f3fd..accee48 100644
--- a/core/src/main/scala/io/prediction/controller/LServing.scala
+++ b/core/src/main/scala/io/prediction/controller/LServing.scala
@@ -18,7 +18,7 @@
 import io.prediction.annotation.Experimental
 import io.prediction.core.BaseServing
 
-/** Base class of serving. 
+/** Base class of serving.
   *
   * @tparam Q Input query class.
   * @tparam P Output prediction class.
diff --git a/core/src/main/scala/io/prediction/controller/Metric.scala b/core/src/main/scala/io/prediction/controller/Metric.scala
index 89ac490..9e56125 100644
--- a/core/src/main/scala/io/prediction/controller/Metric.scala
+++ b/core/src/main/scala/io/prediction/controller/Metric.scala
@@ -60,11 +60,11 @@
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : StatCounter = {
     val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
       }
     )
-   
+
     doubleRDD.stats()
   }
 }
@@ -75,11 +75,11 @@
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : StatCounter = {
     val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
       }
     )
-   
+
     doubleRDD.stats()
   }
 }
@@ -119,7 +119,7 @@
   * @group Evaluation
   */
 abstract class OptionAverageMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double] 
+    extends Metric[EI, Q, P, A, Double]
     with StatsOptionMetricHelper[EI, Q, P, A]
     with QPAMetric[Q, P, A, Option[Double]] {
   /** Implement this method to return a score that will be used for averaging
@@ -189,7 +189,7 @@
   }
 }
 
-/** Returns the sum of the score returned by the calculate method. 
+/** Returns the sum of the score returned by the calculate method.
   *
   * @tparam EI Evaluation information
   * @tparam Q Query
@@ -210,7 +210,7 @@
   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : R = {
     val union: RDD[R] = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
       }
     )
diff --git a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
index e28f7ca..41ccc9c 100644
--- a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
+++ b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
@@ -78,13 +78,13 @@
       new NameParamsSerializer
     write(this)
   }
-  
+
   override def toHTML(): String = html.metric_evaluator().toString()
-  
+
   override def toString: String = {
     implicit lazy val formats = Utils.json4sDefaultFormats +
       new NameParamsSerializer
-    
+
     val bestEPStr = JsonExtractor.engineParamstoPrettyJson(Both, bestEngineParams)
 
     val strings = Seq(
@@ -130,7 +130,7 @@
       otherMetrics,
       None)
   }
-  
+
   def apply[EI, Q, P, A, R](metric: Metric[EI, Q, P, A, R])
   : MetricEvaluator[EI, Q, P, A, R] = {
     new MetricEvaluator[EI, Q, P, A, R](
@@ -194,7 +194,7 @@
 
     val now = DateTime.now
     val evalClassName = evaluation.getClass.getName
-    
+
     val variant = MetricEvaluator.EngineVariant(
       id = s"$evalClassName $now",
       description = "",
@@ -221,14 +221,14 @@
     val evalResultList: Seq[(EngineParams, MetricScores[R])] = engineEvalDataSet
     .zipWithIndex
     .par
-    .map { case ((engineParams, evalDataSet), idx) => 
+    .map { case ((engineParams, evalDataSet), idx) =>
       val metricScores = MetricScores[R](
         metric.calculate(sc, evalDataSet),
         otherMetrics.map(_.calculate(sc, evalDataSet)))
       (engineParams, metricScores)
     }
     .seq
-    
+
     implicit lazy val formats = Utils.json4sDefaultFormats +
       new NameParamsSerializer
 
diff --git a/core/src/main/scala/io/prediction/controller/package.scala b/core/src/main/scala/io/prediction/controller/package.scala
index b344d3e..bcb4b0d 100644
--- a/core/src/main/scala/io/prediction/controller/package.scala
+++ b/core/src/main/scala/io/prediction/controller/package.scala
@@ -119,7 +119,7 @@
     * @group Helper
     */
   type EmptyDataParams = EmptyParams
-  
+
   /** Empty evaluation info.
     * @group Helper
     */
diff --git a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala b/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
index 6d4a619..a3d3fad 100644
--- a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
+++ b/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
@@ -25,7 +25,7 @@
 
 /** :: DeveloperApi ::
   * Base trait with default custom query serializer, exposed to engine developer
-  * via [[io.prediction.controller.CustomQuerySerializer
+  * via [[io.prediction.controller.CustomQuerySerializer]]
   */
 @DeveloperApi
 trait BaseQuerySerializer {
diff --git a/core/src/main/scala/io/prediction/core/BaseEngine.scala b/core/src/main/scala/io/prediction/core/BaseEngine.scala
index 546e889..5356fa7 100644
--- a/core/src/main/scala/io/prediction/core/BaseEngine.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEngine.scala
@@ -44,7 +44,7 @@
     */
   @DeveloperApi
   def train(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     engineInstanceId: String,
     params: WorkflowParams): Seq[Any]
@@ -61,7 +61,7 @@
     */
   @DeveloperApi
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])]
 
@@ -77,11 +77,11 @@
     */
   @DeveloperApi
   def batchEval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParamsList: Seq[EngineParams],
     params: WorkflowParams)
   : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
-    engineParamsList.map { engineParams => 
+    engineParamsList.map { engineParams =>
       (engineParams, eval(sc, engineParams, params))
     }
   }
diff --git a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
index 19eb8a5..23fe826 100644
--- a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
@@ -57,10 +57,10 @@
 trait BaseEvaluatorResult extends Serializable {
   /** A short description of the result */
   def toOneLiner(): String = ""
-  
+
   /** HTML portion of the rendered evaluator results */
   def toHTML(): String = ""
-  
+
   /** JSON portion of the rendered evaluator results */
   def toJSON(): String = ""
 
@@ -68,5 +68,5 @@
     * Indicate if this result is inserted into database
     */
   @Experimental
-  val noSave: Boolean = false 
+  val noSave: Boolean = false
 }
diff --git a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
index 5ef6fb4..ad93b1a 100644
--- a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
@@ -72,7 +72,7 @@
       val instanceId = Storage.getMetaDataEngineInstances
 
       val kryo = KryoInstantiator.newKryoInjection
-      
+
       logger.info("Inserting persistent model")
       Storage.getModelDataModels.insert(Model(
         id = engineInstance.id,
@@ -135,7 +135,7 @@
       evaluator,
       params)
 
-    if (evaluatorResult.noSave) { 
+    if (evaluatorResult.noSave) {
       logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
     } else {
       val evaluatedEvaluationInstance = evaluationInstance.copy(
diff --git a/core/src/main/scala/io/prediction/workflow/CreateServer.scala b/core/src/main/scala/io/prediction/workflow/CreateServer.scala
index 829c6b6..a664187 100644
--- a/core/src/main/scala/io/prediction/workflow/CreateServer.scala
+++ b/core/src/main/scala/io/prediction/workflow/CreateServer.scala
@@ -30,8 +30,11 @@
 import com.twitter.chill.KryoBase
 import com.twitter.chill.KryoInjection
 import com.twitter.chill.ScalaKryoInstantiator
+import com.typesafe.config.ConfigFactory
 import de.javakaffee.kryoserializers.SynchronizedCollectionsSerializer
 import grizzled.slf4j.Logging
+import io.prediction.authentication.KeyAuthentication
+import io.prediction.configuration.SSLConfiguration
 import io.prediction.controller.Engine
 import io.prediction.controller.Params
 import io.prediction.controller.Utils
@@ -47,10 +50,12 @@
 import org.json4s.native.JsonMethods._
 import org.json4s.native.Serialization.write
 import spray.can.Http
+import spray.can.server.ServerSettings
 import spray.http.MediaTypes._
 import spray.http._
 import spray.httpx.Json4sSupport
 import spray.routing._
+import spray.routing.authentication.{UserPass, BasicAuth}
 
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
@@ -60,6 +65,7 @@
 import scala.util.Failure
 import scala.util.Random
 import scala.util.Success
+import scalaj.http.HttpOptions
 
 class KryoInstantiator(classLoader: ClassLoader) extends ScalaKryoInstantiator {
   override def newKryo(): KryoBase = {
@@ -103,6 +109,7 @@
 case class ReloadServer()
 case class UpgradeCheck()
 
+
 object CreateServer extends Logging {
   val actorSystem = ActorSystem("pio-server")
   val engineInstances = Storage.getMetaDataEngineInstances
@@ -274,11 +281,11 @@
   }
 }
 
-class MasterActor(
+class MasterActor(
     sc: ServerConfig,
     engineInstance: EngineInstance,
     engineFactoryName: String,
-    manifest: EngineManifest) extends Actor {
+    manifest: EngineManifest) extends Actor with SSLConfiguration with KeyAuthentication {
   val log = Logging(context.system, this)
   implicit val system = context.system
   var sprayHttpListener: Option[ActorRef] = None
@@ -286,11 +293,14 @@
   var retry = 3
 
   def undeploy(ip: String, port: Int): Unit = {
-    val serverUrl = s"http://${ip}:${port}"
+    val serverUrl = s"https://${ip}:${port}"
     log.info(
       s"Undeploying any existing engine instance at $serverUrl")
     try {
-      val code = scalaj.http.Http(s"$serverUrl/stop").asString.code
+      val code = scalaj.http.Http(s"$serverUrl/stop")
+        .option(HttpOptions.allowUnsafeSSL)
+        .param(ServerKey.param, ServerKey.get)
+        .method("POST").asString.code
       code match {
         case 200 => Unit
         case 404 => log.error(
@@ -321,7 +331,12 @@
       self ! BindServer()
     case x: BindServer =>
       currentServerActor map { actor =>
-        IO(Http) ! Http.Bind(actor, interface = sc.ip, port = sc.port)
+        val settings = ServerSettings(system)
+        IO(Http) ! Http.Bind(
+          actor,
+          interface = sc.ip,
+          port = sc.port,
+          settings = Some(settings.copy(sslEncryption = true)))
       } getOrElse {
         log.error("Cannot bind a non-existing server backend.")
       }
@@ -345,7 +360,12 @@
         val actor = createServerActor(sc, lr, engineFactoryName, manifest)
         sprayHttpListener.map { l =>
           l ! Http.Unbind(5.seconds)
-          IO(Http) ! Http.Bind(actor, interface = sc.ip, port = sc.port)
+          val settings = ServerSettings(system)
+          IO(Http) ! Http.Bind(
+            actor,
+            interface = sc.ip,
+            port = sc.port,
+            settings = Some(settings.copy(sslEncryption = true)))
           currentServerActor.get ! Kill
           currentServerActor = Some(actor)
         } getOrElse {
@@ -357,7 +377,7 @@
           s"${manifest.version}. Abort reloading.")
       }
     case x: Http.Bound =>
-      val serverUrl = s"http://${sc.ip}:${sc.port}"
+      val serverUrl = s"https://${sc.ip}:${sc.port}"
       log.info(s"Engine is deployed and running. Engine API is live at ${serverUrl}.")
       sprayHttpListener = Some(sender)
     case x: Http.CommandFailed =>
@@ -411,7 +431,7 @@
     val algorithmsParams: Seq[Params],
     val models: Seq[Any],
     val serving: BaseServing[Q, P],
-    val servingParams: Params) extends Actor with HttpService {
+    val servingParams: Params) extends Actor with HttpService with KeyAuthentication {
   val serverStartTime = DateTime.now
   val log = Logging(context.system, this)
 
@@ -641,20 +661,24 @@
       }
     } ~
     path("reload") {
-      get {
-        complete {
-          context.actorSelection("/user/master") ! ReloadServer()
-          "Reloading..."
+      authenticate(withAccessKeyFromFile) { request =>
+        post {
+          complete {
+            context.actorSelection("/user/master") ! ReloadServer()
+            "Reloading..."
+          }
         }
       }
     } ~
     path("stop") {
-      get {
-        complete {
-          context.system.scheduler.scheduleOnce(1.seconds) {
-            context.actorSelection("/user/master") ! StopServer()
+      authenticate(withAccessKeyFromFile) { request =>
+        post {
+          complete {
+            context.system.scheduler.scheduleOnce(1.seconds) {
+              context.actorSelection("/user/master") ! StopServer()
+            }
+            "Shutting down..."
           }
-          "Shutting down..."
         }
       }
     } ~
diff --git a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
index ccb600f..350a430 100644
--- a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
@@ -15,7 +15,7 @@
 
 package io.prediction.workflow
 
-import io.prediction.annotation.Experimental   
+import io.prediction.annotation.Experimental
 // FIXME(yipjustin): Remove wildcard import.
 import io.prediction.core._
 import io.prediction.controller._
@@ -32,7 +32,7 @@
   @transient lazy val logger = Logger[this.type]
 
   def train(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     engineInstanceId: String,
     params: WorkflowParams): Seq[Any] = {
@@ -40,7 +40,7 @@
   }
 
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams)
   : Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])] = {
@@ -56,7 +56,7 @@
   def evaluateBase(
     sc: SparkContext,
     evaluation: Evaluation,
-    engineEvalDataSet: 
+    engineEvalDataSet:
         Seq[(EngineParams, Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])])],
     params: WorkflowParams): FakeEvalResult = {
     f(sc)
@@ -66,36 +66,36 @@
 
 @Experimental
 private[prediction] case class FakeEvalResult() extends BaseEvaluatorResult {
-  override val noSave: Boolean = true 
+  override val noSave: Boolean = true
 }
 
 /** FakeRun allows user to implement custom function under the exact enviroment
-  * as other PredictionIO workflow. 
+  * as other PredictionIO workflow.
   *
-  * Useful for developing new features. Only need to extend this trait and 
-  * implement a function: (SparkContext => Unit). For example, the code below 
+  * Useful for developing new features. Only need to extend this trait and
+  * implement a function: (SparkContext => Unit). For example, the code below
   * can be run with `pio eval HelloWorld`.
   *
   * {{{
   * object HelloWorld extends FakeRun {
   *   // func defines the function pio runs, must have signature (SparkContext => Unit).
   *   func = f
-  * 
+  *
   *   def f(sc: SparkContext): Unit {
   *     val logger = Logger[this.type]
   *     logger.info("HelloWorld")
   *   }
   * }
-  * }}} 
-  * 
+  * }}}
+  *
   */
 @Experimental
 trait FakeRun extends Evaluation with EngineParamsGenerator {
   private[this] var _runner: FakeRunner = _
 
   def runner: FakeRunner = _runner
-  def runner_=(r: FakeRunner) { 
-    engineEvaluator = (new FakeEngine(), r) 
+  def runner_=(r: FakeRunner) {
+    engineEvaluator = (new FakeEngine(), r)
     engineParamsList = Seq(new EngineParams())
   }
 
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala b/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
index 456c993..d93b9eb 100644
--- a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
+++ b/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
@@ -261,6 +261,8 @@
     val thirdPartyPaths = Seq(
       "PIO_CONF_DIR",
       "ES_CONF_DIR",
+      "POSTGRES_JDBC_DRIVER",
+      "MYSQL_JDBC_DRIVER",
       "HADOOP_CONF_DIR",
       "HBASE_CONF_DIR")
     thirdPartyPaths.map(p =>
@@ -292,7 +294,7 @@
         "Since 0.8.4, the 'params' field is required in engine.json" +
         " in order to specify parameters for DataSource, Preparator or" +
         " Serving.\n" +
-        "Please go to http://docs.prediction.io/resources/upgrade/" +
+        "Please go to https://docs.prediction.io/resources/upgrade/" +
         " for detailed instruction of how to change engine.json.")
       sys.exit(1)
     }
@@ -386,7 +388,7 @@
     val component: String,
     val engine: String) extends Runnable with Logging {
   val version = BuildInfo.version
-  val versionsHost = "http://direct.prediction.io/"
+  val versionsHost = "https://direct.prediction.io/"
 
   def run(): Unit = {
     val url = if (engine == "") {
diff --git a/core/src/test/scala/io/prediction/controller/EngineTest.scala b/core/src/test/scala/io/prediction/controller/EngineTest.scala
index bd5125f..cc84249 100644
--- a/core/src/test/scala/io/prediction/controller/EngineTest.scala
+++ b/core/src/test/scala/io/prediction/controller/EngineTest.scala
@@ -167,62 +167,6 @@
       Unit, pModel21, pModel22, pModel23, model24, model25)
   }
 
-  test("Engine.prepareDeploy PAlgo") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map(
-        "PAlgo2" -> classOf[PAlgo2],
-        "PAlgo3" -> classOf[PAlgo3],
-        "NAlgo2" -> classOf[NAlgo2],
-        "NAlgo3" -> classOf[NAlgo3]
-      ),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(0),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("PAlgo2", PAlgo2.Params(20)),
-        ("PAlgo3", PAlgo3.Params(21)),
-        ("PAlgo3", PAlgo3.Params(22)),
-        ("NAlgo2", NAlgo2.Params(23)),
-        ("NAlgo3", NAlgo3.Params(24)),
-        ("NAlgo3", NAlgo3.Params(25))
-      ),
-      servingParams = LServing1.Params(3))
-
-    val pd = ProcessedData(1, TrainingData(0))
-    val model20 = PAlgo2.Model(20, pd)
-    val model21 = PAlgo3.Model(21, pd)
-    val model22 = PAlgo3.Model(22, pd)
-    val model23 = NAlgo2.Model(23, pd)
-    val model24 = NAlgo3.Model(24, pd)
-    val model25 = NAlgo3.Model(25, pd)
-
-    val rand = new Random()
-
-    val fakeEngineInstanceId = s"FakeInstanceId-${rand.nextLong()}"
-
-    val persistedModels = engine.train(
-      sc, 
-      engineParams,
-      engineInstanceId = fakeEngineInstanceId,
-      params = WorkflowParams()
-    )
-
-    val deployableModels = engine.prepareDeploy(
-      sc,
-      engineParams,
-      fakeEngineInstanceId,
-      persistedModels,
-      params = WorkflowParams()
-    )
-
-    deployableModels should contain theSameElementsAs Seq(
-      model20, model21, model22, model23, model24, model25)
-  }
-
   test("Engine.eval") {
     val engine = new Engine(
       classOf[PDataSource2],
@@ -274,6 +218,62 @@
       }
     }}
   }
+
+  test("Engine.prepareDeploy PAlgo") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map(
+        "PAlgo2" -> classOf[PAlgo2],
+        "PAlgo3" -> classOf[PAlgo3],
+        "NAlgo2" -> classOf[NAlgo2],
+        "NAlgo3" -> classOf[NAlgo3]
+      ),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(0),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("PAlgo2", PAlgo2.Params(20)),
+        ("PAlgo3", PAlgo3.Params(21)),
+        ("PAlgo3", PAlgo3.Params(22)),
+        ("NAlgo2", NAlgo2.Params(23)),
+        ("NAlgo3", NAlgo3.Params(24)),
+        ("NAlgo3", NAlgo3.Params(25))
+      ),
+      servingParams = LServing1.Params(3))
+
+    val pd = ProcessedData(1, TrainingData(0))
+    val model20 = PAlgo2.Model(20, pd)
+    val model21 = PAlgo3.Model(21, pd)
+    val model22 = PAlgo3.Model(22, pd)
+    val model23 = NAlgo2.Model(23, pd)
+    val model24 = NAlgo3.Model(24, pd)
+    val model25 = NAlgo3.Model(25, pd)
+
+    val rand = new Random()
+
+    val fakeEngineInstanceId = s"FakeInstanceId-${rand.nextLong()}"
+
+    val persistedModels = engine.train(
+      sc,
+      engineParams,
+      engineInstanceId = fakeEngineInstanceId,
+      params = WorkflowParams()
+    )
+
+    val deployableModels = engine.prepareDeploy(
+      sc,
+      engineParams,
+      fakeEngineInstanceId,
+      persistedModels,
+      params = WorkflowParams()
+    )
+
+    deployableModels should contain theSameElementsAs Seq(
+      model20, model21, model22, model23, model24, model25)
+  }
 }
 
 class EngineTrainSuite extends FunSuite with SharedSparkContext {
diff --git a/data/build.sbt b/data/build.sbt
index aae2666..57ef321 100644
--- a/data/build.sbt
+++ b/data/build.sbt
@@ -15,13 +15,13 @@
 name := "data"
 
 libraryDependencies ++= Seq(
-  "com.github.nscala-time" %% "nscala-time"    % "2.0.0",
+  "com.github.nscala-time" %% "nscala-time"    % "2.6.0",
   "commons-codec"           % "commons-codec"  % "1.9",
-  "io.spray"               %% "spray-can"      % "1.3.2",
-  "io.spray"               %% "spray-routing"  % "1.3.2",
-  "io.spray"               %% "spray-testkit"  % "1.3.2" % "test",
-  "mysql"                   % "mysql-connector-java" % "5.1.35",
-  "org.apache.hadoop"       % "hadoop-common"  % "2.5.0"
+  "io.spray"               %% "spray-can"      % "1.3.3",
+  "io.spray"               %% "spray-routing"  % "1.3.3",
+  "io.spray"               %% "spray-testkit"  % "1.3.3" % "test",
+  "mysql"                   % "mysql-connector-java" % "5.1.37",
+  "org.apache.hadoop"       % "hadoop-common"  % "2.6.2"
     exclude("javax.servlet", "servlet-api"),
   "org.apache.hbase"        % "hbase-common"   % "0.98.5-hadoop2",
   "org.apache.hbase"        % "hbase-client"   % "0.98.5-hadoop2"
@@ -34,7 +34,7 @@
     exclude("org.mortbay.jetty", "servlet-api-2.5")
     exclude("org.mortbay.jetty", "jsp-api-2.1")
     exclude("org.mortbay.jetty", "jsp-2.1"),
-  "org.apache.zookeeper"    % "zookeeper"      % "3.4.6"
+  "org.apache.zookeeper"    % "zookeeper"      % "3.4.7"
     exclude("org.slf4j", "slf4j-api")
     exclude("org.slf4j", "slf4j-log4j12"),
   "org.apache.spark"       %% "spark-core"     % sparkVersion.value % "provided",
@@ -43,10 +43,10 @@
   "org.elasticsearch"       % "elasticsearch"  % elasticsearchVersion.value,
   "org.json4s"             %% "json4s-native"  % json4sVersion.value,
   "org.json4s"             %% "json4s-ext"     % json4sVersion.value,
-  "org.postgresql"          % "postgresql"     % "9.4-1201-jdbc41",
+  "org.postgresql"          % "postgresql"     % "9.4-1204-jdbc41",
   "org.scalatest"          %% "scalatest"      % "2.1.6" % "test",
-  "org.scalikejdbc"        %% "scalikejdbc"    % "2.2.6",
-  "org.slf4j"               % "slf4j-log4j12"  % "1.7.7",
+  "org.scalikejdbc"        %% "scalikejdbc"    % "2.3.2",
+  "org.slf4j"               % "slf4j-log4j12"  % "1.7.13",
   "org.spark-project.akka" %% "akka-actor"     % "2.3.4-spark",
   "org.specs2"             %% "specs2"         % "2.3.13" % "test")
 
diff --git a/data/src/main/scala/io/prediction/data/api/EventServer.scala b/data/src/main/scala/io/prediction/data/api/EventServer.scala
index 0ebc279..139f964 100644
--- a/data/src/main/scala/io/prediction/data/api/EventServer.scala
+++ b/data/src/main/scala/io/prediction/data/api/EventServer.scala
@@ -15,12 +15,12 @@
 
 package io.prediction.data.api
 
+import akka.event.Logging
+import java.util.Base64
+
 import java.util.concurrent.TimeUnit
 
-import akka.actor.Actor
-import akka.actor.ActorSystem
-import akka.actor.Props
-import akka.event.Logging
+import akka.actor._
 import akka.io.IO
 import akka.pattern.ask
 import akka.util.Timeout
@@ -49,7 +49,7 @@
 import scala.concurrent.Future
 import scala.util.{Try, Success, Failure}
 
-class EventServiceActor(
+class EventServiceActor(
     val eventClient: LEvents,
     val accessKeysClient: AccessKeys,
     val channelsClient: Channels,
@@ -64,35 +64,41 @@
       new DateTimeJson4sSupport.Serializer
   }
 
+
   val MaxNumberOfEventsPerBatchRequest = 50
 
-  val log = Logging(context.system, this)
+  val logger = Logging(context.system, this)
 
   // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
   // Futures
-  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
+  implicit def executionContext: ExecutionContext = context.dispatcher
+
   implicit val timeout = Timeout(5, TimeUnit.SECONDS)
 
   val rejectionHandler = Common.rejectionHandler
 
   val jsonPath = """(.+)\.json$""".r
-  val formPath = """(.+)$""".r
+  val formPath = """(.+)\.form$""".r
 
-  val pluginContext = EventServerPluginContext(log)
+  val pluginContext = EventServerPluginContext(logger)
+
+  private lazy val base64Decoder = Base64.getDecoder
 
   case class AuthData(appId: Int, channelId: Option[Int], events: Seq[String])
 
-  /* with accessKey in query, return appId if succeed */
+  /* with accessKey in query/header, return appId if succeed */
   def withAccessKey: RequestContext => Future[Authentication[AuthData]] = {
     ctx: RequestContext =>
       val accessKeyParamOpt = ctx.request.uri.query.get("accessKey")
       val channelParamOpt = ctx.request.uri.query.get("channel")
       Future {
+        // with accessKey in query, return appId on success
         accessKeyParamOpt.map { accessKeyParam =>
-          val accessKeyOpt = accessKeysClient.get(accessKeyParam)
-          accessKeyOpt.map { k =>
+          accessKeysClient.get(accessKeyParam).map { k =>
             channelParamOpt.map { ch =>
-              val channelMap = channelsClient.getByAppid(k.appid).map(c => (c.name, c.id)).toMap
+              val channelMap =
+                channelsClient.getByAppid(k.appid)
+                .map(c => (c.name, c.id)).toMap
               if (channelMap.contains(ch)) {
                 Right(AuthData(k.appid, Some(channelMap(ch)), k.events))
               } else {
@@ -101,18 +107,40 @@
             }.getOrElse{
               Right(AuthData(k.appid, None, k.events))
             }
-          }.getOrElse{
-            Left(AuthenticationFailedRejection(
-              AuthenticationFailedRejection.CredentialsRejected, List()))
-          }
-        }.getOrElse { Left(AuthenticationFailedRejection(
-          AuthenticationFailedRejection.CredentialsMissing, List()))
+          }.getOrElse(FailedAuth)
+        }.getOrElse {
+          // with accessKey in header, return appId on success
+          ctx.request.headers.find(_.name == "Authorization").map { authHeader =>
+            authHeader.value.split("Basic ") match {
+              case Array(_, value) =>
+                val appAccessKey =
+                  new String(base64Decoder.decode(value)).trim.split(":")(0)
+                accessKeysClient.get(appAccessKey) match {
+                  case Some(k) => Right(AuthData(k.appid, None, k.events))
+                  case None => FailedAuth
+                }
+
+              case _ => FailedAuth
+            }
+          }.getOrElse(MissedAuth)
         }
       }
   }
 
-  val statsActorRef = context.actorSelection("/user/StatsActor")
-  val pluginsActorRef = context.actorSelection("/user/PluginsActor")
+  private val FailedAuth = Left(
+    AuthenticationFailedRejection(
+      AuthenticationFailedRejection.CredentialsRejected, List()
+    )
+  )
+
+  private val MissedAuth = Left(
+    AuthenticationFailedRejection(
+      AuthenticationFailedRejection.CredentialsMissing, List()
+    )
+  )
+
+  lazy val statsActorRef = actorRefFactory.actorSelection("/user/StatsActor")
+  lazy val pluginsActorRef = actorRefFactory.actorSelection("/user/PluginsActor")
 
   val route: Route =
     pathSingleSlash {
@@ -189,7 +217,7 @@
               val channelId = authData.channelId
               respondWithMediaType(MediaTypes.`application/json`) {
                 complete {
-                  log.debug(s"GET event ${eventId}.")
+                  logger.debug(s"GET event ${eventId}.")
                   val data = eventClient.futureGet(eventId, appId, channelId).map { eventOpt =>
                     eventOpt.map( event =>
                       (StatusCodes.OK, event)
@@ -212,7 +240,7 @@
               val channelId = authData.channelId
               respondWithMediaType(MediaTypes.`application/json`) {
                 complete {
-                  log.debug(s"DELETE event ${eventId}.")
+                  logger.debug(s"DELETE event ${eventId}.")
                   val data = eventClient.futureDelete(eventId, appId, channelId).map { found =>
                     if (found) {
                       (StatusCodes.OK, Map("message" -> "Found"))
@@ -241,7 +269,6 @@
               val events = authData.events
               entity(as[Event]) { event =>
                 complete {
-                  log.debug(s"POST events")
                   if (events.isEmpty || authData.events.contains(event.event)) {
                     pluginContext.inputBlockers.values.foreach(
                       _.process(EventInfo(
@@ -292,7 +319,7 @@
                   limit, reversed) =>
                 respondWithMediaType(MediaTypes.`application/json`) {
                   complete {
-                    log.debug(
+                    logger.debug(
                       s"GET events of appId=${appId} " +
                       s"st=${startTimeStr} ut=${untilTimeStr} " +
                       s"et=${entityType} eid=${entityId} " +
@@ -440,7 +467,6 @@
       }  // stats.json get
     } ~
     path("webhooks" / jsonPath ) { web =>
-
       import Json4sProtocol._
 
       post {
@@ -458,7 +484,7 @@
                       web = web,
                       data = jObj,
                       eventClient = eventClient,
-                      log = log,
+                      log = logger,
                       stats = config.stats,
                       statsActorRef = statsActorRef)
                   }
@@ -480,7 +506,7 @@
                     appId = appId,
                     channelId = channelId,
                     web = web,
-                    log = log)
+                    log = logger)
                 }
               }
             }
@@ -497,7 +523,7 @@
               val channelId = authData.channelId
               respondWithMediaType(MediaTypes.`application/json`) {
                 entity(as[FormData]){ formData =>
-                  // log.debug(formData.toString)
+                  // logger.debug(formData.toString)
                   complete {
                     // respond with JSON
                     import Json4sProtocol._
@@ -508,7 +534,7 @@
                       web = web,
                       data = formData,
                       eventClient = eventClient,
-                      log = log,
+                      log = logger,
                       stats = config.stats,
                       statsActorRef = statsActorRef)
                   }
@@ -533,7 +559,7 @@
                     appId = appId,
                     channelId = channelId,
                     web = web,
-                    log = log)
+                    log = logger)
                 }
               }
             }
@@ -544,7 +570,6 @@
     }
 
   def receive: Actor.Receive = runRoute(route)
-
 }
 
 
@@ -556,8 +581,7 @@
     val eventClient: LEvents,
     val accessKeysClient: AccessKeys,
     val channelsClient: Channels,
-    val config: EventServerConfig) extends Actor {
-  val log = Logging(context.system, this)
+    val config: EventServerConfig) extends Actor with ActorLogging {
   val child = context.actorOf(
     Props(classOf[EventServiceActor],
       eventClient,
@@ -598,7 +622,8 @@
         accessKeysClient,
         channelsClient,
         config),
-      "EventServerActor")
+      "EventServerActor"
+    )
     if (config.stats) system.actorOf(Props[StatsActor], "StatsActor")
     system.actorOf(Props[PluginsActor], "PluginsActor")
     serverActor ! StartServer(config.ip, config.port)
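Editor's note on the hunk above: the new header path accepts `Authorization: Basic base64(accessKey:)`. Below is a minimal, hedged sketch of the same decoding step, using the public `java.util.Base64` API rather than the internal `sun.misc.BASE64Decoder` imported above (the helper name is hypothetical):

```scala
import java.util.Base64

// Decode "Basic <base64(accessKey:)>" and return the access key before the first ':'
def accessKeyFromAuthHeader(headerValue: String): Option[String] =
  headerValue.split("Basic ") match {
    case Array(_, encoded) =>
      val decoded = new String(Base64.getDecoder.decode(encoded)).trim
      decoded.split(":").headOption
    case _ => None
  }

// accessKeyFromAuthHeader("Basic YWJjOg==")  // Some("abc") -- "abc:" Base64-encoded
```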
diff --git a/data/src/main/scala/io/prediction/data/api/Webhooks.scala b/data/src/main/scala/io/prediction/data/api/Webhooks.scala
index 7928bfe..ff18888 100644
--- a/data/src/main/scala/io/prediction/data/api/Webhooks.scala
+++ b/data/src/main/scala/io/prediction/data/api/Webhooks.scala
@@ -60,7 +60,7 @@
 
     eventFuture.flatMap { eventOpt =>
       if (eventOpt.isEmpty) {
-        Future {
+        Future successful {
           val message = s"webhooks connection for ${web} is not supported."
           (StatusCodes.NotFound, Map("message" -> message))
         }
diff --git a/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala b/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
index 7a7d2fd..f197e78 100644
--- a/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
+++ b/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
@@ -15,9 +15,10 @@
 
 package io.prediction.data.storage
 
-import io.prediction.annotation.DeveloperApi
+import java.security.SecureRandom
 
-import scala.util.Random
+import io.prediction.annotation.DeveloperApi
+import org.apache.commons.codec.binary.Base64
 
 /** :: DeveloperApi ::
   * Stores mapping of access keys, app IDs, and lists of allowed event names
@@ -61,5 +62,10 @@
   def delete(k: String): Unit
 
   /** Default implementation of key generation */
-  def generateKey: String = Random.alphanumeric.take(64).mkString
+  def generateKey: String = {
+    val sr = SecureRandom.getInstanceStrong
+    val srBytes = Array.fill(48)(0.toByte)
+    sr.nextBytes(srBytes)
+    Base64.encodeBase64URLSafeString(srBytes)
+  }
 }
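A quick sanity check on the new `generateKey`: 48 random bytes encode to exactly 64 URL-safe Base64 characters with no padding. A sketch, assuming commons-codec on the classpath; note that `getInstanceStrong` can block on entropy-starved machines:

```scala
import java.security.SecureRandom
import org.apache.commons.codec.binary.Base64

val sr = SecureRandom.getInstanceStrong  // may block until the OS has gathered enough entropy
val bytes = new Array[Byte](48)
sr.nextBytes(bytes)
val key = Base64.encodeBase64URLSafeString(bytes)

assert(key.length == 64)                          // 48 bytes -> 64 Base64 chars, no '=' padding
assert(!key.contains('+') && !key.contains('/'))  // URL-safe alphabet uses '-' and '_'
```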
diff --git a/data/src/main/scala/io/prediction/data/storage/BiMap.scala b/data/src/main/scala/io/prediction/data/storage/BiMap.scala
index 9153782..cbf3e12 100644
--- a/data/src/main/scala/io/prediction/data/storage/BiMap.scala
+++ b/data/src/main/scala/io/prediction/data/storage/BiMap.scala
@@ -130,7 +130,7 @@
     new BiMap(HashMap(ki : _*))
   }
 
-  /** Create a BiMap[String, Double] from a set of String. The Double index 
+  /** Create a BiMap[String, Double] from a set of String. The Double index
     * starts from 0.
     * @param keys a set of String
     * @return a String to Double BiMap
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
index 80fd06f..ee5e9e7 100644
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
+++ b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
@@ -12,7 +12,7 @@
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
-  
+
 package io.prediction.data.storage.elasticsearch
 
 import grizzled.slf4j.Logging
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala b/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
index 89b3f3b..1027930 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
@@ -12,7 +12,7 @@
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
-  
+
 package org.apache.hadoop.hbase.mapreduce
 
 /* Pretends to be hbase.mapreduce package in order to expose its
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
index 51e5913..3bd3922 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
@@ -31,8 +31,8 @@
     create table if not exists $tableName (
       id varchar(100) not null primary key,
       status text not null,
-      startTime timestamp not null,
-      endTime timestamp not null,
+      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
       engineId text not null,
       engineVersion text not null,
       engineVariant text not null,
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
index 38ffd05..78c2c93 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
@@ -31,8 +31,8 @@
     create table if not exists $tableName (
       id varchar(100) not null primary key,
       status text not null,
-      startTime timestamp not null,
-      endTime timestamp not null,
+      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
       evaluationClass text not null,
       engineParamsGeneratorClass text not null,
       batch text not null,
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
index 7a6de4e..48a624f 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
@@ -32,15 +32,44 @@
 
 /** JDBC implementation of [[LEvents]] */
 class JDBCLEvents(
-    client: String, 
-    config: StorageClientConfig, 
+    client: String,
+    config: StorageClientConfig,
     namespace: String) extends LEvents with Logging {
   implicit private val formats = org.json4s.DefaultFormats
 
   def init(appId: Int, channelId: Option[Int] = None): Boolean = {
+
+    // To use an index, the column must be a VARCHAR of at most 255 characters
+    val useIndex = config.properties.contains("INDEX") &&
+      config.properties("INDEX").equalsIgnoreCase("enabled")
+
+    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
+    val entityIdIndexName = s"idx_${tableName}_ei"
+    val entityTypeIndexName = s"idx_${tableName}_et"
     DB autoCommit { implicit session =>
-      SQL(s"""
-      create table if not exists ${JDBCUtils.eventTableName(namespace, appId, channelId)} (
+      if (useIndex) {
+        SQL(s"""
+      create table if not exists $tableName (
+        id varchar(32) not null primary key,
+        event varchar(255) not null,
+        entityType varchar(255) not null,
+        entityId varchar(255) not null,
+        targetEntityType text,
+        targetEntityId text,
+        properties text,
+        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        eventTimeZone varchar(50) not null,
+        tags text,
+        prId text,
+        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        creationTimeZone varchar(50) not null)""").execute().apply()
+
+        // create index
+        SQL(s"create index $entityIdIndexName on $tableName (entityId)").execute().apply()
+        SQL(s"create index $entityTypeIndexName on $tableName (entityType)").execute().apply()
+      } else {
+        SQL(s"""
+      create table if not exists $tableName (
         id varchar(32) not null primary key,
         event text not null,
         entityType text not null,
@@ -48,12 +77,13 @@
         targetEntityType text,
         targetEntityId text,
         properties text,
-        eventTime timestamp not null,
+        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
         eventTimeZone varchar(50) not null,
         tags text,
         prId text,
-        creationTime timestamp not null,
+        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
         creationTimeZone varchar(50) not null)""").execute().apply()
+      }
       true
     }
   }
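The `INDEX` toggle above is read straight from the storage source's properties. A small sketch of the predicate on a plain map (property values hypothetical):

```scala
// Hypothetical properties, as they would arrive from pio-env.sh via StorageClientConfig
val properties = Map("URL" -> "jdbc:postgresql://localhost/predictionio", "INDEX" -> "enabled")

// Same decision as in init(): only the value "enabled" (case-insensitive) turns indexing on
val useIndex = properties.get("INDEX").exists(_.equalsIgnoreCase("enabled"))
```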
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
index 413895f..b9b26c5 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
@@ -15,20 +15,15 @@
 
 package io.prediction.data.storage.jdbc
 
-import java.sql.DriverManager
-import java.sql.ResultSet
+import java.sql.{DriverManager, ResultSet}
 
 import com.github.nscala_time.time.Imports._
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.PEvents
-import io.prediction.data.storage.StorageClientConfig
+import io.prediction.data.storage.{DataMap, Event, PEvents, StorageClientConfig}
 import org.apache.spark.SparkContext
-import org.apache.spark.rdd.JdbcRDD
-import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.{JdbcRDD, RDD}
+import org.apache.spark.sql.{SQLContext, SaveMode}
 import org.json4s.JObject
 import org.json4s.native.Serialization
-import scalikejdbc._
 
 /** JDBC implementation of [[PEvents]] */
 class JDBCPEvents(client: String, config: StorageClientConfig, namespace: String) extends PEvents {
@@ -119,32 +114,47 @@
   }
 
   def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
-    @transient lazy val tableName =
-      sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
-    events.foreachPartition { events =>
-      val batchParams = events.map { event =>
-        Seq(
-          event.eventId.getOrElse(JDBCUtils.generateId),
-          event.event,
-          event.entityType,
-          event.entityId,
-          event.targetEntityType,
-          event.targetEntityId,
-          Serialization.write(event.properties.toJObject),
-          event.eventTime,
-          event.eventTime.getZone.getID,
-          if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else None,
-          event.prId,
-          event.creationTime,
-          event.creationTime.getZone.getID)
-      }.toSeq
-      DB localTx { implicit session =>
-        @transient lazy val q =
-          sql"""
-          insert into $tableName values(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-          """
-        q.batch(batchParams: _*).apply()
-      }
-    }
+    val sqlContext = new SQLContext(sc)
+
+    import sqlContext.implicits._
+
+    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
+
+    val eventTableColumns = Seq[String](
+        "id"
+      , "event"
+      , "entityType"
+      , "entityId"
+      , "targetEntityType"
+      , "targetEntityId"
+      , "properties"
+      , "eventTime"
+      , "eventTimeZone"
+      , "tags"
+      , "prId"
+      , "creationTime"
+      , "creationTimeZone")
+
+    val eventDF = events.map { event =>
+      (event.eventId.getOrElse(JDBCUtils.generateId)
+        , event.event
+        , event.entityType
+        , event.entityId
+        , event.targetEntityType.orNull
+        , event.targetEntityId.orNull
+        , if (!event.properties.isEmpty) Serialization.write(event.properties.toJObject) else null
+        , new java.sql.Timestamp(event.eventTime.getMillis)
+        , event.eventTime.getZone.getID
+        , if (event.tags.nonEmpty) event.tags.mkString(",") else null
+        , event.prId
+        , new java.sql.Timestamp(event.creationTime.getMillis)
+        , event.creationTime.getZone.getID)
+    }.toDF(eventTableColumns:_*)
+
+    // spark version 1.4.0 or higher
+    val prop = new java.util.Properties
+    prop.setProperty("user", config.properties("USERNAME"))
+    prop.setProperty("password", config.properties("PASSWORD"))
+    eventDF.write.mode(SaveMode.Append).jdbc(client, tableName, prop)
   }
 }
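The rewritten `write` delegates batching to Spark SQL's JDBC data source (available from Spark 1.4.0). A self-contained sketch of the same append pattern, with a hypothetical table and credentials:

```scala
import java.util.Properties

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}

val sc = new SparkContext(new SparkConf().setAppName("jdbc-write-sketch").setMaster("local[2]"))
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

// Toy rows standing in for the event tuples built above
val df = sc.parallelize(Seq(("e1", "view"), ("e2", "buy"))).toDF("id", "event")

val props = new Properties
props.setProperty("user", "pio")        // hypothetical credentials
props.setProperty("password", "secret")

// SaveMode.Append inserts rows without dropping or recreating the target table
df.write.mode(SaveMode.Append).jdbc("jdbc:postgresql://localhost/predictionio", "events_sketch", props)
```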
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
index 79490d0..585ca71 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
@@ -36,10 +36,15 @@
     throw new StorageClientException("The PASSWORD variable is not set!", null)
   }
 
+  // set max size of connection pool
+  val maxSize: Int = config.properties.getOrElse("CONNECTIONS", "8").toInt
+  val settings = ConnectionPoolSettings(maxSize = maxSize)
+
   ConnectionPool.singleton(
     config.properties("URL"),
     config.properties("USERNAME"),
-    config.properties("PASSWORD"))
+    config.properties("PASSWORD"),
+    settings)
   /** JDBC connection URL. Connections are managed by ScalikeJDBC. */
   val client = config.properties("URL")
 }
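For context on the pool change: scalikejdbc's `ConnectionPoolSettings` carries the sizing knobs, and the extra argument to `ConnectionPool.singleton` applies them. A hedged usage sketch with illustrative values:

```scala
import scalikejdbc.{ConnectionPool, ConnectionPoolSettings}

// CONNECTIONS maps onto maxSize; the URL and credentials here are placeholders
val settings = ConnectionPoolSettings(initialSize = 2, maxSize = 8)
ConnectionPool.singleton("jdbc:postgresql://localhost/predictionio", "pio", "secret", settings)
```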
diff --git a/data/src/main/scala/io/prediction/data/store/LEventStore.scala b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
index 1845942..be543eb 100644
--- a/data/src/main/scala/io/prediction/data/store/LEventStore.scala
+++ b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
@@ -87,7 +87,7 @@
   }
 
   /** Reads events generically. If entityType or entityId is not specified, it
-    * results in table scan. 
+    * results in table scan.
     *
     * @param appName return events of this app
     * @param entityType return events of this entityType
diff --git a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
index 1d35d30..b2793a0 100644
--- a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
+++ b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
@@ -76,7 +76,7 @@
     "data[merges][LNAME]": "API",
     "data[merges][INTERESTS]": "Group1,Group2",
     "data[ip_opt]": "10.20.10.30",
-    "data[ip_signup]": "10.20.10.30"    
+    "data[ip_signup]": "10.20.10.30"
     */
 
     // convert to ISO8601 format
@@ -209,11 +209,11 @@
     import org.json4s.JsonDSL._
 
     /*
-    "type": "upemail", 
-    "fired_at": "2009-03-26 22:15:09", 
+    "type": "upemail",
+    "fired_at": "2009-03-26 22:15:09",
     "data[list_id]": "a6b5da1054",
-    "data[new_id]": "51da8c3259", 
-    "data[new_email]": "api+new@mailchimp.com", 
+    "data[new_id]": "51da8c3259",
+    "data[new_email]": "api+new@mailchimp.com",
     "data[old_email]": "api+old@mailchimp.com"
     */
 
@@ -242,8 +242,8 @@
 
     /*
     Reason will be one of "hard" (for hard bounces) or "abuse"
-    "type": "cleaned", 
-    "fired_at": "2009-03-26 22:01:00", 
+    "type": "cleaned",
+    "fired_at": "2009-03-26 22:01:00",
     "data[list_id]": "a6b5da1054",
     "data[campaign_id]": "4fjk2ma9xd",
     "data[reason]": "hard",
@@ -273,12 +273,12 @@
     import org.json4s.JsonDSL._
 
     /*
-    "type": "campaign", 
-    "fired_at": "2009-03-26 21:31:21", 
+    "type": "campaign",
+    "fired_at": "2009-03-26 21:31:21",
     "data[id]": "5aa2102003",
-    "data[subject]": "Test Campaign Subject", 
+    "data[subject]": "Test Campaign Subject",
     "data[status]": "sent",
-    "data[reason]": "", 
+    "data[reason]": "",
     "data[list_id]": "a6b5da1054"
     */
 
diff --git a/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
index f936a25..318043c 100644
--- a/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
+++ b/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
@@ -20,11 +20,24 @@
 
 private[prediction] object SegmentIOConnector extends JsonConnector {
 
+  // private lazy val supportedAPI = Vector("2", "2.0", "2.0.0")
+
   implicit val json4sFormats: Formats = DefaultFormats
 
   override
   def toEventJson(data: JObject): JObject = {
-    // TODO: check segmentio API version
+    try {
+      val version: String = data.values("version").toString
+/*
+      if (!supportedAPI.contains(version)) {
+        throw new ConnectorException(
+          s"Supported segment.io API versions: [2]. got [$version]"
+        )
+      }
+*/
+    } catch { case _: Throwable ⇒
+      throw new ConnectorException(s"Failed to get segment.io API version.")
+    }
 
     val common = try {
       data.extract[Common]
@@ -102,7 +115,7 @@
 
   def toEventJson(common: Common, alias: Events.Alias): JObject = {
     import org.json4s.JsonDSL._
-    toJson(common, "previousId" → alias.previousId)
+    toJson(common, "previous_id" → alias.previous_id)
   }
 
   def toEventJson(common: Common, screen: Events.Screen): JObject = {
@@ -124,7 +137,7 @@
   def toEventJson(common: Common, group: Events.Group): JObject = {
     import org.json4s.JsonDSL._
     val eventProperties =
-      ("groupId" → group.groupId) ~
+      ("group_id" → group.group_id) ~
       ("traits" → group.traits)
     toJson(common, eventProperties)
   }
@@ -153,7 +166,7 @@
 
   private def commonToJson(common: Common, typ: String): JObject = {
     import org.json4s.JsonDSL._
-      common.userId.orElse(common.anonymousId) match {
+      common.user_id.orElse(common.anonymous_id) match {
         case Some(userId) ⇒
           ("event" → typ) ~
             ("entityType" → "user") ~
@@ -175,25 +188,25 @@
     properties: Option[JObject] = None
   )
 
-  private[prediction] case class Alias(previousId: String, userId: String)
+  private[prediction] case class Alias(previous_id: String, user_id: String)
 
   private[prediction] case class Group(
-    groupId: String,
+    group_id: String,
     traits: Option[JObject] = None
   )
 
   private[prediction] case class Screen(
-    name: String,
+    name: Option[String] = None,
     properties: Option[JObject] = None
   )
 
   private[prediction] case class Page(
-    name: String,
+    name: Option[String] = None,
     properties: Option[JObject] = None
   )
 
   private[prediction] case class Identify(
-    userId: String,
+    user_id: String,
     traits: Option[JObject]
   )
 
@@ -209,19 +222,18 @@
   )
 
   private[prediction] case class Context(
-    app: App,
-    campaign: Campaign,
-    device: Device,
     ip: String,
     library: Library,
-    locale: String,
-    network: Network,
-    location: Location,
-    os: OS,
-    referrer: Referrer,
-    screen: Screen,
-    timezone: String,
-    userAgent: String
+    user_agent: String,
+    app: Option[App] = None,
+    campaign: Option[Campaign] = None,
+    device: Option[Device] = None,
+    network: Option[Network] = None,
+    location: Option[Location] = None,
+    os: Option[OS] = None,
+    referrer: Option[Referrer] = None,
+    screen: Option[Screen] = None,
+    timezone: Option[String] = None
   )
 
   private[prediction] case class Screen(width: Int, height: Int, density: Int)
@@ -238,6 +250,14 @@
     speed: Option[Int] = None
   )
 
+  case class Page(
+    path: String,
+    referrer: String,
+    search: String,
+    title: String,
+    url: String
+  )
+
   private[prediction] case class Network(
     bluetooth: Option[Boolean] = None,
     carrier: Option[String] = None,
@@ -249,8 +269,8 @@
 
   private[prediction] case class Device(
     id: Option[String] = None,
-    advertisingId: Option[String] = None,
-    adTrackingEnabled: Option[Boolean] = None,
+    advertising_id: Option[String] = None,
+    ad_tracking_enabled: Option[Boolean] = None,
     manufacturer: Option[String] = None,
     model: Option[String] = None,
     name: Option[String] = None,
@@ -276,10 +296,11 @@
 
 private[prediction] case class Common(
   `type`: String,
-  sendAt: String,
+  sent_at: String,
   timestamp: String,
-  anonymousId: Option[String] = None,
-  userId: Option[String] = None,
+  version: String,
+  anonymous_id: Option[String] = None,
+  user_id: Option[String] = None,
   context: Option[Common.Context] = None,
   integrations: Option[Common.Integrations] = None
 )
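The version check at the top of `toEventJson` pulls `version` out of the raw `JObject` via its `values` map, which throws when the field is absent; hence the surrounding try/catch. A standalone json4s sketch (payload hypothetical):

```scala
import org.json4s._
import org.json4s.native.JsonMethods.parse

implicit val formats: Formats = DefaultFormats

val data = parse("""{"version": "2", "type": "identify"}""").asInstanceOf[JObject]
val version = data.values("version").toString  // NoSuchElementException if "version" is missing
```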
diff --git a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala b/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
index 9a0d1bf..9f7a74e 100644
--- a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
+++ b/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
@@ -16,8 +16,6 @@
 package io.prediction.data.api
 
 import io.prediction.data.storage.Storage
-import io.prediction.data.webhooks.JsonConnector
-import io.prediction.data.webhooks.FormConnector
 
 import akka.testkit.TestProbe
 import akka.actor.ActorSystem
@@ -39,11 +37,15 @@
   val channelsClient = Storage.getMetaDataChannels()
   
   val eventServiceActor = system.actorOf(
-    Props(classOf[EventServiceActor],
-      eventClient,
-      accessKeysClient,
-      channelsClient,
-      EventServerConfig()))
+    Props(
+      new EventServiceActor(
+        eventClient,
+        accessKeysClient,
+        channelsClient,
+        EventServerConfig()
+      )
+    )
+  )
 
   "GET / request" should {
     "properly produce OK HttpResponses" in {
diff --git a/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala b/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala
new file mode 100644
index 0000000..bae0f0b
--- /dev/null
+++ b/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala
@@ -0,0 +1,175 @@
+package io.prediction.data.api
+
+import akka.actor.{ActorSystem, Props}
+import akka.testkit.TestProbe
+import io.prediction.data.storage._
+import org.joda.time.DateTime
+import org.specs2.mutable.Specification
+import spray.http.HttpHeaders.RawHeader
+import spray.http.{ContentTypes, HttpEntity, HttpResponse}
+import spray.httpx.RequestBuilding._
+import sun.misc.BASE64Encoder
+
+import scala.concurrent.{Future, ExecutionContext}
+
+class SegmentIOAuthSpec extends Specification {
+
+  val system = ActorSystem("EventServiceSpecSystem")
+  sequential
+  isolated
+  val eventClient = new LEvents {
+    override def init(appId: Int, channelId: Option[Int]): Boolean = true
+
+    override def futureInsert(event: Event, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[String] =
+      Future successful "event_id"
+
+    override def futureFind(
+      appId: Int, channelId: Option[Int], startTime: Option[DateTime],
+      untilTime: Option[DateTime], entityType: Option[String],
+      entityId: Option[String], eventNames: Option[Seq[String]],
+      targetEntityType: Option[Option[String]],
+      targetEntityId: Option[Option[String]], limit: Option[Int],
+      reversed: Option[Boolean])
+        (implicit ec: ExecutionContext): Future[Iterator[Event]] =
+      Future successful List.empty[Event].iterator
+
+    override def futureGet(eventId: String, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[Option[Event]] =
+      Future successful None
+
+    override def remove(appId: Int, channelId: Option[Int]): Boolean = true
+
+    override def futureDelete(eventId: String, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[Boolean] =
+      Future successful true
+
+    override def close(): Unit = {}
+  }
+  val appId = 0
+  val accessKeysClient = new AccessKeys {
+    override def insert(k: AccessKey): Option[String] = null
+    override def getByAppid(appid: Int): Seq[AccessKey] = null
+    override def update(k: AccessKey): Unit = {}
+    override def delete(k: String): Unit = {}
+    override def getAll(): Seq[AccessKey] = null
+
+    override def get(k: String): Option[AccessKey] =
+      k match {
+        case "abc" ⇒ Some(AccessKey(k, appId, Seq.empty))
+        case _ ⇒ None
+      }
+  }
+
+  val channelsClient = Storage.getMetaDataChannels()
+  val eventServiceActor = system.actorOf(
+    Props(
+      new EventServiceActor(
+        eventClient,
+        accessKeysClient,
+        channelsClient,
+        EventServerConfig()
+      )
+    )
+  )
+
+  val base64Encoder = new BASE64Encoder
+
+  "Event Service" should {
+
+    "reject with CredentialsRejected with invalid credentials" in {
+      val accessKey = "abc123:"
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post("/webhooks/segmentio.json")
+          .withHeaders(
+            List(
+              RawHeader("Authorization", s"Basic $accessKey")
+            )
+          )
+      )
+      probe.expectMsg(
+        HttpResponse(
+          401,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"message":"Invalid accessKey."}"""
+          )
+        )
+      )
+      success
+    }
+
+    "reject with CredentialsMissed without credentials" in {
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post("/webhooks/segmentio.json")
+      )
+      probe.expectMsg(
+        HttpResponse(
+          401,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"message":"Missing accessKey."}"""
+          )
+        )
+      )
+      success
+    }
+
+    "process SegmentIO identity request properly" in {
+      val jsonReq =
+        """
+          |{
+          |  "anonymous_id": "507f191e810c19729de860ea",
+          |  "channel": "browser",
+          |  "context": {
+          |    "ip": "8.8.8.8",
+          |    "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
+          |  },
+          |  "message_id": "022bb90c-bbac-11e4-8dfc-aa07a5b093db",
+          |  "timestamp": "2015-02-23T22:28:55.387Z",
+          |  "sent_at": "2015-02-23T22:28:55.111Z",
+          |  "traits": {
+          |    "name": "Peter Gibbons",
+          |    "email": "peter@initech.com",
+          |    "plan": "premium",
+          |    "logins": 5
+          |  },
+          |  "type": "identify",
+          |  "user_id": "97980cfea0067",
+          |  "version": "2"
+          |}
+        """.stripMargin
+
+      val accessKey = "abc:"
+      val accessKeyEncoded = base64Encoder.encodeBuffer(accessKey.getBytes)
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post(
+          "/webhooks/segmentio.json",
+          HttpEntity(ContentTypes.`application/json`, jsonReq.getBytes)
+        ).withHeaders(
+            List(
+              RawHeader("Authorization", s"Basic $accessKeyEncoded")
+            )
+          )
+      )
+      probe.expectMsg(
+        HttpResponse(
+          201,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"eventId":"event_id"}"""
+          )
+        )
+      )
+      success
+    }
+  }
+
+  step(system.shutdown())
+}
diff --git a/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
index 5822fc3..d7587cd 100644
--- a/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
+++ b/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
@@ -25,8 +25,9 @@
 
   val commonFields =
     s"""
-       |  "anonymousId": "id",
-       |  "sendAt": "sendAt",
+       |  "anonymous_id": "id",
+       |  "sent_at": "sendAt",
+       |  "version": "2",
      """.stripMargin
 
   "SegmentIOConnector" should {
@@ -49,8 +50,8 @@
           |    },
           |    "device": {
           |      "id": "B5372DB0-C21E-11E4-8DFC-AA07A5B093DB",
-          |      "advertisingId": "7A3CBEA0-BDF5-11E4-8DFC-AA07A5B093DB",
-          |      "adTrackingEnabled": true,
+          |      "advertising_id": "7A3CBEA0-BDF5-11E4-8DFC-AA07A5B093DB",
+          |      "ad_tracking_enabled": true,
           |      "manufacturer": "Apple",
           |      "model": "iPhone7,2",
           |      "name": "maguro",
@@ -62,7 +63,6 @@
           |      "name": "analytics-ios",
           |      "version": "1.8.0"
           |    },
-          |    "locale": "nl-NL",
           |    "network": {
           |      "bluetooth": false,
           |      "carrier": "T-Mobile NL",
@@ -90,7 +90,7 @@
           |      "density": 2
           |    },
           |    "timezone": "Europe/Amsterdam",
-          |    "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
+          |    "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
           |  }
         """.stripMargin
 
@@ -98,8 +98,8 @@
         s"""
            |{ $commonFields
             |  "type": "group",
-            |  "groupId": "groupId",
-            |  "userId": "userIdValue",
+            |  "group_id": "groupId",
+            |  "user_id": "userIdValue",
             |  "timestamp" : "2012-12-02T00:30:08.276Z",
             |  "traits": {
             |    "name": "groupName",
@@ -117,7 +117,7 @@
           |  "entityId": "userIdValue",
           |  "properties": {
           |    $context,
-          |    "groupId": "groupId",
+          |    "group_id": "groupId",
           |    "traits": {
           |      "name": "groupName",
           |      "employees": 329
@@ -135,8 +135,8 @@
         s"""
           |{ $commonFields
           |  "type": "group",
-          |  "groupId": "groupId",
-          |  "userId": "userIdValue",
+          |  "group_id": "groupId",
+          |  "user_id": "userIdValue",
           |  "timestamp" : "2012-12-02T00:30:08.276Z",
           |  "traits": {
           |    "name": "groupName",
@@ -152,7 +152,7 @@
           |  "entityType": "user",
           |  "entityId": "userIdValue",
           |  "properties": {
-          |    "groupId": "groupId",
+          |    "group_id": "groupId",
           |    "traits": {
           |      "name": "groupName",
           |      "employees": 329
@@ -171,7 +171,7 @@
           |{ $commonFields
           |  "type": "screen",
           |  "name": "screenName",
-          |  "userId": "userIdValue",
+          |  "user_id": "userIdValue",
           |  "timestamp" : "2012-12-02T00:30:08.276Z",
           |  "properties": {
           |    "variation": "screenVariation"
@@ -204,7 +204,7 @@
           |{ $commonFields
           |  "type": "page",
           |  "name": "pageName",
-          |  "userId": "userIdValue",
+          |  "user_id": "userIdValue",
           |  "timestamp" : "2012-12-02T00:30:08.276Z",
           |  "properties": {
           |    "title": "pageTitle",
@@ -238,8 +238,8 @@
         s"""
           |{ $commonFields
           |  "type": "alias",
-          |  "previousId": "previousIdValue",
-          |  "userId": "userIdValue",
+          |  "previous_id": "previousIdValue",
+          |  "user_id": "userIdValue",
           |  "timestamp" : "2012-12-02T00:30:08.276Z"
           |}
         """.stripMargin
@@ -251,7 +251,7 @@
           |  "entityType": "user",
           |  "entityId": "userIdValue",
           |  "properties": {
-          |    "previousId" : "previousIdValue"
+          |    "previous_id" : "previousIdValue"
           |  },
           |  "eventTime" : "2012-12-02T00:30:08.276Z"
           |}
@@ -264,7 +264,7 @@
       val track =
        s"""
           |{ $commonFields
-          |  "userId": "some_user_id",
+          |  "user_id": "some_user_id",
           |  "type": "track",
           |  "event": "Registered",
           |  "timestamp" : "2012-12-02T00:30:08.276Z",
@@ -299,11 +299,11 @@
       val identify = s"""
         { $commonFields
           "type"      : "identify",
-          "userId"    : "019mr8mf4r",
+          "user_id"    : "019mr8mf4r",
           "traits"    : {
               "email"            : "achilles@segment.com",
               "name"             : "Achilles",
-              "subscriptionPlan" : "Premium",
+              "subscription_plan" : "Premium",
               "friendCount"      : 29
           },
           "timestamp" : "2012-12-02T00:30:08.276Z"
@@ -319,7 +319,7 @@
             "traits" : {
               "email"            : "achilles@segment.com",
               "name"             : "Achilles",
-              "subscriptionPlan" : "Premium",
+              "subscription_plan" : "Premium",
               "friendCount"      : 29
             }
           },
diff --git a/data/test-segmentio.sh b/data/test-segmentio.sh
index 4737edf..6626598 100755
--- a/data/test-segmentio.sh
+++ b/data/test-segmentio.sh
@@ -2,22 +2,73 @@
 accessKey=$1
 
 # normal case
-curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
--H "Content-Type: application/json" \
--d '{
-  "version"   : 1,
-  "type"      : "identify",
-  "userId"    : "019mr8mf4r",
-  "traits"    : {
-      "email"            : "achilles@segment.com",
-      "name"             : "Achilles",
-      "subscriptionPlan" : "Premium",
-      "friendCount"      : 29
-  },
-  "timestamp" : "2012-12-02T00:30:08.276Z"
-}' \
--w %{time_total}
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/identify | \
+curl -X POST \
+    -H "Content-Type: application/json" \
+    -d @- \
+    http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey
+echo ''
 
+# normal case api key in header for identify event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/identify | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
+
+# normal case api key in header for track event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/track | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
+
+# normal case api key in header for page event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/page | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
+
+# normal case api key in header for screen event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/screen | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
+
+# normal case api key in header for group event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/group | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
+
+# normal case api key in header for alias event
+curl -H "Accept: application/json; version=2.0" \
+     http://spec.segment.com/generate/alias | \
+curl -X POST \
+     --user "$accessKey:" \
+     -H "Content-Type: application/json" \
+     -d @- \
+     http://localhost:7070/webhooks/segmentio.json
+echo ''
 
 # invalid type
 curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
@@ -26,6 +77,7 @@
   "version"   : 1,
   "type"      : "invalid_type",
   "userId"    : "019mr8mf4r",
+  "sent_at":"2015-08-21T15:25:32.799Z",
   "traits"    : {
       "email"            : "achilles@segment.com",
       "name"             : "Achilles",
@@ -35,6 +87,7 @@
   "timestamp" : "2012-12-02T00:30:08.276Z"
 }' \
 -w %{time_total}
+echo ''
 
 # invalid data format
 curl -i -X POST http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
@@ -42,6 +95,7 @@
 -d '{
   "version"   : 1,
   "userId"    : "019mr8mf4r",
+  "sent_at":"2015-08-21T15:25:32.799Z",
   "traits"    : {
       "email"            : "achilles@segment.com",
       "name"             : "Achilles",
@@ -51,7 +105,7 @@
   "timestamp" : "2012-12-02T00:30:08.276Z"
 }' \
 -w %{time_total}
-
+echo ''
 
 # invalid webhooks path
 curl -i -X POST http://localhost:7070/webhooks/invalidpath.json?accessKey=$accessKey \
@@ -60,6 +114,7 @@
   "version"   : 1,
   "type"      : "identify",
   "userId"    : "019mr8mf4r",
+  "sent_at":"2015-08-21T15:25:32.799Z",
   "traits"    : {
       "email"            : "achilles@segment.com",
       "name"             : "Achilles",
@@ -69,14 +124,16 @@
   "timestamp" : "2012-12-02T00:30:08.276Z"
 }' \
 -w %{time_total}
-
+echo ''
 
 # get request
 curl -i -X GET http://localhost:7070/webhooks/segmentio.json?accessKey=$accessKey \
 -H "Content-Type: application/json" \
 -w %{time_total}
+echo ''
 
 # get invalid
 curl -i -X GET http://localhost:7070/webhooks/invalidpath.json?accessKey=$accessKey \
 -H "Content-Type: application/json" \
 -w %{time_total}
+echo ''
\ No newline at end of file
diff --git a/docs/manual/data/versions.yml b/docs/manual/data/versions.yml
index b605ffb..36ac734 100644
--- a/docs/manual/data/versions.yml
+++ b/docs/manual/data/versions.yml
@@ -1,5 +1,5 @@
-pio: 0.9.5
-spark: 1.5.1
+pio: 0.9.6
+spark: 1.5.2
 spark_download_filename: spark-1.5.1-bin-hadoop2.6
 elasticsearch_download_filename: elasticsearch-1.4.4
 hbase_basename: hbase-1.0.0
diff --git a/docs/manual/source/deploy/index.html.md b/docs/manual/source/deploy/index.html.md
index 84baea4..be18a0d 100644
--- a/docs/manual/source/deploy/index.html.md
+++ b/docs/manual/source/deploy/index.html.md
@@ -4,6 +4,8 @@
 
 An engine must be **built** (i.e. `pio build`) and **trained** (i.e. `pio train`)  before it can be deployed as a web service.
 
+WARNING: The engine server is not protected by authentication, and the instructions below assume deployment in a trusted environment. See the section [Deploying with AWS](/system/deploy-cloudformation/) for a production deployment example.
+
 ## Deploying an Engine the First Time
 
 After you have [downloaded an Engine Template](/start/download/),  you can deploy it with these steps:
diff --git a/docs/manual/source/evaluation/evaluationdashboard.html.md b/docs/manual/source/evaluation/evaluationdashboard.html.md
index 387bb41..3d0bd2d 100644
--- a/docs/manual/source/evaluation/evaluationdashboard.html.md
+++ b/docs/manual/source/evaluation/evaluationdashboard.html.md
@@ -2,6 +2,8 @@
 title: Evaluation Dashboard
 ---
 
+WARNING: This is an experimental development tool that exposes environment variables and other sensitive information about the PredictionIO application (e.g. storage configurations and credentials). Running it in production is not recommended.
+
 PredictionIO provides a web dashboard which allows you to see previous
 evaluation and a drill down page about each evaluation. It is particularly
 useful when we ran multiple [hyperparameter tunings](/evaluation/paramtuning/)
@@ -17,3 +19,5 @@
 order. A high level description of each evaluation can be seen directly from the
 dashboard. We can also click on the *HTML* button to see the evaluation drill
 down page.
+
+*Note:* The dashboard server has SSL enabled and is authenticated by a key passed as the query string parameter `accessKey`. The configuration is in `conf/server.conf`.
\ No newline at end of file
diff --git a/docs/manual/source/system/anotherdatastore.html.md b/docs/manual/source/system/anotherdatastore.html.md
index 18b8c39..563430b 100644
--- a/docs/manual/source/system/anotherdatastore.html.md
+++ b/docs/manual/source/system/anotherdatastore.html.md
@@ -192,6 +192,18 @@
     use when it reads from the JDBC connection, e.g.
     `PIO_STORAGE_SOURCES_PGSQL_PARTITIONS=4`
 
+-   CONNECTIONS (optional, defaults to 8)
+
+    This value is used by the scalikejdbc library to determine the maximum
+    size of the connection pool, e.g.
+    `PIO_STORAGE_SOURCES_PGSQL_CONNECTIONS=8`
+
+-   INDEX (optional since v0.9.6, defaults to disabled)
+
+    Setting this to `enabled` creates indexes on the entityId and entityType
+    columns to improve performance when the findByEntity function is called.
+    Note that the entityId and entityType columns will then be created as
+    varchar(255), e.g.
+    `PIO_STORAGE_SOURCES_PGSQL_INDEX=enabled`
+
 
 #### Apache HBase Configuration
 
diff --git a/e2/build.sbt b/e2/build.sbt
index 1910dfa..1bd8c22 100644
--- a/e2/build.sbt
+++ b/e2/build.sbt
@@ -20,4 +20,4 @@
   "org.apache.spark" %% "spark-core" % sparkVersion.value % "provided",
   "org.apache.spark" %% "spark-mllib" % sparkVersion.value % "provided",
   "org.clapper" %% "grizzled-slf4j" % "1.0.2",
-  "org.scalatest" %% "scalatest" % "2.2.1" % "test")
+  "org.scalatest" %% "scalatest" % "2.2.5" % "test")
diff --git a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala b/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
index b57a24f..6c0d5d3 100644
--- a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
+++ b/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
@@ -21,7 +21,7 @@
 import scala.collection.immutable.HashMap
 import scala.collection.immutable.HashSet
 
-class BinaryVectorizer(propertyMap : HashMap[(String, String), Int]) 
+class BinaryVectorizer(propertyMap : HashMap[(String, String), Int])
 extends Serializable {
 
   val properties: Array[(String, String)] = propertyMap.toArray.sortBy(_._2).map(_._1)
@@ -58,3 +58,4 @@
     new BinaryVectorizer(HashMap(indexed:_*))
   }
 }
+
diff --git a/examples/experimental/scala-local-regression/Run.scala b/examples/experimental/scala-local-regression/Run.scala
index 96aa57e..3708a1c 100644
--- a/examples/experimental/scala-local-regression/Run.scala
+++ b/examples/experimental/scala-local-regression/Run.scala
@@ -26,8 +26,7 @@
 case class DataSourceParams(val filepath: String, val seed: Int = 9527)
   extends Params
 
-case class TrainingData(x: Vector[Vector[Double]], y: Vector[Double])
-  extends Serializable {
+case class TrainingData(x: Vector[Vector[Double]], y: Vector[Double]) {
   val r = x.length
   val c = x.head.length
 }
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
index 2886260..6b586f4 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
@@ -24,7 +24,7 @@
 case class TrainingData(
   val g:Graph[Int,Int],
   val identityMatrix:RDD[(VertexId,Double)]
-) extends Serializable
+)
 
 class DataSource(val dsp: DataSourceParams)
   extends PDataSource[TrainingData, EmptyEvaluationInfo, Query, Double] {
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
index efbba5e..e189d0f 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   val item1: Long,
   val item2: Long
-) extends Serializable
+)
 
 case class PredictedResult(
   val productScores: Array[ProductScore]
-) extends Serializable
+)
 
 case class ProductScore(
   product: Int,
   score: Double
-) extends Serializable
+)
 
 object PSimRankEngineFactory extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
index eb8ff29..8fe6361 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
index cc5542c..132755e 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
index 22add39..edb7767 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   user: String,
   num: Int
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
index cc5542c..132755e 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
index 22add39..edb7767 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   user: String,
   num: Int
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
index 22add39..edb7767 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   user: String,
   num: Int
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
index cc5542c..132755e 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
index e961ee2..02a3212 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
index 7a7f124..e7dadab 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
index 9815ebf..8d594fc 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
index 7a7f124..e7dadab 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala b/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
index b6410e5..8726c87 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
@@ -6,11 +6,11 @@
 //import io.prediction.workflow.CoreWorkflow
 import grizzled.slf4j.Logger
 
-case class Query(q: Int) extends Serializable
+case class Query(q: Int)
 
-case class PredictedResult(p: Int) extends Serializable
+case class PredictedResult(p: Int)
 
-case class ActualResult() extends Serializable
+case class ActualResult()
 
 object VanillaEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala b/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
index 004d864..91a37ef 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
@@ -7,8 +7,7 @@
   extends LServing[Query, PredictedResult] {
 
   @transient lazy val logger = Logger[this.type]
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     logger.error("Serving.serve")
     predictedResults.head
diff --git a/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala b/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
index 6b991a6..d691a4f 100644
--- a/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
+++ b/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
@@ -32,24 +32,24 @@
 extends Serializable {}
 
 case class DailyStat (
-  val time: Long,
-  val nav: Double,
-  val ret: Double,
-  val market: Double,
-  val positionCount: Int
-) extends Serializable
+  time: Long,
+  nav: Double,
+  ret: Double,
+  market: Double,
+  positionCount: Int
+)
 
 case class OverallStat (
-  val ret: Double,
-  val vol: Double,
-  val sharpe: Double,
-  val days: Int
-) extends Serializable
+  ret: Double,
+  vol: Double,
+  sharpe: Double,
+  days: Int
+)
 
 case class BacktestingResult(
-  val daily: Seq[DailyStat],
-  val overall: OverallStat
-) extends Serializable with NiceRendering {
+  daily: Seq[DailyStat],
+  overall: OverallStat
+) extends NiceRendering {
   override def toString(): String = overall.toString
 
   def toHTML(): String = {
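
Once the `Serializable` parent is dropped from `BacktestingResult`, `NiceRendering` has to move into the `extends` position: `with` is only legal after an `extends` clause, so `case class BacktestingResult(...) with NiceRendering` would not parse. A compact sketch of the rule (the trait body here is hypothetical):

```scala
trait NiceRendering { def toHTML(): String }

// Valid: the first parent goes after "extends"; further mixins use "with".
case class BacktestingResult(overall: String) extends NiceRendering {
  def toHTML(): String = s"<p>$overall</p>"
}
// Invalid: "case class BacktestingResult(...) with NiceRendering" fails to compile.
```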
diff --git a/examples/experimental/scala-stock/src/main/scala/Data.scala b/examples/experimental/scala-stock/src/main/scala/Data.scala
index dba9a9f..a57ef28 100644
--- a/examples/experimental/scala-stock/src/main/scala/Data.scala
+++ b/examples/experimental/scala-stock/src/main/scala/Data.scala
@@ -72,18 +72,17 @@
 
 // Training data visible to the user is [untilIdx - windowSize, untilIdx).
 case class TrainingData(
-  val untilIdx: Int,
-  val maxWindowSize: Int,
-  val rawDataB: Broadcast[RawData])
-  extends Serializable {
+  untilIdx: Int,
+  maxWindowSize: Int,
+  rawDataB: Broadcast[RawData]) {
  
   def view(): DataView = DataView(rawDataB.value, untilIdx - 1, maxWindowSize)
 }
 
-case class DataParams(val rawDataB: Broadcast[RawData]) extends Serializable
+case class DataParams(rawDataB: Broadcast[RawData])
 
 // Date
-case class QueryDate(val idx: Int) extends Serializable {}
+case class QueryDate(idx: Int)
 
 case class Query(
   val idx: Int,
@@ -92,7 +91,7 @@
   val mktTicker: String)
 
 // Prediction
-case class Prediction(val data: HashMap[String, Double]) extends Serializable {}
+case class Prediction(data: HashMap[String, Double])
 
 object SaddleWrapper {
   def ToFrame[A](
diff --git a/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala b/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
index 1359818..3c8d4f0 100644
--- a/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
+++ b/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
@@ -27,13 +27,13 @@
 //import org.saddle._
 
 case class HistoricalData(
-  val ticker: String,
-  val timeIndex: Array[DateTime],
-  val close: Array[Double],
-  val adjClose: Array[Double],
-  val adjReturn: Array[Double],
-  val volume: Array[Double],
-  val active: Array[Boolean]) extends Serializable {
+  ticker: String,
+  timeIndex: Array[DateTime],
+  close: Array[Double],
+  adjClose: Array[Double],
+  adjReturn: Array[Double],
+  volume: Array[Double],
+  active: Array[Boolean]) {
 
   override def toString(): String = {
     s"HistoricalData($ticker, ${timeIndex.head}, ${timeIndex.last}, " +
@@ -335,31 +335,31 @@
 
 object YahooDataSource {
   case class Params(
-    val windowParams: DataSourceParams,
+    windowParams: DataSourceParams,
     // Below filters with DataAPISpecific details
-    val appId: Int,  // Ignore appId in DataSourceParams
-    val entityType: String,
-    val startTime: Option[DateTime] = None,
-    val untilTime: Option[DateTime] = None
+    appId: Int,  // Ignore appId in DataSourceParams
+    entityType: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None
   ) extends BaseParams
 
   case class Daily(
-    val close: Double,
-    val adjClose: Double,
-    val adjReturn: Double,
-    val volume: Double,
-    val active: Boolean,
+    close: Double,
+    adjClose: Double,
+    adjReturn: Double,
+    volume: Double,
+    active: Boolean,
     // prevDate is used to verify continuity
-    val prevDate: DateTime)
+    prevDate: DateTime)
 
   /** Intermediate storage for constructing historical data
     * @param timeIndexSet Only datetime in this set is used to create historical
     * data.
     */
   case class Intermediate(
-    val ticker: String = "",
-    val dailyMap: Map[DateTime, Daily] = Map[DateTime, Daily]()
-    ) extends Serializable {
+    ticker: String = "",
+    dailyMap: Map[DateTime, Daily] = Map[DateTime, Daily]()
+    ) {
     override def toString(): String =
       s"YDS.Intermediate($ticker, size=${dailyMap.size})"
   }
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
index 42ec4d4..1b453de 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object ECommerceRecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
index 42ec4d4..1b453de 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object ECommerceRecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
index 22add39..edb7767 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   user: String,
   num: Int
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
index cc5542c..132755e 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
index d1f6e88..8982d94 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
@@ -4,13 +4,11 @@
 import io.prediction.controller.Engine
 
 case class Query(user: String, num: Int, creationYear: Option[Int] = None)
-  extends Serializable
 
-case class PredictedResult(itemScores: Array[ItemScore]) extends Serializable
+case class PredictedResult(itemScores: Array[ItemScore])
 
 // HOWTO: added movie creation year to predicted result.
 case class ItemScore(item: String, score: Double, creationYear: Option[Int])
-  extends Serializable
 
 object RecommendationEngine extends IEngineFactory {
   def apply() =
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
index 22add39..edb7767 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
@@ -6,16 +6,16 @@
 case class Query(
   user: String,
   num: Int
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
index 6468e4b..7b6c7c9 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
@@ -14,6 +14,6 @@
   }
 }
 
-class PreparedData(
-  val ratings: RDD[Rating]
-) extends Serializable
+case class PreparedData(
+  ratings: RDD[Rating]
+)
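
Converting `PreparedData` from a plain class with a `val` parameter to a case class (the `TrainingData` conversions below follow the same pattern) is source-compatible for readers of `ratings`: case class parameters are public vals by default, and the conversion additionally brings `equals`, `hashCode`, `copy`, and automatic serializability. A small sketch, with `Seq[Double]` standing in for `RDD[Rating]`:

```scala
// Case class parameters are public vals, so prepared.ratings still works.
case class PreparedData(ratings: Seq[Double])

object PreparedDataDemo extends App {
  val prepared = PreparedData(Seq(4.0, 5.0))
  println(prepared.ratings.sum)           // accessor unchanged by the conversion
  println(prepared.copy(ratings = Nil))   // copy comes for free with case classes
}
```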
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
index 7a1640a..b5ff72a 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
@@ -82,10 +82,10 @@
   rating: Double
 )
 
-class TrainingData(
-  val items: RDD[Item],
-  val ratings: RDD[Rating]
-) extends Serializable {
+case class TrainingData(
+  items: RDD[Item],
+  ratings: RDD[Rating]
+) {
   override def toString = {
     s"items: [${items.count()}] (${items.take(2).toList}...)" +
     s" ratings: [${ratings.count()}] (${ratings.take(2).toList}...)"
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
index dd8b258..c922d05 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
@@ -7,16 +7,16 @@
   user: String,
   num: Int,
   categories: Array[String]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendationEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
index a5644f3..748c132 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
@@ -113,7 +113,7 @@
   val users: RDD[(String, User)],
   val items: RDD[(String, Item)],
   val viewEvents: RDD[ViewEvent]
-) extends Serializable {
+) {
   override def toString = {
     s"users: [${users.count()} (${users.take(2).toList}...)]" +
     s"items: [${items.count()} (${items.take(2).toList}...)]" +
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
index 7feeac3..518467b 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
@@ -9,11 +9,11 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
@@ -21,7 +21,7 @@
   date: String,
   imdbUrl: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
index 6af180b..3578552 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
@@ -102,11 +102,11 @@
 
 case class RateEvent(user: String, item: String, rating: Double, t: Long)
 
-class TrainingData(
-  val users: RDD[(String, User)],
-  val items: RDD[(String, Item)],
-  val rateEvents: RDD[RateEvent]
-) extends Serializable {
+case class TrainingData(
+  users: RDD[(String, User)],
+  items: RDD[(String, Item)],
+  rateEvents: RDD[RateEvent]
+) {
   override def toString = {
     s"users: [${users.count()} (${users.take(2).toList}...)]" +
     s"items: [${items.count()} (${items.take(2).toList}...)]" +
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
index 9815ebf..8d594fc 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
index c5b132e..d6717a6 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
@@ -10,17 +10,17 @@
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]],
   recommendFromYear: Option[Int]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double,
   year: Int
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
index 1111d94..2e6922d 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
index 8611fe5..51a708c 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
@@ -9,18 +9,18 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable {
+) {
   override def toString = itemScores.mkString(",")
 }
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
index 59dcaa1..d3823a5 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
@@ -9,8 +9,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
 
     // MODIFIED
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
index 9815ebf..8d594fc 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
@@ -9,16 +9,16 @@
   categories: Option[Set[String]],
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   itemScores: Array[ItemScore]
-) extends Serializable
+)
 
 case class ItemScore(
   item: String,
   score: Double
-) extends Serializable
+)
 
 object SimilarProductEngine extends IEngineFactory {
   def apply() = {
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
index 7a7f124..e7dadab 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
@@ -5,8 +5,7 @@
 class Serving
   extends LServing[Query, PredictedResult] {
 
-  override
-  def serve(query: Query,
+  override def serve(query: Query,
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
index d8cdc90..8122789 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
@@ -8,16 +8,16 @@
   num: Int,
   whiteList: Option[Set[String]],
   blackList: Option[Set[String]]
-) extends Serializable
+)
 
 case class PredictedResult(
   similarUserScores: Array[similarUserScore]
-) extends Serializable
+)
 
 case class similarUserScore(
   user: String,
   score: Double
-) extends Serializable
+)
 
 object RecommendedUserEngine extends IEngineFactory {
   def apply() = {
diff --git a/project/assembly.sbt b/project/assembly.sbt
index 54c3252..49085ee 100644
--- a/project/assembly.sbt
+++ b/project/assembly.sbt
@@ -1 +1 @@
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.1")
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 1b6e305..d0d2591 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,10 +1,10 @@
 addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.3.2")
 
-addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.3")
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
 
 addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.0.3")
 
-addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "0.2.1")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1")
 
 addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.6.0")
 
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index d0afc1b..0f9b0b2 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -95,4 +95,7 @@
     <check enabled="true"
            class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker"
            level="error"/>
+    <check level="error"
+           class="org.scalastyle.file.WhitespaceEndOfLineChecker"
+           enabled="true"/>
 </scalastyle>
diff --git a/tools/build.sbt b/tools/build.sbt
index 865386f..9c60607 100644
--- a/tools/build.sbt
+++ b/tools/build.sbt
@@ -12,19 +12,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-import AssemblyKeys._
 
-assemblySettings
+
+import sbtassembly.AssemblyPlugin.autoImport._
 
 name := "tools"
 
 libraryDependencies ++= Seq(
   "com.github.scopt"       %% "scopt"          % "3.2.0",
-  "io.spray"               %% "spray-can"      % "1.3.2",
-  "io.spray"               %% "spray-routing"  % "1.3.2",
+  "io.spray"               %% "spray-can"      % "1.3.3",
+  "io.spray"               %% "spray-routing"  % "1.3.3",
   "me.lessis"              % "semverfi_2.10"  % "0.1.3",
-  "org.apache.hadoop"       % "hadoop-common"  % "2.5.0",
-  "org.apache.hadoop"       % "hadoop-hdfs"    % "2.5.0",
+  "org.apache.hadoop"       % "hadoop-common"  % "2.7.1",
+  "org.apache.hadoop"       % "hadoop-hdfs"    % "2.7.1",
   "org.apache.spark"       %% "spark-core"     % sparkVersion.value % "provided",
   "org.apache.spark"       %% "spark-sql"      % sparkVersion.value % "provided",
   "org.clapper"            %% "grizzled-slf4j" % "1.0.2",
@@ -32,7 +32,7 @@
   "org.json4s"             %% "json4s-ext"     % json4sVersion.value,
   "org.scalaj"             %% "scalaj-http"    % "1.1.0",
   "org.spark-project.akka" %% "akka-actor"     % "2.3.4-spark",
-  "io.spray" %% "spray-testkit" % "1.3.2" % "test",
+  "io.spray" %% "spray-testkit" % "1.3.3" % "test",
   "org.specs2" %% "specs2" % "2.3.13" % "test",
   "org.spark-project.akka" %% "akka-slf4j"     % "2.3.4-spark")
 
@@ -40,12 +40,20 @@
   cp filter { _.data.getName match {
     case "asm-3.1.jar" => true
     case "commons-beanutils-1.7.0.jar" => true
+    case "reflectasm-1.10.1.jar" => true
     case "commons-beanutils-core-1.8.0.jar" => true
+    case "kryo-3.0.3.jar" => true
     case "slf4j-log4j12-1.7.5.jar" => true
     case _ => false
   }}
 }
 
+assemblyShadeRules in assembly := Seq(
+  ShadeRule.rename("org.objenesis.**" -> "shadeio.@1").inLibrary("com.esotericsoftware.kryo" % "kryo" % "2.21").inProject,
+  ShadeRule.rename("com.esotericsoftware.reflectasm.**" -> "shadeio.@1").inLibrary("com.esotericsoftware.kryo" % "kryo" % "2.21").inProject,
+  ShadeRule.rename("com.esotericsoftware.minlog.**" -> "shadeio.@1").inLibrary("com.esotericsoftware.kryo" % "kryo" % "2.21").inProject
+)
+
 // skip test in assembly
 test in assembly := {}
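
The new `assemblyShadeRules` relocate the objenesis, reflectasm, and minlog packages bundled with Kryo under a `shadeio` prefix, so the newer `kryo-3.0.3.jar` and `reflectasm-1.10.1.jar` excluded above cannot clash with them on the assembly classpath. Shading arrived with sbt-assembly 0.14, which is why the plugin is bumped to 0.14.1 and the old `import AssemblyKeys._` / `assemblySettings` boilerplate gives way to the auto-plugin import. A minimal build.sbt sketch of the rename API, assuming sbt-assembly 0.14.x:

```scala
// build.sbt sketch: relocate a vendored package inside the fat jar so two
// incompatible copies of the same classes can coexist.
assemblyShadeRules in assembly := Seq(
  // .inAll rewrites the package wherever it occurs; .inLibrary(...).inProject
  // (as in the hunk above) limits the rewrite to specific jars plus local classes.
  ShadeRule.rename("com.esotericsoftware.minlog.**" -> "shadeio.@1").inAll
)
```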
 
diff --git a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
index f5d440e..b18690e 100644
--- a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
+++ b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
@@ -162,7 +162,7 @@
       // If engineParamsGenerator is specified, it overrides the evaluation.
       ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
         .map(x => Seq("--engine-params-generator-class", x))
-        .getOrElse(Seq()) ++ 
+        .getOrElse(Seq()) ++
       (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
       Seq("--json-extractor", ca.common.jsonExtractor.toString)
 
diff --git a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
index 9389d0e..924b6f0 100644
--- a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
+++ b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
@@ -67,7 +67,7 @@
     } getOrElse {
       appClient.get(req.id) map {
         app2 =>
-          GeneralResponse(0, 
+          GeneralResponse(0,
               s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
               "Aborting.")
       } getOrElse {
diff --git a/tools/src/main/scala/io/prediction/tools/console/App.scala b/tools/src/main/scala/io/prediction/tools/console/App.scala
index dac89b3..2056f9d 100644
--- a/tools/src/main/scala/io/prediction/tools/console/App.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/App.scala
@@ -25,6 +25,7 @@
   channel: String = "",
   dataDeleteChannel: Option[String] = None,
   all: Boolean = false,
+  force: Boolean = false,
   description: Option[String] = None)
 
 object App extends Logging {
@@ -171,7 +172,7 @@
         info(f"              ${ch.name}%16s | ${ch.id}%10s")
       }
 
-      val choice = readLine("Enter 'YES' to proceed: ")
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
       choice match {
         case "YES" => {
           // delete channels
@@ -280,7 +281,7 @@
         info(s" Description: ${app.description}")
       }
 
-      val choice = readLine("Enter 'YES' to proceed: ")
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
 
       choice match {
         case "YES" => {
@@ -356,7 +357,7 @@
         info(f"              ${ch.name}%16s | ${ch.id}%10s")
       }
 
-      val choice = readLine("Enter 'YES' to proceed: ")
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
       choice match {
         case "YES" => {
           // delete channels
@@ -494,7 +495,7 @@
         info(s"      Channel ID: ${channelMap(deleteChannel)}")
         info(s"        App Name: ${app.name}")
         info(s"          App ID: ${app.id}")
-        val choice = readLine("Enter 'YES' to proceed: ")
+        val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
         choice match {
           case "YES" => {
             // NOTE: remove storage first before remove meta data (in case remove storage failed)
diff --git a/tools/src/main/scala/io/prediction/tools/console/Console.scala b/tools/src/main/scala/io/prediction/tools/console/Console.scala
index a0fa8cf..81e2d7a 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Console.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Console.scala
@@ -114,11 +114,11 @@
   stats: Boolean = false)
 
 case class AdminServerArgs(
-ip: String = "localhost",
+ip: String = "127.0.0.1",
 port: Int = 7071)
 
 case class DashboardArgs(
-  ip: String = "0.0.0.0",
+  ip: String = "127.0.0.1",
   port: Int = 9000)
 
 case class UpgradeArgs(
@@ -511,7 +511,10 @@
             } children(
               arg[String]("<name>") action { (x, c) =>
                 c.copy(app = c.app.copy(name = x))
-              } text("Name of the app to be deleted.")
+              } text("Name of the app to be deleted."),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete an app without prompting for confirmation")
             ),
           note(""),
           cmd("data-delete").
@@ -527,7 +530,10 @@
               } text("Name of channel whose data to be deleted."),
               opt[Unit]("all") action { (x, c) =>
                 c.copy(app = c.app.copy(all = true))
-              } text("Delete data of all channels including default")
+              } text("Delete data of all channels including default"),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete data of an app without prompting for confirmation")
             ),
           note(""),
           cmd("channel-new").
@@ -553,7 +559,10 @@
               } text("App name."),
               arg[String]("<channel>") action { (x, c) =>
                 c.copy(app = c.app.copy(channel = x))
-              } text ("Channel name to be deleted.")
+              } text ("Channel name to be deleted."),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete a channel of the app without prompting for confirmation")
             )
         )
       note("")
diff --git a/tools/src/main/scala/io/prediction/tools/console/Template.scala b/tools/src/main/scala/io/prediction/tools/console/Template.scala
index f41e0c2..a0b4376 100644
--- a/tools/src/main/scala/io/prediction/tools/console/Template.scala
+++ b/tools/src/main/scala/io/prediction/tools/console/Template.scala
@@ -179,7 +179,7 @@
       "email" -> email,
       "org" -> org)
     try {
-      httpOptionalProxy("http://update.prediction.io/templates.subscribe").
+      httpOptionalProxy("https://update.prediction.io/templates.subscribe").
         postData("json=" + write(data)).asString
     } catch {
       case e: Throwable => error("Unable to subscribe.")
@@ -189,14 +189,14 @@
   def meta(repo: String, name: String, org: String): Unit = {
     try {
       httpOptionalProxy(
-        s"http://meta.prediction.io/templates/$repo/$org/$name").asString
+        s"https://meta.prediction.io/templates/$repo/$org/$name").asString
     } catch {
       case e: Throwable => debug("Template metadata unavailable.")
     }
   }
 
   def list(ca: ConsoleArgs): Int = {
-    val templatesUrl = "http://templates.prediction.io/index.json"
+    val templatesUrl = "https://templates.prediction.io/index.json"
     try {
       val templatesJson = Source.fromURL(templatesUrl).mkString("")
       val templates = read[List[TemplateEntry]](templatesJson)
diff --git a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala b/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
index d0708f5..3d2c888 100644
--- a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
+++ b/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
@@ -28,7 +28,7 @@
 // see also https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS
 trait CORSSupport {
   this: HttpService =>
-  
+
   private val allowOriginHeader = `Access-Control-Allow-Origin`(AllOrigins)
   private val optionsCorsHeaders = List(
     `Access-Control-Allow-Headers`("""Origin,
@@ -42,18 +42,18 @@
                                       |User-Agent""".stripMargin.replace("\n", " ")),
     `Access-Control-Max-Age`(1728000)
   )
- 
-  def cors[T]: Directive0 = mapRequestContext { ctx => 
+
+  def cors[T]: Directive0 = mapRequestContext { ctx =>
     ctx.withRouteResponseHandling {
       // OPTION request for a resource that responds to other methods
-      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) && 
+      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) &&
           x.exists(_.isInstanceOf[MethodRejection])) => {
-        val allowedMethods: List[HttpMethod] = x.collect { 
+        val allowedMethods: List[HttpMethod] = x.collect {
           case rejection: MethodRejection => rejection.supported
         }
         ctx.complete {
           HttpResponse().withHeaders(
-            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) :: 
+            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) ::
             allowOriginHeader ::
             optionsCorsHeaders
           )
@@ -63,11 +63,11 @@
       allowOriginHeader :: headers
     }
   }
-  
+
   override def timeoutRoute: StandardRoute = complete {
     HttpResponse(
       StatusCodes.InternalServerError,
-      HttpEntity(ContentTypes.`text/plain(UTF-8)`, 
+      HttpEntity(ContentTypes.`text/plain(UTF-8)`,
           "The server was not able to produce a timely response to your request."),
       List(allowOriginHeader)
     )
diff --git a/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala b/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
index 30fdbd2..154ba4e 100644
--- a/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
+++ b/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
@@ -15,8 +15,13 @@
 
 package io.prediction.tools.dashboard
 
+import com.typesafe.config.ConfigFactory
+import io.prediction.authentication.KeyAuthentication
+import io.prediction.configuration.SSLConfiguration
 import io.prediction.data.storage.Storage
-
+import spray.can.server.ServerSettings
+import spray.routing.directives.AuthMagnet
+import scala.concurrent.{Future, ExecutionContext}
 import akka.actor.{ActorContext, Actor, ActorSystem, Props}
 import akka.io.IO
 import akka.pattern.ask
@@ -27,6 +32,7 @@
 import spray.http._
 import spray.http.MediaTypes._
 import spray.routing._
+import spray.routing.authentication.{Authentication, UserPass, BasicAuth}
 
 import scala.concurrent.duration._
 
@@ -34,7 +40,7 @@
   ip: String = "localhost",
   port: Int = 9000)
 
-object Dashboard extends Logging {
+object Dashboard extends Logging with SSLConfiguration {
   def main(args: Array[String]): Unit = {
     val parser = new scopt.OptionParser[DashboardConfig]("Dashboard") {
       opt[String]("ip") action { (x, c) =>
@@ -55,7 +61,12 @@
     val service =
       system.actorOf(Props(classOf[DashboardActor], dc), "dashboard")
     implicit val timeout = Timeout(5.seconds)
-    IO(Http) ? Http.Bind(service, interface = dc.ip, port = dc.port)
+    val settings = ServerSettings(system)
+    IO(Http) ? Http.Bind(
+      service,
+      interface = dc.ip,
+      port = dc.port,
+      settings = Some(settings.copy(sslEncryption = true)))
     system.awaitTermination
   }
 }
@@ -67,22 +78,26 @@
   def receive: Actor.Receive = runRoute(dashboardRoute)
 }
 
-trait DashboardService extends HttpService with CORSSupport {
+trait DashboardService extends HttpService with KeyAuthentication with CORSSupport {
+
+  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
   val dc: DashboardConfig
   val evaluationInstances = Storage.getMetaDataEvaluationInstances
   val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_"))
   val serverStartTime = DateTime.now
   val dashboardRoute =
     path("") {
-      get {
-        respondWithMediaType(`text/html`) {
-          complete {
-            val completedInstances = evaluationInstances.getCompleted
-            html.index(
-              dc,
-              serverStartTime,
-              pioEnvVars,
-              completedInstances).toString
+      authenticate(withAccessKeyFromFile) { request =>
+        get {
+          respondWithMediaType(`text/html`) {
+            complete {
+              val completedInstances = evaluationInstances.getCompleted
+              html.index(
+                dc,
+                serverStartTime,
+                pioEnvVars,
+                completedInstances).toString
+            }
           }
         }
       }
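
The dashboard route is now wrapped in spray-routing's `authenticate` directive, and the server binds with `sslEncryption = true`, so both access and transport are gated. `withAccessKeyFromFile` comes from the new `KeyAuthentication` trait; for `authenticate` to accept it, it must behave like a `ContextAuthenticator`, i.e. a `RequestContext => Future[Authentication[T]]` where `Authentication[T]` is `Either[Rejection, T]`. A hypothetical stand-in authenticator (the `accessKey` query parameter is an assumption, not the trait's actual mechanism):

```scala
import scala.concurrent.{ExecutionContext, Future}
import spray.routing.{AuthenticationFailedRejection, RequestContext}
import spray.routing.AuthenticationFailedRejection.CredentialsRejected
import spray.routing.authentication.Authentication

// Hypothetical sketch shaped like withAccessKeyFromFile: accept the request
// when its accessKey query parameter matches the server-side key.
def withAccessKey(serverKey: String)(implicit ec: ExecutionContext)
    : RequestContext => Future[Authentication[Unit]] = { ctx =>
  Future {
    val supplied = ctx.request.uri.query.get("accessKey")
    if (supplied.exists(_ == serverKey)) Right(())
    else Left(AuthenticationFailedRejection(CredentialsRejected, Nil))
  }
}
```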
diff --git a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala b/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
index f3c98d6..9a19a33 100644
--- a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
+++ b/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
@@ -26,6 +26,8 @@
 import grizzled.slf4j.Logging
 import org.json4s.native.Serialization._
 
+import scala.util.{Failure, Try}
+
 case class FileToEventsArgs(
   env: String = "",
   logFile: String = "",
@@ -83,9 +85,15 @@
         mode = "Import",
         batch = "App ID " + args.appId + channelStr,
         executorEnv = Runner.envStringToMap(args.env))
-      val rdd = sc.textFile(args.inputPath)
+      val rdd = sc.textFile(args.inputPath).filter(_.trim.nonEmpty).map { json =>
+        Try(read[Event](json)).recoverWith {
+          case e: Throwable =>
+            error(s"\nmalformed json => $json")
+            Failure(e)
+        }.get
+      }
       val events = Storage.getPEvents()
-      events.write(events = rdd.map(read[Event](_)),
+      events.write(events = rdd,
         appId = args.appId,
         channelId = channelId)(sc)
       info("Events are imported.")
diff --git a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
index 29b84ad..49f21b1 100644
--- a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
+++ b/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
@@ -26,15 +26,17 @@
       App name.
 
 
-Usage: pio app delete <name>
+Usage: pio app delete <name> [--force]
 
 Name of the app to be deleted.
 
   <name>
       App name.
+  --force, -f
+      Delete the app without prompting for confirmation
 
 
-Usage: pio app data-delete <name> [--channel <name>] [--all]
+Usage: pio app data-delete <name> [--channel <name>] [--all] [--force]
 
 Delete data of an app.
 
@@ -44,6 +46,8 @@
       Delete data of the specified channel (default channel if not specified)
   --all
       Delete all data of this app (including both default and all channels)
+  --force, -f
+      Delete data without prompting for confirmation
 
 
 Usage: pio app channel-new <name> <channel>
@@ -57,7 +61,7 @@
       Channel name to be created.
 
 
-Usage: pio app channel-delete <name> <channel>
+Usage: pio app channel-delete <name> <channel> [--force]
 
 Delete a channel for the app.
 
@@ -66,3 +70,5 @@
 
   <channel>
       Channel name to be deleted.
+  --force, -f
+      Delete the channel without prompting for confirmation