Merge branch 'livedoc' into develop
diff --git a/.travis.yml b/.travis.yml
index 65f4166..d2a8d2f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -137,17 +137,17 @@
 before_install:
   - unset SBT_OPTS JVM_OPTS
   - sudo rm /usr/local/bin/docker-compose
-  - curl -L https://github.com/docker/compose/releases/download/1.11.1/docker-compose-`uname -s`-`uname -m` > docker-compose
+  - travis_retry curl -L https://github.com/docker/compose/releases/download/1.11.1/docker-compose-`uname -s`-`uname -m` > docker-compose
   - chmod +x docker-compose
   - sudo mv docker-compose /usr/local/bin
 
 before_script:
   - sudo sysctl -w vm.max_map_count=262144
   - docker-compose -v
-  - ./tests/before_script.travis.sh
+  - travis_retry ./tests/before_script.travis.sh
 
 script:
-  - ./tests/script.travis.sh
+  - travis_retry ./tests/script.travis.sh
 
 after_script:
   - ./tests/after_script.travis.sh
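`travis_retry` is Travis CI's built-in helper that re-runs a failing command before giving up (up to three attempts, per the Travis docs), which is why it now wraps the flaky network fetch and the Docker-heavy test scripts above. A minimal sketch of its semantics in Scala (an illustration only, not Travis's actual shell implementation):

```scala
// Sketch of travis_retry semantics: re-run the body until it succeeds,
// giving up after a fixed number of attempts (Travis uses three).
def retry[A](attempts: Int = 3)(body: => A): A =
  try body
  catch {
    case e: Exception if attempts > 1 => retry(attempts - 1)(body)
  }

// Example: retry(3)(scala.io.Source.fromURL("https://example.com/file").mkString)
```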
diff --git a/bin/pio-class b/bin/pio-class
index 94cc520..9e01a40 100755
--- a/bin/pio-class
+++ b/bin/pio-class
@@ -44,7 +44,7 @@
 
 # Make sure the Apache Spark version meets the prerequisite if it is a binary
 # distribution
-MIN_SPARK_VERSION="1.3.0"
+MIN_SPARK_VERSION="1.6.3"
 if [ -z "$SPARK_HOME" ]; then
   echo -e "\033[0;31mSPARK_HOME must be set in conf/pio-env.sh, or in the environment!\033[0m"
   exit 1
diff --git a/build.sbt b/build.sbt
index ec54104..0243610 100644
--- a/build.sbt
+++ b/build.sbt
@@ -50,7 +50,7 @@
 
 name := "apache-predictionio-parent"
 
-version in ThisBuild := "0.12.1"
+version in ThisBuild := "0.13.0-SNAPSHOT"
 
 organization in ThisBuild := "org.apache.predictionio"
 
@@ -65,7 +65,7 @@
 scalacOptions in (ThisBuild, Test) ++= Seq("-Yrangepos")
 fork in (ThisBuild, run) := true
 
-javacOptions in (ThisBuild, compile) ++= Seq("-source", "1.7", "-target", "1.7",
+javacOptions in (ThisBuild, compile) ++= Seq("-source", "1.8", "-target", "1.8",
   "-Xlint:deprecation", "-Xlint:unchecked")
 
 // Ignore differentiation of Spark patch levels
@@ -323,6 +323,10 @@
 testOptions in Test += Tests.Argument("-oDF")
 
 printBuildInfo := {
+  if (scalaBinaryVersion.value == "2.10")
+    streams.value.log.warn("Support for Scala 2.10 is deprecated. Please upgrade to a newer version of Scala.")
+  if (sparkBinaryVersion.value == "1.6")
+    streams.value.log.warn("Support for Spark 1.6 is deprecated. Please upgrade to a newer version of Spark.")
   println(s"PIO_SCALA_VERSION=${scalaVersion.value}")
   println(s"PIO_SPARK_VERSION=${sparkVersion.value}")
   println(s"PIO_ELASTICSEARCH_VERSION=${elasticsearchVersion.value}")
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Engine.scala b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
index 1f9d0ab..3b5f363 100644
--- a/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
+++ b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
@@ -268,7 +268,7 @@
 
   /** Extract model for persistent layer.
     *
-    * PredictionIO presist models for future use. It allows custom
+    * PredictionIO persists models for future use. It allows custom
     * implementation for persisting models. You need to implement the
     * [[org.apache.predictionio.controller.PersistentModel]] interface. This method
     * traverses all models in the workflow. If the model is a
@@ -642,7 +642,7 @@
       dataSource.readTrainingBase(sc)
     } catch {
       case e: StorageClientException =>
-        logger.error(s"Error occured reading from data source. (Reason: " +
+        logger.error(s"Error occurred reading from data source. (Reason: " +
           e.getMessage + ") Please see the log for debugging details.", e)
         sys.exit(1)
     }
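The `PersistentModel` contract referenced in the doc comment above is small; a sketch of a custom implementation, based on the signatures in the PredictionIO controller API (`MyParams`, `MyModel`, and the `/tmp` save path are hypothetical):

```scala
import org.apache.predictionio.controller.{Params, PersistentModel, PersistentModelLoader}
import org.apache.spark.SparkContext

case class MyParams(appName: String) extends Params

// Hypothetical model persisting a single string; returning true from save
// tells PredictionIO the model was persisted by the custom implementation.
class MyModel(val data: String) extends PersistentModel[MyParams] {
  def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
    sc.parallelize(Seq(data)).saveAsObjectFile(s"/tmp/$id/data")
    true
  }
}

object MyModel extends PersistentModelLoader[MyParams, MyModel] {
  def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel =
    new MyModel(sc.get.objectFile[String](s"/tmp/$id/data").first)
}
```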
diff --git a/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala b/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
index 5da48ce..04d781f 100644
--- a/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
+++ b/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
@@ -48,7 +48,7 @@
 
-    // Subclasses only allows two kind of constructors.
+    // Subclasses only allow two kinds of constructors.
     // 1. Constructor with P <: Params.
-    // 2. Emtpy constructor.
+    // 2. Empty constructor.
     // First try (1), if failed, try (2).
     try {
       val constr = cls.getConstructor(params.getClass)
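The constructor-resolution rule in this comment distils to a few lines of reflection; a simplified sketch of the behaviour (not the actual `Doer` code):

```scala
import org.apache.predictionio.controller.Params

// Simplified sketch: prefer the constructor taking the concrete Params
// class; fall back to the empty constructor if that one does not exist.
def instantiate[T](cls: Class[T], params: Params): T =
  try {
    cls.getConstructor(params.getClass).newInstance(params)
  } catch {
    case _: NoSuchMethodException =>
      cls.getDeclaredConstructor().newInstance()
  }
```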
diff --git a/core/src/main/scala/org/apache/predictionio/core/SelfCleaningDataSource.scala b/core/src/main/scala/org/apache/predictionio/core/SelfCleaningDataSource.scala
index cadf6b8..be3fe05 100644
--- a/core/src/main/scala/org/apache/predictionio/core/SelfCleaningDataSource.scala
+++ b/core/src/main/scala/org/apache/predictionio/core/SelfCleaningDataSource.scala
@@ -69,7 +69,7 @@
 
   /** :: DeveloperApi ::
     *
-    * Returns RDD of events happend after duration in event window params.
+    * Returns RDD of events that happened after the duration in event window params.
     *
     * @return RDD[Event] most recent PEvents.
     */
@@ -87,7 +87,7 @@
 
   /** :: DeveloperApi ::
     *
-    * Returns Iterator of events happend after duration in event window params.
+    * Returns Iterator of events that happened after the duration in event window params.
     *
     * @return Iterator[Event] most recent LEvents.
     */
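Both methods implement the same cutoff rule; a sketch of that rule, assuming `Event.eventTime` is a Joda `DateTime` as in the PredictionIO storage API (the helper name is hypothetical):

```scala
import org.apache.predictionio.data.storage.Event
import org.apache.spark.rdd.RDD
import org.joda.time.{DateTime, Duration}

// Keep only events whose eventTime falls after (now - duration), where
// duration comes from the event window params.
def recentEvents(events: RDD[Event], duration: Duration): RDD[Event] = {
  val cutoff = DateTime.now().minus(duration)
  events.filter(_.eventTime.isAfter(cutoff))
}
```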
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
index c9b1d23..8e4db51 100644
--- a/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
+++ b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
@@ -72,7 +72,7 @@
   override val noSave: Boolean = true
 }
 
-/** FakeRun allows user to implement custom function under the exact enviroment
-  * as other PredictionIO workflow.
+/** FakeRun allows users to implement custom functions under the same environment
+  * as other PredictionIO workflows.
   *
   * Useful for developing new features. Only need to extend this trait and
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
index add9c6f..9bb1847 100644
--- a/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
@@ -24,7 +24,7 @@
 
 class MailChimpConnectorSpec extends Specification with ConnectorTestUtil {
 
-  // TOOD: test other events
+  // TODO: test other events
   // TODO: test different optional fields
 
   "MailChimpConnector" should {
@@ -87,7 +87,7 @@
         "data[merges][EMAIL]" -> "api+unsub@mailchimp.com",
         "data[merges][FNAME]" -> "MailChimp",
         "data[merges][LNAME]" -> "API",
-        "data[merges][INTERESTS]" -> "Group1,Group2", //optional 
+        "data[merges][INTERESTS]" -> "Group1,Group2", //optional
         "data[ip_opt]" -> "10.20.10.30",
         "data[campaign_id]" -> "cb398d21d2"
       )
@@ -120,7 +120,7 @@
       check(MailChimpConnector, unsubscribe, expected)
     }
 
-    //check profile update to event Json 
+    // check profile update to event JSON
     "convert profile update to event JSON" in {
 
       val profileUpdate = Map(
@@ -162,7 +162,7 @@
       check(MailChimpConnector, profileUpdate, expected)
     }
 
-    //check email update to event Json 
+    // check email update to event JSON
     "convert email update to event JSON" in {
 
       val emailUpdate = Map(
@@ -192,7 +192,7 @@
       check(MailChimpConnector, emailUpdate, expected)
     }
 
-    //check cleaned email to event Json 
+    // check cleaned email to event JSON
     "convert cleaned email to event JSON" in {
 
       val cleanedEmail = Map(
@@ -221,7 +221,7 @@
       check(MailChimpConnector, cleanedEmail, expected)
     }
 
-    //check campaign sending status to event Json 
+    // check campaign sending status to event JSON
     "convert campaign sending status to event JSON" in {
 
       val campaign = Map(
diff --git a/data/test.sh b/data/test.sh
index be0eb07..8481f47 100755
--- a/data/test.sh
+++ b/data/test.sh
@@ -481,7 +481,7 @@
 checkPOST "/batch/events.json?accessKey=$accessKey" "$testdata" 200
 
 # request with a malformed event (2nd event)
-# the response code is succesful but the error for individual event is reflected in the response's body.
+# the response code is successful but the error for the individual event is reflected in the response's body.
 testdata='[{
   "event" : "my_event_1",
   "entityType" : "user",
diff --git a/docs/manual/source/install/index.html.md.erb b/docs/manual/source/install/index.html.md.erb
index 167d3bf..3b34871 100644
--- a/docs/manual/source/install/index.html.md.erb
+++ b/docs/manual/source/install/index.html.md.erb
@@ -41,6 +41,9 @@
 * Apache HBase 0.98.5
 * Elasticsearch 1.7.6
 
+WARNING: **Note that support for Scala 2.10 and Spark 1.6.x is deprecated as of PredictionIO 0.13.0,
+and may be removed in a future release.**
+
 If you are running on a single machine, we recommend a minimum of 2GB memory.
 
 INFO: If you are using Linux, Apache Spark local mode, which is the default
diff --git a/docs/manual/source/install/install-sourcecode.html.md.erb b/docs/manual/source/install/install-sourcecode.html.md.erb
index a16acde..8abc448 100644
--- a/docs/manual/source/install/install-sourcecode.html.md.erb
+++ b/docs/manual/source/install/install-sourcecode.html.md.erb
@@ -108,9 +108,9 @@
 versions of dependencies. As of writing, one could build PredictionIO against
 these different dependencies:
 
-* Scala 2.10.x, 2.11.x
-* Spark 1.6.x, 2.0.x, 2.1.x
-* Hadoop 2.4.x to 2.7.x
+* Scala 2.10.x (deprecated), 2.11.x
+* Spark 1.6.x (deprecated), 2.0.x, 2.1.x
+* Hadoop 2.6.x, 2.7.x
 * Elasticsearch 1.7.x, 5.x
 
 As an example, if you want to build PredictionIO to support Scala 2.11.8,
diff --git a/docs/manual/source/system/index.html.md b/docs/manual/source/system/index.html.md
index 1030fec..dcff342 100644
--- a/docs/manual/source/system/index.html.md
+++ b/docs/manual/source/system/index.html.md
@@ -19,13 +19,6 @@
 limitations under the License.
 -->
 
-During the [installation](/install), you have installed the latest stable versions of the following software:
-
-* Apache Hadoop up to 2.7.2 (required only if YARN and HDFS are needed)
-* Apache HBase up to 1.2.4
-* Apache Spark up to 1.6.3 for Hadoop 2.6 (not Spark 2.x version)
-* Elasticsearch up to 1.7.5 (not the Elasticsearch 2.x version)
-
-This section explains general rules-of-thumb for how they are used in PredictionIO. The actual implementation of the Template will define how much of this applies. PredictionIO is flexible about much of this configuration but its Templates generally fit the Lambda model for integrating real-time serving with background periodic model updates.
+This section explains general rules of thumb for how the underlying services are used in PredictionIO. The actual implementation of the Template will define how much of this applies. PredictionIO is flexible about much of this configuration, but its Templates generally fit the Lambda model for integrating real-time serving with background periodic model updates.
 
 ![PredictionIO Systems](/images/pio-architecture.svg)
diff --git a/docs/scaladoc/README.md b/docs/scaladoc/README.md
index 2f64966..2faa7e3 100644
--- a/docs/scaladoc/README.md
+++ b/docs/scaladoc/README.md
@@ -23,4 +23,4 @@
     $ sbt/sbt pioUnidoc
     ```
 
-2.  Point your web browser at `target/scala-2.10/unidoc/index.html`.
+2.  Point your web browser at `target/scala-2.11/unidoc/index.html`.
diff --git a/storage/hbase/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala b/storage/hbase/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
index 64487fb..4b0ad9a 100644
--- a/storage/hbase/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
+++ b/storage/hbase/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
@@ -148,7 +148,7 @@
     val rowKey = event.eventId.map { id =>
       RowKey(id) // create rowKey from eventId
     }.getOrElse {
-      // TOOD: use real UUID. not pseudo random
+      // TODO: use real UUID, not pseudo random
       val uuidLow: Long = UUID.randomUUID().getLeastSignificantBits
       RowKey(
         entityType = event.entityType,
diff --git a/tests/pio_tests/README.md b/tests/pio_tests/README.md
index 070ac8b..cb426da 100644
--- a/tests/pio_tests/README.md
+++ b/tests/pio_tests/README.md
@@ -33,14 +33,14 @@
 You can pass it arguments to:
 * suppress the output of executed shell commands within the tests
 * enable logging
-* specify which tests should be exectued (by names)
+* specify which tests should be executed (by names)
 
 For more information run:
 ```shell
 python3 tests.py -h
 ```
 
-As soon as the tests are finishied an XML file with JUnit-like test reports 
+As soon as the tests are finished, an XML file with JUnit-like test reports
 is created in the directory of execution.
 
 ### Adding new tests
diff --git a/tests/pio_tests/utils.py b/tests/pio_tests/utils.py
index 4659e54..6eecf89 100644
--- a/tests/pio_tests/utils.py
+++ b/tests/pio_tests/utils.py
@@ -55,7 +55,7 @@
 
 def obtain_template(engine_dir, template):
   """Given a directory with engines and a template downloads an engine
-  if neccessary
+  if necessary
   Args:
     engine_dir (str): directory where engines are stored
     template (str): either the name of an engine from the engines directory
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/commands/App.scala b/tools/src/main/scala/org/apache/predictionio/tools/commands/App.scala
index 5884ebd..06c1641 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/commands/App.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/commands/App.scala
@@ -298,7 +298,7 @@
               Right(channel.copy(id = chanId))
             } else {
               errStr = s"""Unable to create new channel.
-                          |Failed to initalize Event Store.""".stripMargin
+                          |Failed to initialize Event Store.""".stripMargin
               error(errStr)
-              // reverted back the meta data
+              // revert the metadata
               try {
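The surrounding hunk follows a create-then-revert pattern: the channel metadata is written first, and rolled back if Event Store initialization fails. A distilled sketch of that pattern (hypothetical function shapes, not the actual App.scala code):

```scala
// Create metadata, initialize the store, and roll the metadata back on
// failure so no half-created channel is left behind.
def createChannel(
    insertMeta: () => Int,        // returns the new channel id
    initStore: Int => Boolean,    // true on success
    revertMeta: Int => Unit): Either[String, Int] = {
  val chanId = insertMeta()
  if (initStore(chanId)) Right(chanId)
  else {
    try revertMeta(chanId)
    catch { case _: Exception => () } // best effort; log in real code
    Left("Unable to create new channel. Failed to initialize Event Store.")
  }
}
```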
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/commands/Management.scala b/tools/src/main/scala/org/apache/predictionio/tools/commands/Management.scala
index ee8cd50..cd71fdd 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/commands/Management.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/commands/Management.scala
@@ -109,7 +109,7 @@
     val sparkHomePath = Common.getSparkHome(sparkHome)
     if (new File(s"$sparkHomePath/bin/spark-submit").exists) {
       info(s"Apache Spark is installed at $sparkHomePath")
-      val sparkMinVersion = "1.3.0"
+      val sparkMinVersion = "1.6.3"
       pioStatus = pioStatus.copy(
         sparkHome = sparkHomePath,
         sparkMinVersion = sparkMinVersion)
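Both this constant and `MIN_SPARK_VERSION` in `bin/pio-class` gate startup on a dotted-version comparison; a minimal sketch of such a check (hypothetical helper, not the actual pio implementation):

```scala
// True when `version` is at least `minimum`, comparing dotted numeric
// components left to right, e.g. meetsMinimum("2.1.1", "1.6.3") == true.
def meetsMinimum(version: String, minimum: String): Boolean = {
  def parts(v: String): Seq[Int] = v.split("\\.").toSeq.map(_.toInt)
  parts(version).zipAll(parts(minimum), 0, 0)
    .find { case (a, b) => a != b }
    .forall { case (a, b) => a > b }
}
```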