[TOREE-405] Update documentation as in current live site
diff --git a/site/_data/documentation.yml b/site/_data/documentation.yml
new file mode 100644
index 0000000..fc17a44
--- /dev/null
+++ b/site/_data/documentation.yml
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+- section_name: "User"
+  section_id: "user"
+  section_url: "/docs/current/user/quick-start"
+
+- section_name: "Developer"
+  section_id: "developer"
+  section_url: "/docs/current/developer/contributing-to-the-project"
+
+- section_name: "References"
+  section_id: "references"
+  section_url: "/docs/current/references/scaladocs"
+
diff --git a/site/_data/navigation.yml b/site/_data/navigation.yml
index dbb9a41..80e25f0 100644
--- a/site/_data/navigation.yml
+++ b/site/_data/navigation.yml
@@ -15,6 +15,15 @@
 
 topnav:
 
+- title: Documentation
+  subcategories:
+  - title: User
+    url: /docs/current/user/quick-start
+  - title: Developer
+    url: /docs/current/developer/contributing-to-the-project
+  - title: References
+    url: /docs/current/references/scaladocs
+
 - title: Community
   subcategories:
   - title: Get Involved
diff --git a/site/_includes/themes/apache/_navigation_doc.html b/site/_includes/themes/apache/_navigation_doc.html
new file mode 100644
index 0000000..b9e9bff
--- /dev/null
+++ b/site/_includes/themes/apache/_navigation_doc.html
@@ -0,0 +1,57 @@
+<nav id="navbar" class="doc-nav" data-spy="affix" data-offset-top="60" data-offset-bottom="200">
+    {% assign DOC_PAGES = site.pages | where: "type", "doc" %}
+    {% assign DOC_GROUPS = DOC_PAGES | group_by: "section" %}
+    {% assign ACTIVE_PAGE_ID = page.path %}
+
+    <style>
+        .doc-nav h3 {
+            text-transform: uppercase;
+            font-size: 2rem;
+            color: black;
+        }
+
+        .doc-nav a {
+            color: #666;
+            display: block;
+            margin-left: 0px;
+            font-weight: 400;
+            text-decoration: none;
+            padding-left: 0px !important;
+        }
+
+        .doc-nav a.active {
+            color: #C15757;
+        }
+
+        .doc-nav li .doc-section {
+            border-bottom: 1px solid #ccc;
+        }
+        .doc-nav li:last-child .doc-section {
+            border-bottom: 0px;
+        }
+    </style>
+    <ul class="nav">
+    {% for s in site.data.documentation %}
+        {% assign g = DOC_GROUPS | where: "name", s.section_id | first %}
+        {% assign g_items = g.items %}
+        {% if g_items %}
+            {% assign pages = g_items | sort: "weight" %}
+        {% endif %}
+
+        <li>
+            <div class="doc-section">
+                <h3>{{ s.section_name }}</h3>
+                <ul class="nav">
+                    {% for p in pages %}
+                        <li>
+                            <a class="{% if p.path == ACTIVE_PAGE_ID %}active{% endif %}"
+                                href="{{ p.url }}">{{ p.title }}</a>
+                        </li>
+                    {% endfor %}
+                </ul>
+            </div>
+        </li>
+    {% endfor %}
+    </ul>
+</nav>
+
diff --git a/site/_includes/themes/apache/doc.html b/site/_includes/themes/apache/doc.html
new file mode 100644
index 0000000..eb3883d
--- /dev/null
+++ b/site/_includes/themes/apache/doc.html
@@ -0,0 +1,13 @@
+<!--<div class="hero-unit {{ page.title | lowercase }}">
+  <h1>{% if page.tagline %} <small>{{ page.tagline }}</small>{% endif %}</h1>
+</div>
+-->
+
+<div class="row">
+    <div class="col-md-3" role="complementary">
+        {% include themes/apache/_navigation_doc.html %}
+    </div>
+    <div class="col-md-9" role="main">
+        {{ content }}
+    </div>
+</div>
diff --git a/site/_layouts/doc.html b/site/_layouts/doc.html
new file mode 100644
index 0000000..5dfc257
--- /dev/null
+++ b/site/_layouts/doc.html
@@ -0,0 +1,6 @@
+---
+layout: default
+---
+
+{% include JB/setup %}
+{% include themes/apache/doc.html %}
diff --git a/site/assets/images/batch_mode.png b/site/assets/images/batch_mode.png
new file mode 100644
index 0000000..18082f3
--- /dev/null
+++ b/site/assets/images/batch_mode.png
Binary files differ
diff --git a/site/assets/images/interactive_mode.png b/site/assets/images/interactive_mode.png
new file mode 100644
index 0000000..55abbc2
--- /dev/null
+++ b/site/assets/images/interactive_mode.png
Binary files differ
diff --git a/site/assets/images/toree-quick-start-notebook.gif b/site/assets/images/toree-quick-start-notebook.gif
new file mode 100644
index 0000000..c842614
--- /dev/null
+++ b/site/assets/images/toree-quick-start-notebook.gif
Binary files differ
diff --git a/site/assets/images/toree-quick-start-spark.gif b/site/assets/images/toree-quick-start-spark.gif
new file mode 100644
index 0000000..ed44c40
--- /dev/null
+++ b/site/assets/images/toree-quick-start-spark.gif
Binary files differ
diff --git a/site/assets/images/toree_spark_gateway.png b/site/assets/images/toree_spark_gateway.png
new file mode 100644
index 0000000..a18daa0
--- /dev/null
+++ b/site/assets/images/toree_spark_gateway.png
Binary files differ
diff --git a/site/assets/images/toree_with_notebook.png b/site/assets/images/toree_with_notebook.png
new file mode 100644
index 0000000..873142c
--- /dev/null
+++ b/site/assets/images/toree_with_notebook.png
Binary files differ
diff --git a/site/docs/current/developer/contributing-to-the-project.md b/site/docs/current/developer/contributing-to-the-project.md
new file mode 100644
index 0000000..a047f61
--- /dev/null
+++ b/site/docs/current/developer/contributing-to-the-project.md
@@ -0,0 +1,17 @@
+---
+layout: doc
+title: Contributing to the Project
+type: doc
+section: developer
+weight: 0
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- Requirements
+- Makefile
+    - Build
+    - Dev
+    - Test
+    - Publish
diff --git a/site/docs/current/developer/creating-extensions.md b/site/docs/current/developer/creating-extensions.md
new file mode 100644
index 0000000..48eedf2
--- /dev/null
+++ b/site/docs/current/developer/creating-extensions.md
@@ -0,0 +1,14 @@
+---
+layout: doc
+title: Creating Extensions
+type: doc
+section: developer
+weight: 10
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- What is pluggable
+- Creating a magic
+
diff --git a/site/docs/current/references/scaladocs.md b/site/docs/current/references/scaladocs.md
new file mode 100644
index 0000000..3997cc7
--- /dev/null
+++ b/site/docs/current/references/scaladocs.md
@@ -0,0 +1,13 @@
+---
+layout: doc
+title: ScalaDocs
+type: doc
+section: references
+weight: 0
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- Should redirect to ScalaDocs
+
diff --git a/site/docs/current/user/advanced-topics.md b/site/docs/current/user/advanced-topics.md
new file mode 100644
index 0000000..f8de178
--- /dev/null
+++ b/site/docs/current/user/advanced-topics.md
@@ -0,0 +1,13 @@
+---
+layout: doc
+title: Advanced Topics
+type: doc
+section: user
+weight: 60
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- Comm API
+
diff --git a/site/docs/current/user/faq.md b/site/docs/current/user/faq.md
new file mode 100644
index 0000000..00a1bd5
--- /dev/null
+++ b/site/docs/current/user/faq.md
@@ -0,0 +1,48 @@
+---
+layout: doc
+title: FAQ
+type: doc
+section: user
+weight: 50
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+# FAQ
+
+## How do I access Apache Spark?
+
+You can access Spark through a `SparkContext` which is created by Apache Toree when the kernel starts. You can access
+the context through the `sc` variable.
+
+## How do I add a jar?
+Jars are added through the `AddJar` magic. You simply need to supply a URL for the jar to be added.
+
+```
+%AddJar http://myproject.com/myproject/my.jar
+```
+
+For more information about the `AddJar` magic see the [Magic Tutorial Notebook][1].
+
+## How do I add a library/dependency?
+
+Dependencies stored in repositories can be added through the `AddDeps` magic. An example usage would be:
+
+```
+%AddDeps my.company artifact-id version
+```
+
+If the dependency you are trying to add has transitive dependencies, you can add the `--transitive` flag to add those dependencies as well.
+For more information about the `AddDeps` magic see the [Magic Tutorial Notebook][1].
+
+## How do I visualize data?
+The most straightforward way to add data visualization with Apache Toree is through the [Jupyter Declarative Widgets][2] project.
+
+## How do I create dashboards with interactive widgets?
+Notebooks can be changed into dashboards through the [Jupyter Dashboards][3] project. This project allows you to use 
+[Jupyter Declarative Widgets][2] in your dashboards.
+
+[1]: https://github.com/apache/incubator-toree/blob/master/etc/examples/notebooks/magic-tutorial.ipynb
+[2]: https://github.com/jupyter-incubator/declarativewidgets
+[3]: https://github.com/jupyter-incubator/dashboards
\ No newline at end of file
diff --git a/site/docs/current/user/how-it-works.md b/site/docs/current/user/how-it-works.md
new file mode 100644
index 0000000..0540344
--- /dev/null
+++ b/site/docs/current/user/how-it-works.md
@@ -0,0 +1,69 @@
+---
+layout: doc
+title: How it works
+type: doc
+section: user
+weight: 20
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+# How it works
+
+Toree provides an interactive programming interface to a Spark Cluster. Its API takes in `code` in a variety of 
+languages and executes it. The `code` can perform Spark tasks using the provided Spark Context. 
+
+To further understand how Toree works, it is worth exploring the role that it plays in several usage scenarios. 
+
+### As a Kernel to Jupyter Notebooks
+
+Toree's primary role is as a [Jupyter](http://jupyter.org/) Kernel. It was originally created to add full Spark API 
+support to a Jupyter Notebook using the Scala language. It has since grown to also support Python and R. The diagram 
+below shows Toree in relation to a running Jupyter Notebook.
+
+![Toree with Jupyter Notebook](/assets/images/toree_with_notebook.png)
+
+When the user creates a new Notebook and selects Toree, the Notebook server launches a new Toree process that is
+configured to connect to a Spark cluster. Once in the Notebook, the user can interact with Spark by writing code that
+uses the managed Spark Context instance.
+
+The Notebook server and Toree communicate using the [Jupyter Kernel Protocol](https://ipython.org/ipython-doc/3/development/messaging.html). 
+This is a [0MQ](http://zeromq.org/) based protocol that is language agnostic and allows for bidirectional communication
+between the client and the kernel (i.e. Toree). This protocol is the __ONLY__ network interface for communicating with a 
+Toree process. 
+
+When using Toree within a Jupyter Notebook, these technical details can be ignored, but they are very relevant when 
+building custom clients. Several options are discussed in the next section.
+
+### As an Interactive Gateway to Spark
+
+One way of using Spark is what is commonly referred to as 'Batch' mode. Very similar to other Big Data systems, such as 
+Hadoop, this mode has the user create a program that is submitted to the cluster. This program runs tasks in the
+cluster and ultimately writes data to some persistent store (i.e. HDFS or No-SQL store). Spark provides `Batch` mode
+support through [Spark Submit](http://spark.apache.org/docs/latest/submitting-applications.html).
+
+![Toree Gateway to Spark](/assets/images/batch_mode.png)
+
+This mode of using Spark, although valid, suffers from lots of friction. For example, packaging and submitting of jobs, as
+well as the reading and writing from storage, tend to introduce unwanted latencies. Spark alleviates some of the 
+frictions by relying on memory to hold data along with the concept of a SparkContext as a way to tie jobs together. What
+is missing from Spark is a way for applications to interact with a long living SparkContext. 
+
+![Toree Gateway to Spark](/assets/images/interactive_mode.png)
+
+Toree provides this through a communication channel between an application and a SparkContext that allows access to the 
+entire Spark API. Through this channel, the application interacts with Spark by exchanging code and data.
+
+The Jupyter Notebook is a good example of an application that relies on the presence of these interactive channels and
+uses Toree to access Spark. Other Spark enabled applications can be built that directly connect to Toree through the 
+`0MQ` protocol, but there are also other ways.
+
+![Toree Gateway to Spark](/assets/images/toree_spark_gateway.png)
+
+As shown above, the [Jupyter Kernel Gateway](https://github.com/jupyter/kernel_gateway) can be used to expose a Web 
+Socket based protocol to Toree. This makes Toree easier to integrate. In combination with the
+[jupyter-js-services](https://github.com/jupyter/jupyter-js-services) library, other web applications can access Spark
+interactively. The [Jupyter Dashboard Server](https://github.com/jupyter-incubator/dashboards_server) is an example of
+a web application that uses Toree as the backend to dynamic dashboards.
+
diff --git a/site/docs/current/user/installation.md b/site/docs/current/user/installation.md
new file mode 100644
index 0000000..5e0f329
--- /dev/null
+++ b/site/docs/current/user/installation.md
@@ -0,0 +1,175 @@
+---
+layout: doc
+title: Installation
+type: doc
+section: user
+weight: 10
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+# Installation
+
+## Setup 
+
+An Apache Spark distribution is required to be installed before installing Apache Toree. You can download a copy of Apache Spark [here](http://spark.apache.org/downloads.html). Throughout the rest of this guide we will assume you have downloaded and extracted the Apache Spark distribution to `/usr/local/bin/apache-spark/`.
+
+## Installing Toree via Pip
+
+The quickest way to install Apache Toree is through the toree pip package. 
+
+```
+pip install toree
+```
+
+This will install a jupyter application called `toree`, which can be used to install and configure different Apache Toree kernels.
+
+```
+jupyter toree install --spark_home=/usr/local/bin/apache-spark/
+```
+
+You can confirm the installation by verifying the `apache_toree_scala` kernel is listed in the following command:
+
+```
+jupyter kernelspec list
+```
+
+## Options
+Arguments that take values are actually convenience aliases to full
+Configurables, whose aliases are listed on the help line. For more information
+on full configurables, see '--help-all'.
+
+```
+--user
+    Install to the per-user kernel registry
+--debug
+    set log level to logging.DEBUG (maximize logging output)
+--replace
+    Replace any existing kernel spec with this name.
+--sys-prefix
+    Install to Python's sys.prefix. Useful in conda/virtual environments.
+--interpreters=<Unicode> (ToreeInstall.interpreters)
+    Default: 'Scala'
+    A comma separated list of the interpreters to install. The names of the
+    interpreters are case sensitive.
+--toree_opts=<Unicode> (ToreeInstall.toree_opts)
+    Default: ''
+    Specify command line arguments for Apache Toree.
+--python_exec=<Unicode> (ToreeInstall.python_exec)
+    Default: 'python'
+    Specify the python executable. Defaults to "python"
+--kernel_name=<Unicode> (ToreeInstall.kernel_name)
+    Default: 'Apache Toree'
+    Install the kernel spec with this name. This is also used as the base of the
+    display name in jupyter.
+--log-level=<Enum> (Application.log_level)
+    Default: 30
+    Choices: (0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL')
+    Set the log level by value or name.
+--config=<Unicode> (JupyterApp.config_file)
+    Default: ''
+    Full path of a config file.
+--spark_home=<Unicode> (ToreeInstall.spark_home)
+    Default: '/usr/local/spark'
+    Specify where the spark files can be found.
+--spark_opts=<Unicode> (ToreeInstall.spark_opts)
+    Default: ''
+    Specify command line arguments to proxy for spark config.
+```
+
+# Configuring Spark
+
+Toree is started using the `spark-submit` script. All configuration options from Spark are consistent with configuring
+a [Spark Submit](http://spark.apache.org/docs/latest/submitting-applications.html) job. There are two ways of 
+setting configuration options for Spark. 
+
+The first is at install time with the `--spark_opts` command line option.
+
+```
+jupyter toree install --spark_opts='--master=local[4]'
+```
+
+The second option is configured at run time through the `SPARK_OPTS` environment variable.
+
+```
+SPARK_OPTS='--master=local[4]' jupyter notebook 
+```
+
+__Note:__ There is an order of precedence to the configuration options. `SPARK_OPTS` will overwrite any values configured in `--spark_opts`.
+
+
+# Configuring Toree
+
+There are some configuration options that are specific to Toree. 
+
+```
+Option                               Description                          
+------                               -----------                                     
+--default-interpreter                default interpreter for the kernel   
+--default-repositories               comma separated list of additional   
+                                       repositories to resolve            
+--default-repository-credentials     comma separated list of credential   
+                                       files to use                       
+-h, --help                           display help information                      
+--interpreter-plugin                 register an interpreter plugin       
+--ip                                 ip used to bind sockets              
+--jar-dir                            directory where user added jars are  
+                                       stored (MUST EXIST)                
+--magic-url                          path to a magic jar                  
+--max-interpreter-threads <Integer>  total number of worker threads to use
+                                       to execute code                    
+--nosparkcontext                     kernel should not create a spark context                                         
+-v, --version                        display version information 
+```
+
+There are two way of setting these configuration options. 
+
+The first is at install time with the `--toree_opts` command line option.
+
+```
+jupyter toree install --toree_opts='--nosparkcontext'
+```
+
+The second option is configured at run time through the `TOREE_OPTS` environment variable.
+
+```
+TOREE_OPTS='--nosparkcontext' jupyter notebook 
+```
+
+__Note:__ There is an order of precedence to the configuration options. `TOREE_OPTS` will overwrite any values configured in `--toree_opts`.
+
+
+## Installing Multiple Kernels
+
+Apache Toree provides support for multiple languages. To enable this you need to install the configurations for these
+interpreters as a comma separated list to the `--interpreters` flag:
+
+```
+jupyter toree install --interpreters=Scala,PySpark,SparkR,SQL
+```
+
+The available interpreters and their supported languages are:
+
+| Language | Spark Implementation | Value to provide to Apache Toree |
+|----------|----------------------|----------------------------------|
+| Scala    | Scala with Spark     | Scala                            |
+| Python   | Python with PySpark  | PySpark                          |
+| R        | R with SparkR        | SparkR                           |
+| SQL      | Spark SQL            | SQL                              |
+
+### Interpreter Requirements
+* R version 3.2+ 
+* Make sure that the packages directory used by R when installing packages is writable; this is necessary to install the modified SparkR library. This is done automatically before any R code is run. 
+
+If the package directory is not writable by Apache Toree, then you should see an error similar to the following:
+
+```
+Installing package into ‘/usr/local/lib/R/site-library’
+(as ‘lib’ is unspecified)
+Warning in install.packages("sparkr_bundle.tar.gz", repos = NULL, type = "source") :
+'lib = "/usr/local/lib/R/site-library"' is not writable
+Error in install.packages("sparkr_bundle.tar.gz", repos = NULL, type = "source") :
+unable to install packages
+Execution halted
+```
+
diff --git a/site/docs/current/user/quick-start.md b/site/docs/current/user/quick-start.md
new file mode 100644
index 0000000..ade3f03
--- /dev/null
+++ b/site/docs/current/user/quick-start.md
@@ -0,0 +1,61 @@
+---
+layout: doc
+title: Quick Start
+type: doc
+section: user
+weight: 0
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+# Quick Start
+
+## What is Apache Toree
+Apache Toree has one main goal: provide the foundation for interactive applications to connect and use [Apache Spark][1].
+
+The project intends to provide applications with the ability to send both packaged jars and code snippets. As it
+implements the latest Jupyter message protocol, Apache Toree can easily plug into the Jupyter ecosystem for quick, interactive data exploration.
+
+## Installing as kernel in Jupyter
+
+This requires you to have a distribution of [Apache Spark][1] downloaded to the system where Apache Toree will run. The
+following commands will install Apache Toree.
+
+```
+pip install toree
+jupyter toree install --spark_home=/usr/local/bin/apache-spark/
+```
+
+## Your Hello World example
+
+One of the most common ways to use Apache Toree is for interactive data exploration in a Jupyter Notebook. You will
+first need to install the notebook and get the notebook server running:
+
+```
+pip install notebook
+jupyter notebook
+```
+
+The following clip shows a simple notebook running Scala code to print `Hello, World!`. Each of the code cells can be
+run by pressing `Shift-Enter` on your keyboard.
+
+<img src="/assets/images/toree-quick-start-notebook.gif" alt="Drawing" style="width: 100%;"/>
+
+A key component of Apache Toree is that it will automatically create a `SparkContext` binding for you. This can be accessed
+through the variable `sc`. The following clip shows code accessing the `SparkContext` and returning a value.
+
+<img src="/assets/images/toree-quick-start-spark.gif" alt="Drawing" style="width: 100%;"/>
+
+
+## Where to try Apache Toree?
+* [![Binder](http://mybinder.org/badge.svg)][2]
+* [Try Jupyter][3] (_Spark With Scala Notebook_)
+* [IBM Bluemix][4]
+
+
+
+[1]: https://spark.apache.org/
+[2]: http://mybinder.org/repo/apache/incubator-toree
+[3]: http://try.jupyter.org
+[4]: https://console.ng.bluemix.net/catalog/services/apache-spark
diff --git a/site/docs/current/user/using-standalone.md b/site/docs/current/user/using-standalone.md
new file mode 100644
index 0000000..2c318c5
--- /dev/null
+++ b/site/docs/current/user/using-standalone.md
@@ -0,0 +1,16 @@
+---
+layout: doc
+title: Using Standalone
+type: doc
+section: user
+weight: 40
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- Connecting to Toree with custom clients
+    - Kernel gateway/Jupyter-js-service
+    - Python libraries to ZeroMQ
+- Restful access
+
diff --git a/site/docs/current/user/using-with-jupyter-notebooks.md b/site/docs/current/user/using-with-jupyter-notebooks.md
new file mode 100644
index 0000000..52e7e71
--- /dev/null
+++ b/site/docs/current/user/using-with-jupyter-notebooks.md
@@ -0,0 +1,16 @@
+---
+layout: doc
+title: Using with Jupyter Notebooks
+type: doc
+section: user
+weight: 30
+tagline: Apache Project !
+---
+
+{% include JB/setup %}
+
+- Create a notebook with Toree
+- Intro to magics
+- Intro to kernel API
+
+