Additional site documentation and sample cleanup.
diff --git a/build.xml b/build.xml
index 24b9027..7c414a7 100644
--- a/build.xml
+++ b/build.xml
@@ -94,7 +94,7 @@
         </exec>
         <exec executable="${svn.cmd}" dir="gateway-site/target/site">
             <arg value="add"/>
-            <arg value="-q"/>
+            <arg value="--force"/>
             <arg value="."/>
         </exec>
         <exec executable="${svn.cmd}" dir="gateway-site/target/site">
diff --git a/gateway-release/ext/README b/gateway-release/ext/README
new file mode 100644
index 0000000..9eb0ca5
--- /dev/null
+++ b/gateway-release/ext/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE JARS AND CLASSES CONTAINING CUSTOM EXTENSIONS CAN BE PLACED
\ No newline at end of file
diff --git a/gateway-release/lib/README b/gateway-release/lib/README
new file mode 100644
index 0000000..39cee63
--- /dev/null
+++ b/gateway-release/lib/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS RESERVED FOR USE BY FUTURE SYSTEM JARS AND CLASSES
\ No newline at end of file
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 1967ca4..0d9cddb 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -51,7 +51,7 @@
                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                     <manifestEntries>
-                                        <Main-Class>org.apache.hadoop.gateway.GatewayServer</Main-Class>
+                                        <Main-Class>org.apache.hadoop.gateway.launcher.Launcher</Main-Class>
                                     </manifestEntries>
                                 </transformer>
                             </transformers>
diff --git a/gateway-release/samples/Example.groovy b/gateway-release/samples/ExamplePutFile.groovy
similarity index 100%
rename from gateway-release/samples/Example.groovy
rename to gateway-release/samples/ExamplePutFile.groovy
diff --git a/gateway-release/samples/SmokeTestJob.groovy b/gateway-release/samples/ExampleSubmitJob.groovy
similarity index 96%
rename from gateway-release/samples/SmokeTestJob.groovy
rename to gateway-release/samples/ExampleSubmitJob.groovy
index 871a274..d2522ea 100644
--- a/gateway-release/samples/SmokeTestJob.groovy
+++ b/gateway-release/samples/ExampleSubmitJob.groovy
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 import com.jayway.jsonpath.JsonPath
 import org.apache.hadoop.gateway.shell.Hadoop
 import org.apache.hadoop.gateway.shell.hdfs.Hdfs
@@ -46,8 +45,9 @@
   .input( "/tmp/test/input" ) \
   .output( "/tmp/test/output" ) \
   .now().jobId
-println "Submit job " + jobId
+println "Submitted job " + jobId
 
+println "Polling for completion..."
 done = false
 count = 0
 while( !done && count++ < 60 ) {
diff --git a/gateway-release/samples/SmokeTestWorkflow.groovy b/gateway-release/samples/ExampleSubmitWorkflow.groovy
similarity index 97%
rename from gateway-release/samples/SmokeTestWorkflow.groovy
rename to gateway-release/samples/ExampleSubmitWorkflow.groovy
index c515797..4dfc367 100644
--- a/gateway-release/samples/SmokeTestWorkflow.groovy
+++ b/gateway-release/samples/ExampleSubmitWorkflow.groovy
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 import com.jayway.jsonpath.JsonPath
 import org.apache.hadoop.gateway.shell.Hadoop
 import org.apache.hadoop.gateway.shell.hdfs.Hdfs
@@ -78,8 +77,9 @@
 hadoop.waitFor( putWorkflow, putData, putJar )
 
 jobId = Workflow.submit(hadoop).text( configuration ).now().jobId
-println "Submit job " + jobId
+println "Submitted job " + jobId
 
+println "Polling for completion..."
 status = "UNKNOWN";
 count = 0;
 while( status != "SUCCEEDED" && count++ < 60 ) {
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 0748a2b..59d7016 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -73,6 +73,10 @@
         </dependency>
         <dependency>
             <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-util-launcher</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
             <artifactId>gateway-util-urltemplate</artifactId>
         </dependency>
         <dependency>
diff --git a/gateway-server/src/main/resources/META-INF/launcher.cfg b/gateway-server/src/main/resources/META-INF/launcher.cfg
new file mode 100644
index 0000000..419051c
--- /dev/null
+++ b/gateway-server/src/main/resources/META-INF/launcher.cfg
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
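+
+# This file is read by the gateway launcher from the JAR's META-INF directory.
+# main.class names the class whose main() method the launcher invokes, and
+# class.path lists semicolon-separated directories and JAR patterns to place
+# on the classpath.  The relative ../lib and ../ext entries evidently resolve
+# against the directory containing the launcher JAR.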
+main.class = org.apache.hadoop.gateway.GatewayServer
+class.path = ../lib; ../lib/*.jar; ../ext; ../ext/*.jar
diff --git a/gateway-site/src/site/markdown/client.md.vm b/gateway-site/src/site/markdown/client.md.vm
index fc325d1..87f5260 100644
--- a/gateway-site/src/site/markdown/client.md.vm
+++ b/gateway-site/src/site/markdown/client.md.vm
@@ -70,7 +70,7 @@
 
 The shell can also be used to execute a script by passing a single filename argument.
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
 
 When running interactively it may be helpful to reduce some of the output generated by the shell console.
 Use the following command in the interactive shell to reduce that output.
@@ -88,7 +88,7 @@
 Once the shell can be launched the DSL can be used to interact with the gateway and Hadoop.
 Below is a very simple example of an interactive shell session to upload a file to HDFS.
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar
+    java -jar bin/shell-${gateway-version}.jar
     knox:000> hadoop = Hadoop.login( "https://localhost:8443/gateway/sample", "hdfs", "hdfs-password" )
     knox:000> Hdfs.put( hadoop ).file( "README" ).to( "/tmp/example/README" ).now()
 
@@ -417,8 +417,8 @@
 Fortunately there is a very simple way to add classes and JARs to the shell classpath.
 The first time the shell is executed it will create a configuration file in the same directory as the JAR with the same base name and a `.cfg` extension.
 
-    bin/shell-0.2.0-SNAPSHOT.jar
-    bin/shell-0.2.0-SNAPSHOT.cfg
+    bin/shell-${gateway-version}.jar
+    bin/shell-${gateway-version}.cfg
 
 That file contains both the main class for the shell as well as a definition of the classpath.
 Currently that file will by default contain the following.
@@ -434,7 +434,7 @@
 The easiest way to add these to the shell is to compile them directly into the `ext` directory.
 *Note: This command depends upon having the Groovy compiler installed and available on the execution path.*
 
-    groovyc -d ext -cp bin/shell-0.2.0-SNAPSHOT.jar samples/SampleService.groovy samples/SampleSimpleCommand.groovy samples/SampleComplexCommand.groovy
+    groovyc -d ext -cp bin/shell-${gateway-version}.jar samples/SampleService.groovy samples/SampleSimpleCommand.groovy samples/SampleComplexCommand.groovy
 
 These source files are available in the samples directory of the distribution but are included here for convenience.
 
@@ -551,17 +551,17 @@
 The shell included in the distribution is basically an unmodified packaging of the Groovy shell.
 Therefore these commands are functionally equivalent if you have Groovy [installed][15].
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
-    groovy -cp bin/shell-0.2.0-SNAPSHOT.jar sample/SmokeTestJob.groovy
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+    groovy -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
 
 The interactive shell isn't exactly equivalent.
-However the only difference is that the shell-0.2.0-SNAPSHOT.jar automatically executes some additional imports that are useful for the KnoxShell DSL.
+However, the only difference is that the shell-${gateway-version}.jar automatically executes some additional imports that are useful for the KnoxShell DSL.
 So these two sets of commands should be functionally equivalent.
 ***However there is currently a class loading issue that prevents the groovysh command from working properly.***
 
-    java -jar bin/shell-0.2.0-SNAPSHOT.jar
+    java -jar bin/shell-${gateway-version}.jar
 
-    groovysh -cp bin/shell-0.2.0-SNAPSHOT.jar # BROKEN, CLASS LOADING ISSUE
+    groovysh -cp bin/shell-${gateway-version}.jar
     import org.apache.hadoop.gateway.shell.Hadoop
     import org.apache.hadoop.gateway.shell.hdfs.Hdfs
     import org.apache.hadoop.gateway.shell.job.Job
@@ -570,7 +570,7 @@
 
 Alternatively, you can use the Groovy Console which does not appear to have the same class loading issue.
 
-    groovyConsole -cp bin/shell-0.2.0-SNAPSHOT.jar
+    groovyConsole -cp bin/shell-${gateway-version}.jar
 
     import org.apache.hadoop.gateway.shell.Hadoop
     import org.apache.hadoop.gateway.shell.hdfs.Hdfs
diff --git a/gateway-site/src/site/markdown/examples.md.vm b/gateway-site/src/site/markdown/examples.md.vm
index 3541734..672e538 100644
--- a/gateway-site/src/site/markdown/examples.md.vm
+++ b/gateway-site/src/site/markdown/examples.md.vm
@@ -16,7 +16,228 @@
 -->
 
 ------------------------------------------------------------------------------
-Example #3: WebHDFS & Templeton/WebHCat
+Apache Knox Gateway - Usage Examples
+------------------------------------------------------------------------------
+This guide provides detailed examples of how to perform some basic
+interactions with Hadoop via the Apache Knox Gateway.
+
+The first two examples submit a Java MapReduce job and workflow using the
+KnoxShell DSL
+
+* Example #1: WebHDFS & Templeton/WebHCat via KnoxShell DSL
+* Example #2: WebHDFS & Oozie via KnoxShell DSL
+
+The last two examples submit the same job and workflow, but do so using only
+the [cURL](http://curl.haxx.se/) command line HTTP client.
+
+* Example #3: WebHDFS & Templeton/WebHCat via cURL
+* Example #4: WebHDFS & Oozie via cURL
+
+------------------------------------------------------------------------------
+Assumptions
+------------------------------------------------------------------------------
+This document assumes a few things about your environment in order to
+simplify the examples.
+
+1. The JVM is executable as simply `java`.
+2. The Apache Knox Gateway is installed and functional.
+3. The example commands are executed with the GATEWAY_HOME directory as the
+   current working directory. The GATEWAY_HOME directory is the directory
+   within the Apache Knox Gateway installation that contains the README file
+   and the bin, conf and deployments directories.
+4. A few examples require the use of commands from a standard Groovy
+   installation.  These examples are optional, but to try them you will need
+   Groovy [installed][gii].
+
+[gii]: http://groovy.codehaus.org/Installing+Groovy
+
+------------------------------------------------------------------------------
+Customization
+------------------------------------------------------------------------------
+These examples may need to be tailored to the execution environment.  In
+particular, hostnames and ports may need to be changed to match your
+environment.  There are two example files in the distribution that may need
+to be customized.  Take a moment to review these files.  All of the values
+that may need to be customized can be found together at the top of each
+file.
+
+* samples/ExampleSubmitJob.groovy
+* samples/ExampleSubmitWorkflow.groovy
+
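+For reference, the values at the top of `samples/ExampleSubmitJob.groovy`
+that most often need tailoring look like this (the hostname, username and
+password will likely differ in your environment):
+
+    gateway = "https://localhost:8443/gateway/sample"
+    username = "mapred"
+    password = "mapred-password"
+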
+If you are using the Sandbox VM for your Hadoop cluster you may want to
+review [these configuration tips][sb].
+
+[sb]: sandbox.html
+
+------------------------------------------------------------------------------
+Example #1: WebHDFS & Templeton/WebHCat via KnoxShell DSL
+------------------------------------------------------------------------------
+This example will submit the familiar WordCount Java MapReduce job to the
+Hadoop cluster via the gateway using the KnoxShell DSL.  There are several
+ways to do this depending upon your preference.
+
+You can use the "embedded" Groovy interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+
+You can load the KnoxShell DSL script into the standard Groovy Console.
+
+    groovyConsole -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitJob.groovy
+
+You can manually type the KnoxShell DSL script into the "embedded" Groovy
+interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar
+
+Each line from the file below will need to be typed or copied into the
+interactive shell.
+
+***samples/ExampleSubmitJob.groovy***
+
+    import com.jayway.jsonpath.JsonPath
+    import org.apache.hadoop.gateway.shell.Hadoop
+    import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+    import org.apache.hadoop.gateway.shell.job.Job
+
+    import static java.util.concurrent.TimeUnit.SECONDS
+
+    gateway = "https://localhost:8443/gateway/sample"
+    username = "mapred"
+    password = "mapred-password"
+    dataFile = "LICENSE"
+    jarFile = "samples/hadoop-examples.jar"
+
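+    // Log in once; the resulting session is reused for every request below.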
+    hadoop = Hadoop.login( gateway, username, password )
+
+    println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+    println "Create /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+
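+    // later() issues each request asynchronously and invokes the closure with
+    // the response; waitFor() blocks until both uploads complete.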
+    putData = Hdfs.put(hadoop).file( dataFile ).to( "/tmp/test/input/FILE" ).later() {
+      println "Put /tmp/test/input/FILE " + it.statusCode }
+    putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/hadoop-examples.jar" ).later() {
+      println "Put /tmp/test/hadoop-examples.jar " + it.statusCode }
+    hadoop.waitFor( putData, putJar )
+
+    jobId = Job.submitJava(hadoop) \
+      .jar( "/tmp/test/hadoop-examples.jar" ) \
+      .app( "wordcount" ) \
+      .input( "/tmp/test/input" ) \
+      .output( "/tmp/test/output" ) \
+      .now().jobId
+    println "Submitted job " + jobId
+
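+    // Poll the job status once per second, for at most 60 seconds.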
+    done = false
+    count = 0
+    while( !done && count++ < 60 ) {
+      sleep( 1000 )
+      json = Job.queryStatus(hadoop).jobId(jobId).now().string
+      done = JsonPath.read( json, "\$.status.jobComplete" )
+    }
+    println "Done " + done
+
+    println "Shutdown " + hadoop.shutdown( 10, SECONDS )
+
+------------------------------------------------------------------------------
+Example #2: WebHDFS & Oozie via KnoxShell DSL
+------------------------------------------------------------------------------
+This example will also submit the familiar WordCount Java MapReduce job to the
+Hadoop cluster via the gateway using the KnoxShell DSL.  However in this case
+the job will be submitted via an Oozie workflow.  There are several ways to do
+this depending upon your preference.
+
+You can use the "embedded" Groovy interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar samples/ExampleSubmitWorkflow.groovy
+
+You can load the KnoxShell DSL script into the standard Groovy Console.
+
+    groovyConsole -cp bin/shell-${gateway-version}.jar samples/ExampleSubmitWorkflow.groovy
+
+You can manually type the KnoxShell DSL script into the "embedded" Groovy
+interpreter provided with the distribution.
+
+    java -jar bin/shell-${gateway-version}.jar
+
+Each line from the file below will need to be typed or copied into the
+interactive shell.
+
+***samples/ExampleSubmitWorkflow.groovy***
+
+    import com.jayway.jsonpath.JsonPath
+    import org.apache.hadoop.gateway.shell.Hadoop
+    import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+    import org.apache.hadoop.gateway.shell.workflow.Workflow
+
+    import static java.util.concurrent.TimeUnit.SECONDS
+
+    gateway = "https://localhost:8443/gateway/sample"
+    jobTracker = "sandbox:50300";
+    nameNode = "sandbox:8020";
+    username = "mapred"
+    password = "mapred-password"
+    inputFile = "LICENSE"
+    jarFile = "samples/hadoop-examples.jar"
+
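+    // Groovy interpolates $jobTracker, $nameNode and $username into the XML templates below.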
+    definition = """\
+    <workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
+        <start to="root-node"/>
+        <action name="root-node">
+            <java>
+                <job-tracker>$jobTracker</job-tracker>
+                <name-node>hdfs://$nameNode</name-node>
+                <main-class>org.apache.hadoop.examples.WordCount</main-class>
+                <arg>/tmp/test/input</arg>
+                <arg>/tmp/test/output</arg>
+            </java>
+            <ok to="end"/>
+            <error to="fail"/>
+        </action>
+        <kill name="fail">
+            <message>Java failed</message>
+        </kill>
+        <end name="end"/>
+    </workflow-app>
+    """
+
+    configuration = """\
+    <configuration>
+        <property>
+            <name>user.name</name>
+            <value>$username</value>
+        </property>
+        <property>
+            <name>oozie.wf.application.path</name>
+            <value>hdfs://$nameNode/tmp/test</value>
+        </property>
+    </configuration>
+    """
+
+    hadoop = Hadoop.login( gateway, username, password )
+
+    println "Delete /tmp/test " + Hdfs.rm(hadoop).file( "/tmp/test" ).recursive().now().statusCode
+    println "Mkdir /tmp/test " + Hdfs.mkdir(hadoop).dir( "/tmp/test").now().statusCode
+    putWorkflow = Hdfs.put(hadoop).text( definition ).to( "/tmp/test/workflow.xml" ).later() {
+      println "Put /tmp/test/workflow.xml " + it.statusCode }
+    putData = Hdfs.put(hadoop).file( inputFile ).to( "/tmp/test/input/FILE" ).later() {
+      println "Put /tmp/test/input/FILE " + it.statusCode }
+    putJar = Hdfs.put(hadoop).file( jarFile ).to( "/tmp/test/lib/hadoop-examples.jar" ).later() {
+      println "Put /tmp/test/lib/hadoop-examples.jar " + it.statusCode }
+    hadoop.waitFor( putWorkflow, putData, putJar )
+
+    jobId = Workflow.submit(hadoop).text( configuration ).now().jobId
+    println "Submitted job " + jobId
+
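+    // Poll the workflow status once per second, for at most 60 seconds.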
+    status = "UNKNOWN";
+    count = 0;
+    while( status != "SUCCEEDED" && count++ < 60 ) {
+      sleep( 1000 )
+      json = Workflow.status(hadoop).jobId( jobId ).now().string
+      status = JsonPath.read( json, "\$.status" )
+    }
+    println "Job status " + status;
+
+    println "Shutdown " + hadoop.shutdown( 10, SECONDS )
+
+------------------------------------------------------------------------------
+Example #3: WebHDFS & Templeton/WebHCat via cURL
 ------------------------------------------------------------------------------
 The example below illustrates the sequence of curl commands that could be used
 to run a "word count" map reduce job.  It utilizes the hadoop-examples.jar
@@ -75,7 +296,7 @@
       'https://localhost:8443/gateway/sample/namenode/api/v1/tmp/test?op=DELETE&recursive=true'
 
 ------------------------------------------------------------------------------
-Example #4: WebHDFS & Oozie
+Example #4: WebHDFS & Oozie via cURL
 ------------------------------------------------------------------------------
 The example below illustrates the sequence of curl commands that could be used
 to run a "word count" map reduce job via an Oozie workflow.  It utilizes the
@@ -139,7 +360,6 @@
     curl -i -k -u mapred:mapred-password -X DELETE \
       'https://localhost:8443/gateway/sample/namenode/api/v1/tmp/test?op=DELETE&recursive=true'
 
-
 ------------------------------------------------------------------------------
 Disclaimer
 ------------------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/getting-started.md.vm b/gateway-site/src/site/markdown/getting-started.md.vm
index 05df9c0..411005a 100644
--- a/gateway-site/src/site/markdown/getting-started.md.vm
+++ b/gateway-site/src/site/markdown/getting-started.md.vm
@@ -16,7 +16,7 @@
 -->
 
 ------------------------------------------------------------------------------
-Getting Started
+Apache Knox Gateway - Getting Started
 ------------------------------------------------------------------------------
 This guide describes the steps required to install, deploy and validate the
 Apache Knox Gateway.
@@ -46,6 +46,12 @@
 The Hadoop cluster should be ensured to have WebHDFS, WebHCat
 (i.e. Templeton) and Oozie configured, deployed and running.
 
+This release of the Apache Knox Gateway has been tested against the
+[Hortonworks Sandbox 1.2][hsb] with [these changes][sb].
+
+[hsb]: http://hortonworks.com/products/hortonworks-sandbox/
+[sb]: sandbox.html
+
 ------------------------------------------------------------------------------
 Installation
 ------------------------------------------------------------------------------
diff --git a/gateway-site/src/site/markdown/sandbox.md b/gateway-site/src/site/markdown/sandbox.md
new file mode 100644
index 0000000..c1aac5d
--- /dev/null
+++ b/gateway-site/src/site/markdown/sandbox.md
@@ -0,0 +1,62 @@
+<!---
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+------------------------------------------------------------------------------
+Sandbox Configuration
+------------------------------------------------------------------------------
+This version of the Apache Knox Gateway is tested against
+[Hortonworks Sandbox 1.2][sb].
+
+In order to correct an issue with the Sandbox's use of `localhost` in its
+Hadoop configuration, you can use the commands below to log in to the
+Sandbox VM and modify the configuration.  This assumes that the name
+sandbox is set up to resolve to the Sandbox VM.  It may be necessary to use
+the IP address of the Sandbox VM instead.  ***This is frequently but not
+always 192.168.56.101.***
+
+    ssh root@sandbox
+    sed -i 's/localhost/sandbox/' /usr/lib/hadoop/conf/hdfs-site.xml
+    shutdown -r now
+
+In addition, to make it easy to follow along with the samples for the
+gateway, you can configure your local system to resolve the address of the
+Sandbox by the names `vm` and `sandbox`.
+
+On Linux or Macintosh systems add a line like this to the end of the
+`/etc/hosts` file on your local machine, ***not the Sandbox VM***.
+*Note: That is a _tab_ character between `192.168.56.101` and `vm`.*
+
+    192.168.56.101	vm sandbox
+
+On Windows systems a similar mechanism can be used.  On recent versions of
+Windows the file that should be modified is
+`%systemroot%\system32\drivers\etc\hosts`.
+
+[sb]: http://hortonworks.com/products/hortonworks-sandbox/
+
+------------------------------------------------------------------------------
+Disclaimer
+------------------------------------------------------------------------------
+The Apache Knox Gateway is an effort undergoing incubation at the
+Apache Software Foundation (ASF), sponsored by the Apache Incubator PMC.
+
+Incubation is required of all newly accepted projects until a further review
+indicates that the infrastructure, communications, and decision making process
+have stabilized in a manner consistent with other successful ASF projects.
+
+While incubation status is not necessarily a reflection of the completeness
+or stability of the code, it does indicate that the project has yet to be
+fully endorsed by the ASF.
\ No newline at end of file
diff --git a/gateway-site/src/site/site.xml b/gateway-site/src/site/site.xml
index eda5898..5d23bd6 100644
--- a/gateway-site/src/site/site.xml
+++ b/gateway-site/src/site/site.xml
@@ -99,6 +99,7 @@
             <item name="Getting Started" href="getting-started.html"/>
             <item name="Usage Examples" href="examples.html"/>
             <item name="Client (KnoxShell DSL)" href="client.html"/>
+            <item name="Sandbox Configuration" href="sandbox.html"/>
             <item name="Wiki" href="https://cwiki.apache.org/confluence/display/KNOX/Knox"/>
         </menu>