KNOX-743 - Add submitSqoop via knox shell api
diff --git a/gateway-release/home/samples/ExampleWebHCatSqoop.groovy b/gateway-release/home/samples/ExampleWebHCatSqoop.groovy
new file mode 100644
index 0000000..a5629e3
--- /dev/null
+++ b/gateway-release/home/samples/ExampleWebHCatSqoop.groovy
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+// Define the Sqoop command options.
+// This uses the publicly available Genome MySQL database.
+// If the database is unavailable, set up an alternate database and update the
+// db information below.
+db = [ driver:"com.mysql.jdbc.Driver", url:"jdbc:mysql://genome-mysql.cse.ucsc.edu/hg38", user:"genome", password:"", name:"hg38", table:"scBlastTab", split:"query" ]
+
+targetdir = jobDir + "/" + db.table
+
+sqoop_command = "import --driver ${db.driver} --connect ${db.url} --username ${db.user} --password ${db.password} --table ${db.table} --split-by ${db.split} --target-dir ${targetdir}"
+
+jobId = Job.submitSqoop(session) \
+            .command(sqoop_command) \
+            .statusDir("${jobDir}/output") \
+            .now().jobId
+
+println "Submitted job: " + jobId
+
+println "Polling up to 180s for job completion..."
+done = false
+count = 0
+while( !done && count++ < 180 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(session).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + done
+
+text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+
+println "Content of stderr:"
+println Hdfs.get( session ).from( jobDir + "/output/stderr" ).now().string
+
+println "Session closed: " + session.shutdown( 10, SECONDS )
diff --git a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Job.java b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Job.java
index f374244..ec49fe1 100644
--- a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Job.java
+++ b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Job.java
@@ -27,6 +27,10 @@
     return new Java.Request( session );
   }
 
+  public static Sqoop.Request submitSqoop( Hadoop session ) {
+    return new Sqoop.Request( session );
+  }
+
   public static Pig.Request submitPig( Hadoop session ) {
     return new Pig.Request( session );
   }
diff --git a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
new file mode 100644
index 0000000..124a00f
--- /dev/null
+++ b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.shell.job;
+
+import com.jayway.jsonpath.JsonPath;
+import org.apache.hadoop.gateway.shell.AbstractRequest;
+import org.apache.hadoop.gateway.shell.BasicResponse;
+import org.apache.hadoop.gateway.shell.Hadoop;
+import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.utils.URIBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+public class Sqoop {
+
+  static class Request extends AbstractRequest<Response> {
+
+    private String statusDir;
+    List<NameValuePair> params = new ArrayList<NameValuePair>();
+
+    public Request( Hadoop session ) {
+      super( session );
+    }
+
+    public Request command( String command ) {
+      addParam( params, "command", command );
+      return this;
+    }
+
+    public Request libdir( String libdir ) {
+      addParam( params, "libdir", libdir );
+      return this;
+    }
+
+    public Request files( String files ) {
+      addParam( params, "files", files );
+      return this;
+    }
+
+    public Request optionsfile( String optionsFile ) {
+      addParam( params, "optionsfile", optionsFile );
+      return this;
+    }
+
+    public Request statusDir( String dir ) {
+      this.statusDir = dir;
+      return this;
+    }
+
+    protected Callable<Response> callable() {
+      return new Callable<Response>() {
+        @Override
+        public Response call() throws Exception {
+          URIBuilder uri = uri( Job.SERVICE_PATH, "/sqoop" );
+          addParam( params, "statusdir", statusDir );
+          UrlEncodedFormEntity form = new UrlEncodedFormEntity( params );
+          HttpPost request = new HttpPost( uri.build() );
+          request.setEntity( form );
+          return new Response( execute( request ) );
+        }
+      };
+    }
+
+  }
+
+  static class Response extends BasicResponse {
+
+    protected Response( HttpResponse response ) {
+      super( response );
+    }
+
+    public String getJobId() throws IOException {
+      return JsonPath.read( getString(), "$.id" );
+    }
+
+  }
+
+}