KNOX-222: Remove hadoop-examples.jar from source tree
diff --git a/gateway-release/home/samples/ExampleWebHCatJob.groovy b/gateway-release/home/samples/ExampleWebHCatJob.groovy
index 50e43fd..0db8497 100644
--- a/gateway-release/home/samples/ExampleWebHCatJob.groovy
+++ b/gateway-release/home/samples/ExampleWebHCatJob.groovy
@@ -45,7 +45,7 @@
 
 jobId = Job.submitJava(session) \
   .jar( jobDir + "/lib/hadoop-examples.jar" ) \
-  .app( "wordcount" ) \
+  .app( "org.apache.hadoop.examples.WordCount" ) \
   .input( jobDir + "/input" ) \
   .output( jobDir + "/output" ) \
   .now().jobId
diff --git a/gateway-release/home/samples/hadoop-examples.jar b/gateway-release/home/samples/hadoop-examples.jar
deleted file mode 100644
index 351b77a..0000000
--- a/gateway-release/home/samples/hadoop-examples.jar
+++ /dev/null
Binary files differ
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 24310b1..815034c 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -172,6 +172,11 @@
 
         <dependency>
             <groupId>${gateway-group}</groupId>
+            <artifactId>hadoop-examples</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
             <artifactId>gateway-shell</artifactId>
         </dependency>
         <dependency>
diff --git a/gateway-release/src/assembly.xml b/gateway-release/src/assembly.xml
index 77cb291..196ef1b 100644
--- a/gateway-release/src/assembly.xml
+++ b/gateway-release/src/assembly.xml
@@ -35,18 +35,18 @@
                 <exclude>**/.settings/**</exclude>
             </excludes>
         </fileSet>
-		<fileSet>
-		    <directory>..</directory>
-		    <outputDirectory></outputDirectory>
-		    <includes>
-		        <include>CHANGES</include>
-		        <include>DISCLAIMER</include>
-		        <include>ISSUES</include>
-		        <include>LICENSE</include>
-		        <include>NOTICE</include>
-		        <include>README</include>
-		    </includes>
-		</fileSet>
+        <fileSet>
+            <directory>..</directory>
+            <outputDirectory></outputDirectory>
+            <includes>
+                <include>CHANGES</include>
+                <include>DISCLAIMER</include>
+                <include>ISSUES</include>
+                <include>LICENSE</include>
+                <include>NOTICE</include>
+                <include>README</include>
+            </includes>
+        </fileSet>
     </fileSets>
     <dependencySets>
         <dependencySet>
@@ -95,5 +95,12 @@
                 <include>${gateway-group}:gateway-test-ldap-launcher</include>
             </includes>
         </dependencySet>
+        <dependencySet>
+            <outputDirectory>samples</outputDirectory>
+            <outputFileNameMapping>hadoop-examples.jar</outputFileNameMapping>
+            <includes>
+                <include>${gateway-group}:hadoop-examples</include>
+            </includes>
+        </dependencySet>
     </dependencySets>
 </assembly>
\ No newline at end of file
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 11eeb0c..fbe7e8c 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -21,6 +21,7 @@
 import com.jayway.restassured.response.Response;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
 import org.apache.hadoop.test.TestUtils;
 import org.apache.hadoop.test.category.FunctionalTests;
 import org.apache.hadoop.test.category.MediumTests;
@@ -42,6 +43,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.net.InetSocketAddress;
@@ -726,7 +730,7 @@
     /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
     curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", "hadoop-examples.jar", 307, 201, 200 );
+    driver.createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 
     /* Put the data file into HDFS (changes.txt)
     curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
@@ -842,7 +846,7 @@
     /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
     curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", "hadoop-examples.jar", 307, 201, 200 );
+    driver.createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 
     /* Put the data file into HDFS (changes.txt)
     curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
@@ -1863,4 +1867,27 @@
     driver.assertComplete();
   }
 
+  private File findFile( File dir, String pattern ) {
+    File file = null;
+    FileFilter filter = new WildcardFileFilter( pattern );
+    File[] files = dir.listFiles(filter);
+    if( files != null && files.length > 0 ) {
+      file = files[0];
+    }
+    return file;
+  }
+
+  private String findHadoopExamplesJar() throws IOException {
+    String pattern = "hadoop-examples-*.jar";
+    File dir = new File( System.getProperty( "user.dir" ), "hadoop-examples/target" );
+    File file = findFile( dir, pattern );
+    if( file == null || !file.exists() ) {
+      file = findFile( new File( System.getProperty( "user.dir" ), "../hadoop-examples/target" ), pattern );
+    }
+    if( file == null ) {
+      throw new FileNotFoundException( pattern );
+    }
+    return file.toURI().toString();
+  }
+
 }
\ No newline at end of file
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
index 0fb91e1..0d33fc2 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
@@ -243,8 +243,17 @@
     return url;
   }
 
-  public InputStream getResourceStream( String resource ) {
-    InputStream stream = ClassLoader.getSystemResourceAsStream( getResourceName( resource ) );
+  public InputStream getResourceStream( String resource ) throws IOException {
+    InputStream stream = null;
+    if( resource.startsWith( "file:/" ) ) {
+      try {
+        stream = FileUtils.openInputStream( new File( new URI( resource ) ) );
+      } catch( URISyntaxException e ) {
+        throw new IOException( e );
+      }
+    } else {
+      stream = ClassLoader.getSystemResourceAsStream( getResourceName( resource ) );
+    }
     assertThat( "Failed to find test resource " + resource, stream, Matchers.notNullValue() );
     return stream;
   }
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/hadoop-examples.jar b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/hadoop-examples.jar
deleted file mode 100644
index 351b77a..0000000
--- a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/hadoop-examples.jar
+++ /dev/null
Binary files differ
diff --git a/hadoop-examples/pom.xml b/hadoop-examples/pom.xml
new file mode 100644
index 0000000..2eb5e10
--- /dev/null
+++ b/hadoop-examples/pom.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.3.1</version>
+    </parent>
+
+    <artifactId>hadoop-examples</artifactId>
+    <name>hadoop-examples</name>
+    <description>Hadoop Examples</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>commons-cli</groupId>
+            <artifactId>commons-cli</artifactId>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/hadoop-examples/src/main/java/org/apache/hadoop/examples/WordCount.java b/hadoop-examples/src/main/java/org/apache/hadoop/examples/WordCount.java
new file mode 100644
index 0000000..5e4ac4c
--- /dev/null
+++ b/hadoop-examples/src/main/java/org/apache/hadoop/examples/WordCount.java
@@ -0,0 +1,84 @@
+/**
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+
+package org.apache.hadoop.examples;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.GenericOptionsParser;
+
+public class WordCount {
+
+  public static class TokenizerMapper
+      extends Mapper<Object, Text, Text, IntWritable>{
+
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+
+    public void map(Object key, Text value, Context context
+    ) throws IOException, InterruptedException {
+      StringTokenizer itr = new StringTokenizer(value.toString());
+      while (itr.hasMoreTokens()) {
+        word.set(itr.nextToken());
+        context.write(word, one);
+      }
+    }
+  }
+
+  public static class IntSumReducer
+      extends Reducer<Text,IntWritable,Text,IntWritable> {
+    private IntWritable result = new IntWritable();
+
+    public void reduce(Text key, Iterable<IntWritable> values,
+                       Context context
+    ) throws IOException, InterruptedException {
+      int sum = 0;
+      for (IntWritable val : values) {
+        sum += val.get();
+      }
+      result.set(sum);
+      context.write(key, result);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    Configuration conf = new Configuration();
+    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
+    if (otherArgs.length != 2) {
+      System.err.println( "Usage: wordcount <in> <out>" );
+      System.exit(2);
+    }
+    Job job = new Job(conf, "Word Count");
+    job.setJarByClass(WordCount.class);
+    job.setMapperClass(TokenizerMapper.class);
+    job.setCombinerClass(IntSumReducer.class);
+    job.setReducerClass(IntSumReducer.class);
+    job.setOutputKeyClass(Text.class);
+    job.setOutputValueClass(IntWritable.class);
+    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
+    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
+    System.exit(job.waitForCompletion(true) ? 0 : 1);
+  }
+}
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index d54f4e0..4d74ada 100644
--- a/pom.xml
+++ b/pom.xml
@@ -58,6 +58,7 @@
         <module>gateway-server-launcher</module>
         <module>gateway-shell</module>
         <module>gateway-shell-launcher</module>
+        <module>hadoop-examples</module>
         <module>gateway-release</module>
         <module>gateway-test</module>
         <module>hsso-release</module>
@@ -223,6 +224,9 @@
                     <excludedGroups>
                         org.apache.hadoop.test.category.SlowTests,org.apache.hadoop.test.category.ManualTests,org.apache.hadoop.test.category.IntegrationTests
                     </excludedGroups>
+                    <systemPropertyVariables>
+                        <gateway-version>${gateway-version}</gateway-version>
+                    </systemPropertyVariables>
                 </configuration>
             </plugin>
         </plugins>
@@ -439,6 +443,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>hadoop-examples</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-release</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -531,6 +540,12 @@
 
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-mapreduce-client-core</artifactId>
+                <version>0.23.3</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-core</artifactId>
                 <version>1.0.3</version>
                 <exclusions>