Merge pull request #30 from apache/jwills_great_version_upgrade

The great version upgrade PR: move the build to Java 8 and bring the dependency stack forward (Avro 1.7.7 to 1.8.2, Parquet 1.8.1 to 1.10.1, Hadoop 2.7.1 to 2.8.2, Kafka 0.10.2.1 to 1.1.0 on Scala 2.12, Spark 2.0.0 to 2.4.0, Mockito 1.9.0 to 2.23.4), along with the source and test changes those upgrades require.
diff --git a/crunch-archetype/src/main/resources/archetype-resources/pom.xml b/crunch-archetype/src/main/resources/archetype-resources/pom.xml
index 6575417..34da675 100644
--- a/crunch-archetype/src/main/resources/archetype-resources/pom.xml
+++ b/crunch-archetype/src/main/resources/archetype-resources/pom.xml
@@ -89,8 +89,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.5.1</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.8</source>
+          <target>1.8</target>
         </configuration>
       </plugin>
       <plugin>
diff --git a/crunch-core/pom.xml b/crunch-core/pom.xml
index 1f487a2..c356dce 100644
--- a/crunch-core/pom.xml
+++ b/crunch-core/pom.xml
@@ -42,7 +42,6 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>${avro.classifier}</classifier>
     </dependency>
 
     <dependency>
@@ -51,6 +50,11 @@
     </dependency>
 
     <dependency>
+      <groupId>com.thoughtworks.paranamer</groupId>
+      <artifactId>paranamer</artifactId>
+    </dependency>
+
+    <dependency>
       <groupId>org.javassist</groupId>
       <artifactId>javassist</artifactId>
     </dependency>
@@ -69,6 +73,12 @@
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-distcp</artifactId>
       <scope>provided</scope>
     </dependency>
@@ -142,7 +152,7 @@
 
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
 
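Mockito 2 is no longer published as the mockito-all uber-jar, so the dependency moves to mockito-core. The org.mockito.Matchers entry point still compiles under 2.x but is deprecated in favor of org.mockito.ArgumentMatchers; a minimal sketch of the 2.x style (the List mock here is illustrative, not from this codebase):

    import static org.mockito.ArgumentMatchers.anyInt;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.List;

    public class Mockito2Sketch {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        // mockito-core replaces the retired mockito-all packaging; the
        // stubbing API itself is unchanged.
        List<String> list = mock(List.class);
        when(list.get(anyInt())).thenReturn("stubbed");
        System.out.println(list.get(7)); // prints "stubbed"
      }
    }
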
diff --git a/crunch-core/src/it/java/org/apache/crunch/io/parquet/AvroParquetFileSourceTargetIT.java b/crunch-core/src/it/java/org/apache/crunch/io/parquet/AvroParquetFileSourceTargetIT.java
index a14a5e3..3c80a3c 100644
--- a/crunch-core/src/it/java/org/apache/crunch/io/parquet/AvroParquetFileSourceTargetIT.java
+++ b/crunch-core/src/it/java/org/apache/crunch/io/parquet/AvroParquetFileSourceTargetIT.java
@@ -153,7 +153,7 @@
 
     Person person = Iterables.getOnlyElement(ageOnly.materialize());
     assertNull(person.getName());
-    assertEquals(person.getAge(), new Integer(42));
+    assertEquals(person.getAge().intValue(), 42);
     assertNull(person.getSiblingnames());
   }
 
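Unwrapping to a primitive routes the call to assertEquals(long, long) and avoids the Integer(int) constructor, which newer JDKs deprecate. A self-contained illustration (the local age stands in for person.getAge()):

    import static org.junit.Assert.assertEquals;

    public class BoxedAssertExample {
      public static void main(String[] args) {
        Integer age = 42; // stands in for person.getAge()
        // expected first, actual second, per JUnit convention
        assertEquals(42, age.intValue());
      }
    }
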
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/parquet/AvroParquetFileSource.java b/crunch-core/src/main/java/org/apache/crunch/io/parquet/AvroParquetFileSource.java
index 09703bd..9660b16 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/parquet/AvroParquetFileSource.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/parquet/AvroParquetFileSource.java
@@ -186,7 +186,7 @@
       if (field == null) {
         throw new IllegalArgumentException("No field " + fieldName + " in schema: " + baseSchema.getName());
       }
-      fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue(), field.order()));
+      fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultVal(), field.order()));
       return this;
     }
 
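Avro 1.8 deprecates Schema.Field.defaultValue(), which exposed the default as a Jackson JsonNode, in favor of defaultVal(), which returns a plain Object accepted by the matching Schema.Field constructor. A small sketch of cloning a field under Avro 1.8:

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;

    public class FieldCloneSketch {
      static Schema.Field cloneField(Schema.Field field) {
        // defaultVal() returns the default as an Object, matching the
        // Field(String, Schema, String, Object, Order) constructor.
        return new Schema.Field(field.name(), field.schema(), field.doc(),
            field.defaultVal(), field.order());
      }

      public static void main(String[] args) {
        Schema record = SchemaBuilder.record("Person").fields()
            .optionalString("name")
            .endRecord();
        System.out.println(cloneField(record.getField("name")));
      }
    }
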
diff --git a/crunch-core/src/main/java/org/apache/crunch/util/CrunchRenameCopyListing.java b/crunch-core/src/main/java/org/apache/crunch/util/CrunchRenameCopyListing.java
index b930beb..7431d5d 100644
--- a/crunch-core/src/main/java/org/apache/crunch/util/CrunchRenameCopyListing.java
+++ b/crunch-core/src/main/java/org/apache/crunch/util/CrunchRenameCopyListing.java
@@ -219,7 +219,7 @@
           DistCpUtils.getRelativePath(sourcePathRoot, fileStatus.getPath()), fileStatus.getPath());
     }
 
-    if (!shouldCopy(fileStatus.getPath(), options)) {
+    if (!shouldCopy(fileStatus.getPath())) {
       return;
     }
 
@@ -269,4 +269,4 @@
   protected long getNumberOfPaths() {
     return totalPaths;
   }
-}
\ No newline at end of file
+}
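Hadoop 2.8 narrowed SimpleCopyListing.shouldCopy(Path, DistCpOptions) to shouldCopy(Path), so the override here has to follow suit or it would silently stop participating in the listing. A sketch of the shape such an override takes against Hadoop 2.8 (the underscore filter is illustrative only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.tools.SimpleCopyListing;

    public class FilteringCopyListing extends SimpleCopyListing {
      public FilteringCopyListing(Configuration conf, Credentials credentials) {
        super(conf, credentials);
      }

      // Hadoop 2.8 signature: the DistCpOptions parameter is gone.
      @Override
      protected boolean shouldCopy(Path path) {
        return !path.getName().startsWith("_"); // e.g. skip _SUCCESS markers
      }
    }
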
diff --git a/crunch-core/src/test/java/org/apache/crunch/io/parquet/AvroParquetFileReaderFactoryTest.java b/crunch-core/src/test/java/org/apache/crunch/io/parquet/AvroParquetFileReaderFactoryTest.java
index d2ad5bc..95d4561 100644
--- a/crunch-core/src/test/java/org/apache/crunch/io/parquet/AvroParquetFileReaderFactoryTest.java
+++ b/crunch-core/src/test/java/org/apache/crunch/io/parquet/AvroParquetFileReaderFactoryTest.java
@@ -98,7 +98,7 @@
   }
 
   public static Schema.Field cloneField(Schema.Field field) {
-    return new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultValue());
+    return new Schema.Field(field.name(), field.schema(), field.doc(), field.defaultVal());
   }
 
 }
diff --git a/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroChildClassloaderTestRunner.java b/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroChildClassloaderTestRunner.java
index a1f93b7..d0016fc 100644
--- a/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroChildClassloaderTestRunner.java
+++ b/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroChildClassloaderTestRunner.java
@@ -17,6 +17,8 @@
  */
 package org.apache.crunch.types.avro;
 
+import java.io.File;
+import java.lang.management.ManagementFactory;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.ArrayList;
@@ -45,11 +47,25 @@
     private static ClassLoader parentClassLoader;
     private static URL[] crunchURLs;
     static {
-      URLClassLoader systemClassLoader = (URLClassLoader) getSystemClassLoader();
+      ClassLoader classLoader = getSystemClassLoader();
+      URL[] urls = null;
+      if (classLoader instanceof URLClassLoader) {
+        urls = ((URLClassLoader) classLoader).getURLs();
+      } else {
+        String[] pieces = ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator);
+        urls = new URL[pieces.length];
+        for (int i = 0; i < pieces.length; i++) {
+          try {
+            urls[i] = new File(pieces[i]).toURI().toURL();
+          } catch (Exception e) {
+            throw new RuntimeException(e);
+          }
+        }
+      }
 
       Collection<URL> crunchURLs = new ArrayList<URL>();
       Collection<URL> otherURLs = new ArrayList<URL>();
-      for (URL url : systemClassLoader.getURLs()) {
+      for (URL url : urls) {
         if (url.getPath().matches("^.*/crunch-?.*/.*$")) {
           crunchURLs.add(url);
         } else {
@@ -58,7 +74,7 @@
       }
 
       TestClassLoader.crunchURLs = crunchURLs.toArray(new URL[crunchURLs.size()]);
-      parentClassLoader = new URLClassLoader(otherURLs.toArray(new URL[otherURLs.size()]), null);
+      parentClassLoader = new URLClassLoader(otherURLs.toArray(new URL[otherURLs.size()]), classLoader);
     }
 
     public TestClassLoader() {
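Since JDK 9 the system class loader is no longer a URLClassLoader, so the test runner can no longer cast its way to the classpath URLs; the fallback above rebuilds them from the runtime's classpath string. The java.class.path system property is an equivalent source, as in this minimal sketch:

    import java.io.File;
    import java.net.MalformedURLException;
    import java.net.URL;

    public class ClasspathUrls {
      public static URL[] classpathUrls() throws MalformedURLException {
        // Same data the RuntimeMXBean reports, split on the platform separator.
        String[] entries = System.getProperty("java.class.path").split(File.pathSeparator);
        URL[] urls = new URL[entries.length];
        for (int i = 0; i < entries.length; i++) {
          urls[i] = new File(entries[i]).toURI().toURL();
        }
        return urls;
      }
    }
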
diff --git a/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroModeTest.java b/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroModeTest.java
index fe36520..8df49d3 100644
--- a/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroModeTest.java
+++ b/crunch-core/src/test/java/org/apache/crunch/types/avro/AvroModeTest.java
@@ -23,7 +23,6 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.mock;
 
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
@@ -152,30 +151,6 @@
     assertThat(returnedMode.getFactory(), is(instanceOf(FakeReaderWriterFactory.class)));
   }
 
-  @Test
-  public void testRegisterClassLoader() {
-    // First make sure things are in the default situation
-    AvroMode.setSpecificClassLoader(null);
-
-    ClassLoader classLoaderA = mock(ClassLoader.class);
-    ClassLoader classLoaderB = mock(ClassLoader.class);
-
-    // Basic sanity check to ensure that the class loader was really nulled out
-    assertNull(AvroMode.getSpecificClassLoader());
-
-    // Do an internal registration of a class loader. Because there is currently no internal class loader set,
-    // this should set the internal specific class loader
-    AvroMode.registerSpecificClassLoaderInternal(classLoaderA);
-
-    assertEquals(classLoaderA, AvroMode.getSpecificClassLoader());
-
-    // Now we do an internal register of another class loader. Because there already is an internal specific class
-    // loader set, this should have no impact (as opposed to calling setSpecificClassLoader)
-    AvroMode.registerSpecificClassLoaderInternal(classLoaderB);
-
-    assertEquals(classLoaderA, AvroMode.getSpecificClassLoader());
-  }
-
   private static class FakeReaderWriterFactory implements ReaderWriterFactory{
 
     @Override
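The testRegisterClassLoader case goes away along with its mock(ClassLoader.class) calls; mocking java.lang.ClassLoader is not reliably supported under Mockito 2, which is presumably why the test could not survive the upgrade.
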
diff --git a/crunch-kafka/pom.xml b/crunch-kafka/pom.xml
index 14817b6..1036808 100644
--- a/crunch-kafka/pom.xml
+++ b/crunch-kafka/pom.xml
@@ -40,7 +40,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka_2.11</artifactId>
+      <artifactId>kafka_${scala.base.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.scala-lang</groupId>
diff --git a/crunch-kafka/src/main/java/org/apache/crunch/kafka/KafkaUtils.java b/crunch-kafka/src/main/java/org/apache/crunch/kafka/KafkaUtils.java
index 2681df0..0e9d750 100644
--- a/crunch-kafka/src/main/java/org/apache/crunch/kafka/KafkaUtils.java
+++ b/crunch-kafka/src/main/java/org/apache/crunch/kafka/KafkaUtils.java
@@ -34,7 +34,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.network.ListenerName;
-import org.apache.kafka.common.protocol.SecurityProtocol;
+import org.apache.kafka.common.security.auth.SecurityProtocol;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Option;
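Kafka 1.0 relocated SecurityProtocol from org.apache.kafka.common.protocol to org.apache.kafka.common.security.auth; only the import changes, the enum's API is the same. For example:

    import org.apache.kafka.common.security.auth.SecurityProtocol;

    public class SecurityProtocolSketch {
      public static void main(String[] args) {
        // Same constants as before the package move.
        System.out.println(SecurityProtocol.PLAINTEXT.name);
      }
    }
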
diff --git a/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaRecordsIterableIT.java b/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaRecordsIterableIT.java
index fd940f3..dd179ae 100644
--- a/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaRecordsIterableIT.java
+++ b/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaRecordsIterableIT.java
@@ -385,8 +385,6 @@
           entry.getKey().partition(), entry.getValue() + 1, "key", null));
     }
 
-    when(records.isEmpty()).thenReturn(false);
-    when(records.iterator()).thenReturn(returnedRecords.iterator());
     when(mockedConsumer.poll(Matchers.anyLong()))
         //for the fill poll call
         .thenReturn(null)
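These stubbings presumably went unused at run time, and Mockito 2's default JUnit runner reports unused stubs as failures (UnnecessaryStubbingException), so deleting them is the clean fix; the same cleanup recurs in the record-based iterable test below. During a migration, the Silent runner is the escape hatch, as in this sketch (the test class and method are hypothetical):

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.mockito.junit.MockitoJUnitRunner;

    // Silent skips the unnecessary-stubbing check that the default
    // (strict) runner applies in Mockito 2.
    @RunWith(MockitoJUnitRunner.Silent.class)
    public class LegacyStubbingTest {
      @Test
      public void runsDespiteUnusedStubs() {
        // stubs declared in setup but never hit would fail the default
        // strict runner; Silent tolerates them during migration
      }
    }
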
diff --git a/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaUtilsIT.java b/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaUtilsIT.java
index dc4ea82..707f1b3 100644
--- a/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaUtilsIT.java
+++ b/crunch-kafka/src/test/java/org/apache/crunch/kafka/KafkaUtilsIT.java
@@ -25,7 +25,7 @@
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.network.ListenerName;
-import org.apache.kafka.common.protocol.SecurityProtocol;
+import org.apache.kafka.common.security.auth.SecurityProtocol;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
diff --git a/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordReaderTest.java b/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordReaderTest.java
index fd8cd8e..4b3a548 100644
--- a/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordReaderTest.java
+++ b/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordReaderTest.java
@@ -63,9 +63,6 @@
 
     inputSplit = new KafkaInputSplit(topicPartition.topic(), topicPartition.partition(), startOffset, endOffset);
 
-    when(consumer.beginningOffsets(Collections.singleton(inputSplit.getTopicPartition()))).thenReturn(
-        Collections.singletonMap(inputSplit.getTopicPartition(), 0L));
-
     records = new ConsumerRecords<>(Collections.singletonMap(inputSplit.getTopicPartition(),
         Collections.singletonList(new ConsumerRecord<>("topic", 0, 0, "key", "value"))));
 
@@ -166,4 +163,4 @@
       return consumer;
     }
   }
-}
\ No newline at end of file
+}
diff --git a/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordsIterableIT.java b/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordsIterableIT.java
index 9939d64..f996d96 100644
--- a/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordsIterableIT.java
+++ b/crunch-kafka/src/test/java/org/apache/crunch/kafka/record/KafkaRecordsIterableIT.java
@@ -388,8 +388,6 @@
           entry.getKey().partition(), entry.getValue() + 1, "key", null));
     }
 
-    when(records.isEmpty()).thenReturn(false);
-    when(records.iterator()).thenReturn(returnedRecords.iterator());
     when(mockedConsumer.poll(Matchers.anyLong()))
         //for the fill poll call
         .thenReturn(null)
@@ -413,4 +411,4 @@
   private static Map<TopicPartition, Long> getStartOffsets(Properties props, String topic) {
     return KafkaUtils.getBrokerOffsets(props, OffsetRequest.EarliestTime(), topic);
   }
-}
\ No newline at end of file
+}
diff --git a/crunch-kafka/src/test/java/org/apache/crunch/kafka/utils/KafkaBrokerTestHarness.java b/crunch-kafka/src/test/java/org/apache/crunch/kafka/utils/KafkaBrokerTestHarness.java
index 13a4e2c..f3f2234 100644
--- a/crunch-kafka/src/test/java/org/apache/crunch/kafka/utils/KafkaBrokerTestHarness.java
+++ b/crunch-kafka/src/test/java/org/apache/crunch/kafka/utils/KafkaBrokerTestHarness.java
@@ -323,6 +323,7 @@
       props.setProperty("num.partitions", String.valueOf(PARTITIONS_PER_TOPIC));
       props.setProperty("default.replication.factor", String.valueOf(brokers));
       props.setProperty("auto.create.topics.enable", Boolean.FALSE.toString());
+      props.setProperty("offsets.topic.replication.factor", String.valueOf(brokers));
 
       props.putAll(baseProperties);
 
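Since KIP-115 (Kafka 0.11), the broker enforces offsets.topic.replication.factor exactly instead of clamping it to the live broker count, so a harness that leaves the default of 3 on a one-broker cluster fails as soon as a consumer group needs the __consumer_offsets topic. Pinning it to the broker count, as done here:

    import java.util.Properties;

    public class HarnessProps {
      public static Properties brokerProps(int brokers) {
        Properties props = new Properties();
        // Must not exceed the number of brokers in the test cluster.
        props.setProperty("offsets.topic.replication.factor", String.valueOf(brokers));
        return props;
      }
    }
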
diff --git a/crunch-spark/src/main/scala/org/apache/crunch/scrunch/spark/ByteBufferInputStream.scala b/crunch-spark/src/main/scala/org/apache/crunch/scrunch/spark/ByteBufferInputStream.scala
index cc11b46..b43fa55 100644
--- a/crunch-spark/src/main/scala/org/apache/crunch/scrunch/spark/ByteBufferInputStream.scala
+++ b/crunch-spark/src/main/scala/org/apache/crunch/scrunch/spark/ByteBufferInputStream.scala
@@ -55,7 +55,7 @@
   override def skip(bytes: Long): Long = {
     if (buffer != null) {
       val amountToSkip = math.min(bytes, buffer.remaining).toInt
-      buffer.position(buffer.position + amountToSkip)
+      buffer.position(buffer.position() + amountToSkip)
       if (buffer.remaining() == 0) {
         cleanUp()
       }
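Scala 2.12 resolves paren-less references to overloaded Java methods more strictly than 2.11 did, so the explicit empty argument list is needed to pick the zero-argument ByteBuffer.position() accessor over the position(Int) setter.
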
diff --git a/crunch-test/pom.xml b/crunch-test/pom.xml
index b2c18dc..fb3110d 100644
--- a/crunch-test/pom.xml
+++ b/crunch-test/pom.xml
@@ -68,7 +68,7 @@
     
     <dependency>
        <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
     </dependency>
 
     <dependency>
diff --git a/pom.xml b/pom.xml
index 46d8838..49437a7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -72,8 +72,8 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-    <java.source.version>1.7</java.source.version>
-    <java.target.version>1.7</java.target.version>
+    <java.source.version>1.8</java.source.version>
+    <java.target.version>1.8</java.target.version>
     <!--
         NOTE: Please don't change any dependency versions here without
               checking first that they match those on Hadoop's runtime
@@ -85,31 +85,30 @@
     <commons-lang.version>2.6</commons-lang.version>
     <commons-codec.version>1.4</commons-codec.version>
     <commons-cli.version>1.2</commons-cli.version>
-    <avro.version>1.7.7</avro.version>
+    <avro.version>1.8.2</avro.version>
     <hive.version>2.1.0</hive.version>
-    <parquet.version>1.8.1</parquet.version>
+    <parquet.version>1.10.1</parquet.version>
     <javassist.version>3.16.1-GA</javassist.version>
     <jackson.databind.version>2.6.1</jackson.databind.version>
     <protobuf-java.version>2.5.0</protobuf-java.version>
     <libthrift.version>0.8.0</libthrift.version>
-    <slf4j.version>1.6.1</slf4j.version>
+    <slf4j.version>1.7.25</slf4j.version>
     <log4j.version>1.2.15</log4j.version>
-    <algebird.version>0.9.0</algebird.version>
+    <algebird.version>0.13.2</algebird.version>
     <junit.version>4.10</junit.version>
     <hamcrest.version>1.1</hamcrest.version>
-    <mockito.version>1.9.0</mockito.version>
+    <mockito.version>2.23.4</mockito.version>
     <pkg>org.apache.crunch</pkg>
 
-    <hadoop.version>2.7.1</hadoop.version>
+    <hadoop.version>2.8.2</hadoop.version>
     <hbase.version>2.0.1</hbase.version>
-    <avro.classifier>hadoop2</avro.classifier>
     <hive.version>2.1.0</hive.version>
 
-    <kafka.version>0.10.2.1</kafka.version>
-    <scala.base.version>2.11</scala.base.version>
-    <scala.version>2.11.8</scala.version>
-    <scalatest.version>2.2.4</scalatest.version>
-    <spark.version>2.0.0</spark.version>
+    <kafka.version>1.1.0</kafka.version>
+    <scala.base.version>2.12</scala.base.version>
+    <scala.version>2.12.10</scala.version>
+    <scalatest.version>3.0.1</scalatest.version>
+    <spark.version>2.4.0</spark.version>
     <jline.version>2.12.1</jline.version>
     <jsr305.version>1.3.9</jsr305.version>
   </properties>
@@ -202,6 +201,12 @@
 
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-minicluster</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
@@ -258,7 +263,6 @@
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-mapred</artifactId>
         <version>${avro.version}</version>
-        <classifier>${avro.classifier}</classifier>
         <exclusions>
           <exclusion>
             <groupId>org.apache.avro</groupId>
@@ -268,13 +272,19 @@
       </dependency>
 
       <dependency>
+        <groupId>com.thoughtworks.paranamer</groupId>
+        <artifactId>paranamer</artifactId>
+        <version>2.8</version>
+      </dependency>
+
+      <dependency>
         <groupId>org.apache.parquet</groupId>
         <artifactId>parquet-avro</artifactId>
         <version>${parquet.version}</version>
         <exclusions>
           <exclusion>
             <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
+            <artifactId>hadoop-client</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
@@ -391,7 +401,7 @@
     
       <dependency>
         <groupId>org.mockito</groupId>
-        <artifactId>mockito-all</artifactId>
+        <artifactId>mockito-core</artifactId>
         <version>${mockito.version}</version>
       </dependency>
            
@@ -514,11 +524,6 @@
      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
-       <artifactId>hadoop-hdfs</artifactId>
-       <version>${hadoop.version}</version>
-     </dependency>
-     <dependency>
-       <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>${hadoop.version}</version>
      </dependency>
@@ -637,7 +642,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-compiler-plugin</artifactId>
-          <version>3.3</version>
+          <version>3.8.1</version>
           <configuration>
             <source>${java.source.version}</source>
             <target>${java.target.version}</target>
@@ -685,7 +690,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.5.4</version>
+          <version>3.1.1</version>
         </plugin>
         <plugin>
           <groupId>net.alchim31.maven</groupId>
@@ -695,9 +700,10 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.18.1</version>
+          <version>2.22.2</version>
           <configuration>
-            <argLine>-Xmx2G -XX:PermSize=512m -XX:MaxPermSize=1G -Dfile.encoding=UTF-8</argLine>
+            <useSystemClassLoader>false</useSystemClassLoader>
+            <argLine>-Xmx2G -Dfile.encoding=UTF-8</argLine>
           </configuration>
         </plugin>
         <plugin>
@@ -837,9 +843,8 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
-        <version>2.18.1</version>
+        <version>2.22.2</version>
         <configuration>
-          <argLine>-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m -Dfile.encoding=UTF-8</argLine>
           <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
         </configuration>
         <executions>
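
The PermGen sizing flags disappear from the Surefire and Failsafe argLine because Java 8 removed the permanent generation; the JVM has ignored those flags since, and newer releases reject them. Setting useSystemClassLoader to false makes Surefire load tests through an isolated classloader rather than a manifest-only jar on the system classpath, presumably so that classpath-inspecting tests such as AvroChildClassloaderTestRunner still see the real classpath in java.class.path.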