HBASE-23606 Remove usages of deprecated external dependency APIs (#59)

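Replace deprecated third-party APIs with their supported equivalents:
commons-cli BasicParser (deprecated since commons-cli 1.3) becomes
DefaultParser, KafkaConsumer#poll(long) (deprecated since Kafka 2.0)
becomes poll(Duration), and the RandomStringUtils usage in
IntegrationTestSparkBulkLoad is replaced with a deterministic payload,
dropping the commons-lang3 import from that test.
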
Signed-off-by: Balazs Meszaros <meszibalu@apache.org>
diff --git a/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java b/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java
index 9704c67..477cddd 100755
--- a/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java
+++ b/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/DumpToStringListener.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.kafka;
 
+import java.time.Duration;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Properties;
@@ -36,13 +37,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.org.apache.commons.cli.BasicParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 
-
 /**
  * Connects to Kafka and reads from the passed-in topics. Parses each message into an Avro
  * object and dumps it to the console.
@@ -52,7 +52,6 @@
   private static final Logger LOG = LoggerFactory.getLogger(DumpToStringListener.class);
 
   private DumpToStringListener(){
-
   }
 
   public static void main(String[] args) {
@@ -65,12 +64,14 @@
     options.addRequiredOption("t", "kafkatopics", true,"Kafka Topics "
         + "to subscribe to (comma delimited)");
     CommandLine commandLine = null;
+
     try {
-      commandLine = new BasicParser().parse(options, args);
+      commandLine = new DefaultParser().parse(options, args);
     } catch (ParseException e) {
       LOG.error("Could not parse: ", e);
       printUsageAndExit(options, -1);
     }
+
     SpecificDatumReader<HBaseKafkaEvent> dreader =
             new SpecificDatumReader<>(HBaseKafkaEvent.SCHEMA$);
 
@@ -81,11 +82,11 @@
     props.put("key.deserializer", ByteArrayDeserializer.class.getName());
     props.put("value.deserializer", ByteArrayDeserializer.class.getName());
 
-    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props);){
+    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
       consumer.subscribe(Arrays.stream(topic.split(",")).collect(Collectors.toList()));
 
       while (true) {
-        ConsumerRecords<byte[], byte[]> records = consumer.poll(10000);
+        ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(10000));
         Iterator<ConsumerRecord<byte[], byte[]>> it = records.iterator();
         while (it.hasNext()) {
           ConsumerRecord<byte[], byte[]> record = it.next();
@@ -108,5 +109,4 @@
                     "[-k <kafka brokers (comma delmited)>] \n", true);
     System.exit(exitCode);
   }
-
 }
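
For reference, a minimal sketch of the non-deprecated pattern the hunks above
converge on; the class name, group id, and topic below are illustrative
assumptions, not code from this patch:

    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.ByteArrayDeserializer;

    import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
    import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
    import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;

    public class PollSketch {
      public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addRequiredOption("k", "kafkabrokers", true, "Kafka brokers (comma delimited)");
        // DefaultParser supersedes the deprecated BasicParser; parse() keeps the same contract.
        CommandLine cmd = new DefaultParser().parse(options, args);

        Properties props = new Properties();
        props.put("bootstrap.servers", cmd.getOptionValue('k'));
        props.put("group.id", "dump-sketch");
        props.put("key.deserializer", ByteArrayDeserializer.class.getName());
        props.put("value.deserializer", ByteArrayDeserializer.class.getName());

        try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
          consumer.subscribe(Collections.singletonList("example-topic"));
          // poll(Duration) supersedes the deprecated poll(long); the Duration bounds the
          // whole call, including blocking metadata fetches (KIP-266).
          ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(10000));
          for (ConsumerRecord<byte[], byte[]> record : records) {
            System.out.println(record.topic() + "@" + record.offset());
          }
        }
      }
    }
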
diff --git a/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java b/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java
index 16bf544..2c1aca1 100755
--- a/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java
+++ b/kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java
@@ -49,8 +49,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.org.apache.commons.cli.BasicParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
@@ -162,7 +162,7 @@
     String[] restArgs = parser.getRemainingArgs();
 
     try {
-      commandLine = new BasicParser().parse(options, restArgs);
+      commandLine = new DefaultParser().parse(options, restArgs);
     } catch (ParseException e) {
       LOG.error("Could not parse: ", e);
       printUsageAndExit(options, -1);
diff --git a/spark/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java b/spark/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
index e5a8ddd..802938e 100644
--- a/spark/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
+++ b/spark/hbase-spark-it/src/test/java/org/apache/hadoop/hbase/spark/IntegrationTestSparkBulkLoad.java
@@ -28,7 +28,7 @@
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
-import org.apache.commons.lang3.RandomStringUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
@@ -224,8 +224,7 @@
         // Insert record into a list
         List<byte[]> tmp1 = Arrays.asList(rk, CHAIN_FAM, chainIdArray, Bytes.toBytes(nextRow));
         List<byte[]> tmp2 = Arrays.asList(rk, SORT_FAM, chainIdArray, Bytes.toBytes(i));
-        List<byte[]> tmp3 = Arrays.asList(rk, DATA_FAM, chainIdArray, Bytes.toBytes(
-            RandomStringUtils.randomAlphabetic(50)));
+        List<byte[]> tmp3 = Arrays.asList(rk, DATA_FAM, chainIdArray, Bytes.toBytes("random" + i));
         res.add(tmp1);
         res.add(tmp2);
         res.add(tmp3);
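
The last hunk drops commons-lang3 from the integration test by writing a
deterministic "random" + i payload in place of
RandomStringUtils.randomAlphabetic(50), which also makes the generated chain
data reproducible across runs. If random alphabetic payloads were still wanted
without commons-lang3, a JDK-only sketch (the helper below is hypothetical,
not part of this patch):

    import java.util.concurrent.ThreadLocalRandom;

    public final class RandomAlpha {
      // Hypothetical stand-in for RandomStringUtils.randomAlphabetic(n):
      // builds an n-character lowercase a-z string using only the JDK.
      static String randomAlphabetic(int n) {
        StringBuilder sb = new StringBuilder(n);
        ThreadLocalRandom rnd = ThreadLocalRandom.current();
        for (int i = 0; i < n; i++) {
          sb.append((char) ('a' + rnd.nextInt(26)));
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.println(randomAlphabetic(50)); // prints 50 random letters
      }
    }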