SQOOP-2437: Use hive configuration to connect to secure metastore

(Abraham Elmahrek via Jarek Jarcec Cecho)
diff --git a/src/java/org/apache/sqoop/hive/HiveConfig.java b/src/java/org/apache/sqoop/hive/HiveConfig.java
new file mode 100644
index 0000000..18a722b
--- /dev/null
+++ b/src/java/org/apache/sqoop/hive/HiveConfig.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+import java.util.Map;
+
+public class HiveConfig {
+
+  public static final Log LOG = LogFactory.getLog(HiveConfig.class.getName());
+
+  public static final String HIVE_CONF_CLASS = "org.apache.hadoop.hive.conf.HiveConf";
+
+  public static final String HIVE_SASL_ENABLED = "hive.metastore.sasl.enabled";
+
+  /**
+   * Dynamically create a Hive configuration object by loading HiveConf through reflection.
+   * @param conf base Hadoop configuration used to seed the HiveConf
+   * @return the resulting HiveConf as a Configuration, or null if HiveConf cannot be loaded
+   */
+  public static Configuration getHiveConf(Configuration conf) {
+    try {
+      Class<?> hiveConfClass = Class.forName(HIVE_CONF_CLASS);
+      return (Configuration) hiveConfClass.getConstructor(Configuration.class, Class.class)
+          .newInstance(conf, Configuration.class);
+    } catch (ClassNotFoundException ex) {
+      LOG.error("Could not load " + HIVE_CONF_CLASS
+          + ". Make sure HIVE_CONF_DIR is set correctly.");
+    } catch (Exception ex) {
+      LOG.error("Could not instantiate HiveConf instance.", ex);
+    }
+
+    return null;
+  }
+
+  /**
+   * Copy Hive configuration into conf without overriding properties that are already set.
+   * @param hiveConf source Hive configuration to copy from; a null value is treated as empty
+   * @param conf destination configuration to copy into
+   */
+  public static void addHiveConfigs(Configuration hiveConf, Configuration conf) {
+    if (hiveConf == null) {
+      // HiveConf could not be loaded, so there is nothing to copy.
+      return;
+    }
+    for (Map.Entry<String, String> item : hiveConf) {
+      conf.setIfUnset(item.getKey(), item.getValue());
+    }
+  }
+}
+
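The two helpers above are intended to be used together, mirroring the call sites in HiveImport and ParquetJob below: load hive-site.xml through HiveConf reflectively, merge it into the job configuration, and only then read Hive-related keys such as hive.metastore.sasl.enabled. A minimal sketch of that calling pattern (the class name and printed message are illustrative only, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.hive.HiveConfig;

    public class HiveConfigUsageSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Load Hive settings through HiveConf; getHiveConf() returns null when
        // HiveConf is not on the classpath, so guard before merging.
        Configuration hiveConf = HiveConfig.getHiveConf(conf);
        if (hiveConf != null) {
          // Merge without clobbering values already set on the job configuration.
          HiveConfig.addHiveConfigs(hiveConf, conf);
        }

        // Security decisions can now be keyed off the merged configuration.
        boolean secureMetastore = conf.getBoolean(HiveConfig.HIVE_SASL_ENABLED, false);
        System.out.println("Secure metastore: " + secureMetastore);
      }
    }
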
diff --git a/src/java/org/apache/sqoop/hive/HiveImport.java b/src/java/org/apache/sqoop/hive/HiveImport.java
index 2ec7f2b..e03d33c 100644
--- a/src/java/org/apache/sqoop/hive/HiveImport.java
+++ b/src/java/org/apache/sqoop/hive/HiveImport.java
@@ -389,7 +389,9 @@
     List<String> newArgs = new LinkedList<String>();
     newArgs.addAll(Arrays.asList(args));
 
-    if (System.getProperty("mapreduce.job.credentials.binary") != null) {
+    HiveConfig.addHiveConfigs(HiveConfig.getHiveConf(configuration), configuration);
+
+    if (configuration.getBoolean(HiveConfig.HIVE_SASL_ENABLED, false)) {
       newArgs.add("--hiveconf");
       newArgs.add("hive.metastore.sasl.enabled=true");
     }
diff --git a/src/java/org/apache/sqoop/mapreduce/ParquetJob.java b/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
index f310419..2ebdea1 100644
--- a/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.sqoop.hive.HiveConfig;
 import org.kitesdk.data.CompressionType;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetDescriptor;
@@ -38,7 +39,6 @@
 
 import java.io.IOException;
 import java.lang.reflect.Method;
-import java.util.Map;
 
 /**
  * Helper class for setting up a Parquet MapReduce job.
@@ -47,7 +47,6 @@
 
   public static final Log LOG = LogFactory.getLog(ParquetJob.class.getName());
 
-  public static final String HIVE_CONF_CLASS = "org.apache.hadoop.hive.conf.HiveConf";
   public static final String HIVE_METASTORE_CLIENT_CLASS = "org.apache.hadoop.hive.metastore.HiveMetaStoreClient";
   public static final String HIVE_METASTORE_SASL_ENABLED = "hive.metastore.sasl.enabled";
   // Purposefully choosing the same token alias as the one Oozie chooses.
@@ -90,12 +89,12 @@
   public static void configureImportJob(JobConf conf, Schema schema,
       String uri, WriteMode writeMode) throws IOException {
     Dataset dataset;
-    Configuration hiveConf = getHiveConf(conf);
+    Configuration hiveConf = HiveConfig.getHiveConf(conf);
 
     // Add hive delegation token only if we don't already have one.
     if (uri.startsWith("dataset:hive") && isSecureMetastore(hiveConf)) {
       // Copy hive configs to job config
-      addHiveConfigs(hiveConf, conf);
+      HiveConfig.addHiveConfigs(hiveConf, conf);
 
       if (conf.getCredentials().getToken(new Text(HIVE_METASTORE_TOKEN_ALIAS)) == null) {
         addHiveDelegationToken(conf);
@@ -145,25 +144,6 @@
   }
 
   /**
-   * Dynamically create hive configuration object.
-   * @param conf
-   * @return
-   */
-  private static Configuration getHiveConf(Configuration conf) {
-    try {
-      Class HiveConfClass = Class.forName(HIVE_CONF_CLASS);
-      return ((Configuration)(HiveConfClass.getConstructor(Configuration.class, Class.class)
-          .newInstance(conf, Configuration.class)));
-    } catch (ClassNotFoundException ex) {
-      LOG.error("Could not load " + HIVE_CONF_CLASS
-          + ". Make sure HIVE_CONF_DIR is set correctly.");
-    } catch (Exception ex) {
-      LOG.error("Could not instantiate HiveConf instance.", ex);
-    }
-    return null;
-  }
-
-  /**
    * Add hive delegation token to credentials store.
    * @param conf
    */
@@ -182,9 +162,9 @@
     }
 
     try {
-      HiveConfClass = Class.forName(HIVE_CONF_CLASS);
+      HiveConfClass = Class.forName(HiveConfig.HIVE_CONF_CLASS);
     } catch (ClassNotFoundException ex) {
-      LOG.error("Could not load " + HIVE_CONF_CLASS
+      LOG.error("Could not load " + HiveConfig.HIVE_CONF_CLASS
           + " when adding hive delegation token."
           + " Make sure HIVE_CONF_DIR is set correctly.", ex);
       throw new RuntimeException("Couldn't fetch delegation token.", ex);
@@ -209,15 +189,4 @@
       throw new RuntimeException("Couldn't fetch delegation token.", ex);
     }
   }
-
-  /**
-   * Add hive conf to configuration object without overriding already set properties.
-   * @param hiveConf
-   * @param conf
-   */
-  private static void addHiveConfigs(Configuration hiveConf, Configuration conf) {
-    for (Map.Entry<String, String> item : hiveConf) {
-      conf.setIfUnset(item.getKey(), item.getValue());
-    }
-  }
 }
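
Taken together, the patch routes both the Hive CLI import path and the Kite/Parquet path through the shared HiveConfig helpers, so secure-metastore detection now comes from hive-site.xml rather than from the presence of mapreduce.job.credentials.binary. As a quick sanity check of the "set if unset" merge semantics both call sites rely on, here is a small standalone sketch (not part of the patch; the class name and property values are made up for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.hive.HiveConfig;

    public class AddHiveConfigsSketch {
      public static void main(String[] args) {
        // Pretend hive-site.xml provided these two properties.
        Configuration hiveConf = new Configuration(false);
        hiveConf.set("hive.metastore.uris", "thrift://metastore.example.com:9083");
        hiveConf.set(HiveConfig.HIVE_SASL_ENABLED, "true");

        // The job configuration already pins the metastore URI.
        Configuration jobConf = new Configuration(false);
        jobConf.set("hive.metastore.uris", "thrift://override.example.com:9083");

        HiveConfig.addHiveConfigs(hiveConf, jobConf);

        // Keys already present keep their job-level value; missing keys are filled in.
        System.out.println(jobConf.get("hive.metastore.uris"));        // thrift://override.example.com:9083
        System.out.println(jobConf.get(HiveConfig.HIVE_SASL_ENABLED)); // true
      }
    }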