SQOOP-2535: Add error handling to HiveConf
(Dian Fu via Jarek Jarcec Cecho)
diff --git a/src/java/org/apache/sqoop/hive/HiveConfig.java b/src/java/org/apache/sqoop/hive/HiveConfig.java
index 18a722b..43ac295 100644
--- a/src/java/org/apache/sqoop/hive/HiveConfig.java
+++ b/src/java/org/apache/sqoop/hive/HiveConfig.java
@@ -22,6 +22,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import java.io.IOException;
import java.util.Map;
public class HiveConfig {
@@ -36,8 +37,9 @@
* Dynamically create hive configuration object.
* @param conf
* @return
+ * @throws IOException if the HiveConf instance cannot be created.
*/
- public static Configuration getHiveConf(Configuration conf) {
+ public static Configuration getHiveConf(Configuration conf) throws IOException {
try {
Class HiveConfClass = Class.forName(HIVE_CONF_CLASS);
return ((Configuration)(HiveConfClass.getConstructor(Configuration.class, Class.class)
@@ -45,11 +47,11 @@
} catch (ClassNotFoundException ex) {
LOG.error("Could not load " + HIVE_CONF_CLASS
+ ". Make sure HIVE_CONF_DIR is set correctly.");
+ throw new IOException(ex);
} catch (Exception ex) {
LOG.error("Could not instantiate HiveConf instance.", ex);
+ throw new IOException(ex);
}
-
- return null;
}
/**
diff --git a/src/java/org/apache/sqoop/hive/HiveImport.java b/src/java/org/apache/sqoop/hive/HiveImport.java
index e03d33c..4828375 100644
--- a/src/java/org/apache/sqoop/hive/HiveImport.java
+++ b/src/java/org/apache/sqoop/hive/HiveImport.java
@@ -385,7 +385,7 @@
}
}
- private String[] getHiveArgs(String... args) {
+ private String[] getHiveArgs(String... args) throws IOException {
List<String> newArgs = new LinkedList<String>();
newArgs.addAll(Arrays.asList(args));
diff --git a/src/java/org/apache/sqoop/mapreduce/ParquetJob.java b/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
index 2ebdea1..b077d9b 100644
--- a/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/ParquetJob.java
@@ -89,15 +89,17 @@
public static void configureImportJob(JobConf conf, Schema schema,
String uri, WriteMode writeMode) throws IOException {
Dataset dataset;
- Configuration hiveConf = HiveConfig.getHiveConf(conf);
// Add hive delegation token only if we don't already have one.
- if (uri.startsWith("dataset:hive") && isSecureMetastore(hiveConf)) {
- // Copy hive configs to job config
- HiveConfig.addHiveConfigs(hiveConf, conf);
+ if (uri.startsWith("dataset:hive")) {
+ Configuration hiveConf = HiveConfig.getHiveConf(conf);
+ if (isSecureMetastore(hiveConf)) {
+ // Copy hive configs to job config
+ HiveConfig.addHiveConfigs(hiveConf, conf);
- if (conf.getCredentials().getToken(new Text(HIVE_METASTORE_TOKEN_ALIAS)) == null) {
- addHiveDelegationToken(conf);
+ if (conf.getCredentials().getToken(new Text(HIVE_METASTORE_TOKEN_ALIAS)) == null) {
+ addHiveDelegationToken(conf);
+ }
}
}