SQOOP-3010: Sqoop should not allow --as-parquetfile with hcatalog jobs or when hive import with create-hive-table is used
(Sowmya Ramesh via Venkat Ranganathan)
diff --git a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
index fecdf43..b71bc5e 100644
--- a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
@@ -1421,6 +1421,14 @@
+ "importing into SequenceFile format.");
}
+ // Hive import and create hive table not compatible for ParquetFile format
+ if (options.doHiveImport()
+ && options.doFailIfHiveTableExists()
+ && options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+ throw new InvalidOptionsException("Hive import and create hive table is not compatible with "
+ + "importing into ParquetFile format.");
+ }
+
if (options.doHiveImport()
&& options.isAppendMode()
&& !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
@@ -1598,6 +1606,12 @@
+ " option." + HELP_STR);
}
+ if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+ throw new InvalidOptionsException("HCatalog job is not compatible with "
+ + "ParquetFile format option " + FMT_PARQUETFILE_ARG
+ + " option." + HELP_STR);
+ }
+
if (options.getHCatalogPartitionKeys() != null
&& options.getHCatalogPartitionValues() == null) {
throw new InvalidOptionsException("Either both --hcatalog-partition-keys"
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index b626964..26d087b 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -402,6 +402,27 @@
{"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
}
+ /**
+ * Test hive create and --as-parquetfile options validation.
+ */
+ @Test
+ public void testCreateHiveImportAsParquet() throws ParseException {
+ final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
+ setCurTableName(TABLE_NAME);
+ setNumCols(3);
+ String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
+ ImportTool tool = new ImportTool();
+
+ try {
+ tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
+ null, true));
+ fail("Expected InvalidOptionsException");
+ } catch (InvalidOptionsException ex) {
+ /* success */
+ }
+ }
+
+
/** Test that dates are coerced properly to strings. */
@Test
public void testDate() throws IOException {
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 5cd4c26..54b4552 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -164,6 +164,24 @@
}
}
+ public void testHCatExportWithParquetFile() throws Exception {
+ String[] args = {
+ "--connect",
+ "jdbc:db:url",
+ "--table",
+ "dbtable",
+ "--hcatalog-table",
+ "table",
+ "--as-parquetfile",
+ };
+ try {
+ SqoopOptions opts = parseExportArgs(args);
+ exportTool.validateOptions(opts);
+ fail("Expected InvalidOptionsException");
+ } catch (SqoopOptions.InvalidOptionsException ioe) {
+ // expected.
+ }
+ }
public void testHCatImportWithSequenceFile() throws Exception {
String[] args = {
@@ -184,6 +202,28 @@
}
}
+ public void testHCatImportWithParquetFile() throws Exception {
+ String[] args = {
+ "--hcatalog-table",
+ "table",
+ "--create-hcatalog-table",
+ "--connect",
+ "jdbc:db:url",
+ "--table",
+ "dbtable",
+ "--hcatalog-table",
+ "table",
+ "--as-parquetfile",
+ };
+ try {
+ SqoopOptions opts = parseImportArgs(args);
+ importTool.validateOptions(opts);
+ fail("Expected InvalidOptionsException");
+ } catch (SqoopOptions.InvalidOptionsException ioe) {
+ // expected.
+ }
+ }
+
public void testHCatImportWithAvroFile() throws Exception {
String[] args = {
"--connect",