SQOOP-2372: Importing all tables as Parquet throws an NPE
(Qian Xu via Abraham Elmahrek)

With --all-tables, no table name is set on SqoopOptions, so CodeGenTool's
fallback to options.getTableName() left className null and the following
equalsIgnoreCase call threw a NullPointerException. Fall back to the
per-table name passed into the tool instead, and cover the Parquet path
with a new case in TestAllTables.
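
A minimal, self-contained sketch (not part of the patch) of the failure mode
and the fix. FakeOptions and the hard-coded table name are illustrative
stand-ins, not Sqoop API; they only model the two getters that matter here:

    public class Sqoop2372Sketch {

      /** Hypothetical stand-in for SqoopOptions; not a Sqoop class. */
      static final class FakeOptions {
        String getClassName() { return null; }   // no --class-name supplied
        String getTableName() { return null; }   // null under --all-tables
      }

      public static void main(String[] args) {
        FakeOptions options = new FakeOptions();
        String tableName = "EMPLOYEES";   // per-table name handed to the tool

        // Pre-patch fallback: options.getTableName() is null for --all-tables.
        String broken = options.getClassName() != null
            ? options.getClassName() : options.getTableName();
        try {
          broken.equalsIgnoreCase(options.getTableName());  // NullPointerException
        } catch (NullPointerException expected) {
          System.out.println("pre-patch: NPE, as reported in SQOOP-2372");
        }

        // Patched fallback: use the per-table name, so the generated class name
        // is always non-null and gets the "codegen_" prefix when it matches.
        String fixed = options.getClassName() != null
            ? options.getClassName() : tableName;
        if (fixed.equalsIgnoreCase(tableName)) {
          fixed = "codegen_" + fixed;
        }
        System.out.println("patched: generated class name = " + fixed);
      }
    }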
diff --git a/src/java/org/apache/sqoop/tool/CodeGenTool.java b/src/java/org/apache/sqoop/tool/CodeGenTool.java
index ab339ad..22ab030 100644
--- a/src/java/org/apache/sqoop/tool/CodeGenTool.java
+++ b/src/java/org/apache/sqoop/tool/CodeGenTool.java
@@ -93,8 +93,8 @@
if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
String className = options.getClassName() != null ?
- options.getClassName() : options.getTableName();
- if (className.equalsIgnoreCase(options.getTableName())) {
+ options.getClassName() : tableName;
+ if (className.equalsIgnoreCase(tableName)) {
className = "codegen_" + className;
options.setClassName(className);
LOG.info("Will generate java class as " + options.getClassName());
diff --git a/src/test/com/cloudera/sqoop/TestAllTables.java b/src/test/com/cloudera/sqoop/TestAllTables.java
index 39e8941..f981024 100644
--- a/src/test/com/cloudera/sqoop/TestAllTables.java
+++ b/src/test/com/cloudera/sqoop/TestAllTables.java
@@ -23,6 +23,7 @@
import java.util.ArrayList;
import java.util.List;
+import org.apache.avro.generic.GenericRecord;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -34,6 +35,9 @@
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.ImportJobTestCase;
import com.cloudera.sqoop.tool.ImportAllTablesTool;
+import org.kitesdk.data.Dataset;
+import org.kitesdk.data.DatasetReader;
+import org.kitesdk.data.Datasets;
/**
* Test the --all-tables functionality that can import multiple tables.
@@ -44,13 +48,10 @@
* Create the argv to pass to Sqoop.
* @return the argv as an array of strings.
*/
- private String [] getArgv(boolean includeHadoopFlags, String[] excludeTables) {
+ private String [] getArgv(String[] extraArgs, String[] excludeTables) {
ArrayList<String> args = new ArrayList<String>();
- if (includeHadoopFlags) {
- CommonArgs.addHadoopFlags(args);
- }
-
+ CommonArgs.addHadoopFlags(args);
args.add("--warehouse-dir");
args.add(getWarehouseDir());
args.add("--connect");
@@ -63,6 +64,11 @@
args.add("--exclude-tables");
args.add(StringUtils.join(excludeTables, ","));
}
+ if (extraArgs != null) {
+ for (String arg : extraArgs) {
+ args.add(arg);
+ }
+ }
return args.toArray(new String[0]);
}
@@ -124,7 +130,7 @@
}
public void testMultiTableImport() throws IOException {
- String [] argv = getArgv(true, null);
+ String [] argv = getArgv(null, null);
runImport(new ImportAllTablesTool(), argv);
Path warehousePath = new Path(this.getWarehouseDir());
@@ -159,9 +165,38 @@
}
}
+ public void testMultiTableImportAsParquetFormat() throws IOException {
+ String [] argv = getArgv(new String[]{"--as-parquetfile"}, null);
+ runImport(new ImportAllTablesTool(), argv);
+
+ Path warehousePath = new Path(this.getWarehouseDir());
+ int i = 0;
+ for (String tableName : this.tableNames) {
+ Path tablePath = new Path(warehousePath, tableName);
+ Dataset dataset = Datasets.load("dataset:file:" + tablePath);
+
+ // dequeue the expected value for this table. This
+ // list has the same order as the tableNames list.
+ String expectedVal = Integer.toString(i++) + ","
+ + this.expectedStrings.get(0);
+ this.expectedStrings.remove(0);
+
+ DatasetReader<GenericRecord> reader = dataset.newReader();
+ try {
+ GenericRecord record = reader.next();
+ String line = record.get(0) + "," + record.get(1);
+ assertEquals("Table " + tableName + " expected a different string",
+ expectedVal, line);
+ assertFalse(reader.hasNext());
+ } finally {
+ reader.close();
+ }
+ }
+ }
+
public void testMultiTableImportWithExclude() throws IOException {
String exclude = this.tableNames.get(0);
- String [] argv = getArgv(true, new String[]{ exclude });
+ String [] argv = getArgv(null, new String[]{ exclude });
runImport(new ImportAllTablesTool(), argv);
Path warehousePath = new Path(this.getWarehouseDir());