SQOOP-3142: Restore fail messages removed in SQOOP-3092

(Boglarka Egyed via Attila Szabo)
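
For reference, the pattern being restored relies on JUnit 4.12's ExpectedException rule:
reportMissingExceptionWithMessage() sets the failure message reported when the expected
exception is never thrown. Below is a minimal, hypothetical sketch of that pattern; the
class, method, and message names are illustrative only and are not taken from the Sqoop tree.

    import java.io.IOException;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    // Hypothetical example; not part of the Sqoop source tree.
    public class ExpectedExceptionMessageExample {

      @Rule
      public ExpectedException thrown = ExpectedException.none();

      @Test
      public void testFailsWithDescriptiveMessage() throws IOException {
        // Declare the expected exception type.
        thrown.expect(IOException.class);
        // If the call below does not throw, the test fails with this
        // message instead of the rule's generic missing-exception text.
        thrown.reportMissingExceptionWithMessage(
            "Expected IOException from doomedOperation()");
        doomedOperation();
      }

      // Stand-in for the production call under test.
      private void doomedOperation() throws IOException {
        throw new IOException("simulated failure");
      }
    }

Each hunk below applies this same two-call pattern (expect + reportMissingExceptionWithMessage)
to an existing test so that a missing-exception failure is self-describing.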
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 1d67a2d..6f13fe2 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -468,6 +468,8 @@
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during Hive table creation with " +
+        "--as-parquetfile");
     tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
         null, true));
   }
@@ -509,6 +511,7 @@
     String [] vals = { "3.14159", "'foo'" };
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on erroneous Hive exit status");
     runImportTest(TABLE_NAME, types, vals, "failingImport.q",
         getArgv(false, null), new ImportTool());
   }
@@ -641,6 +644,8 @@
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException with conflicting Hive delimiter " +
+        "drop/replace options");
     tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
         null, true));
   }
@@ -698,16 +703,22 @@
 
     // Test hive-import with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with the partition key " +
+        "as an importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs1), new ImportTool());
 
     // Test hive-import with the 2nd args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with the partition key " +
+        "as an importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs2), new ImportTool());
 
     // Test create-hive-table with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive table creation with the partition key " +
+        "as an importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
   }
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
index 4db629f..6af12da 100644
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
@@ -55,6 +55,7 @@
   // Test getHiveOctalCharCode and expect an IllegalArgumentException.
   private void expectExceptionInCharCode(int charCode) {
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
     TableDefWriter.getHiveOctalCharCode(charCode);
   }
 
@@ -221,6 +222,7 @@
     writer.setColumnTypes(colTypes);
 
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException on non-applied Hive type mapping");
     String createTable = writer.getCreateTableStmt();
   }
 
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
index 982b6ad..c78a5ae 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
@@ -70,7 +70,9 @@
     verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
     verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
 
-    thrown.expect(IOException.class);
+    thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name when " +
+        "getting the short codec name");
     verifyShortName("NONE", "bogus");
   }
 
@@ -83,6 +85,7 @@
   @Test
   public void testUnrecognizedCodec() throws UnsupportedCodecException {
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name");
     CodecMap.getCodec("bogus", new Configuration());
   }
 
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java
index 7f8ca6d..029758c 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java
@@ -143,6 +143,7 @@
     reader.close();
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException calling next after close");
     reader.next();
 
     // A second close shouldn't hurt anything. This should be a no-op.
@@ -590,6 +591,7 @@
     runCompressedTest(CodecMap.DEFLATE);
 
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException for lzo");
     runCompressedTest(CodecMap.LZO);
   }
 
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 71a371d..104effb 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -110,6 +110,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --target-dir");
     importTool.validateOptions(opts);
   }
 
@@ -128,6 +130,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --warehouse-dir");
     importTool.validateOptions(opts);
   }
 
@@ -145,6 +149,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --hive-import");
     importTool.validateOptions(opts);
   }
 
@@ -163,6 +169,8 @@
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --export-dir");
     exportTool.validateOptions(opts);
   }
 
@@ -180,6 +188,8 @@
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --as-parquetfile");
     exportTool.validateOptions(opts);
   }
 
@@ -197,6 +207,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-sequencefile");
     importTool.validateOptions(opts);
   }
 
@@ -217,6 +229,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-parquetfile");
     importTool.validateOptions(opts);
   }
 
@@ -234,6 +248,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-avrodatafile");
     importTool.validateOptions(opts);
   }
 
@@ -278,6 +294,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --drop-and-create-hcatalog-table");
     importTool.validateOptions(opts);
   }
 
@@ -331,6 +349,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with only HCatalog keys");
     importTool.validateOptions(opts);
   }
 
@@ -351,6 +371,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with mismatched keys and values");
     importTool.validateOptions(opts);
   }
 
@@ -371,6 +393,8 @@
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with empty keys and values");
     importTool.validateOptions(opts);
   }
 
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index adb795e..911749f 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -146,6 +146,8 @@
     TextSplitter splitter = new TextSplitter();
 
     thrown.expect(ValidationException.class);
+    thrown.reportMissingExceptionWithMessage("Expected ValidationException during splitting " +
+        "when the min string is greater than the max string");
     splitter.split(4, "Z", "A", "");
   }