SQOOP-3092: Clean up expected exception logic in tests - Part II
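
The try/fail/catch idiom is replaced with JUnit 4's ExpectedException
rule: a test declares the expected exception type (and optionally a
message substring) up front, then invokes the throwing call as its
last statement. A minimal, self-contained sketch of the pattern (class
and method names here are illustrative, not taken from this patch):

    import java.io.IOException;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    public class ExpectedExceptionSketchTest {

      @Rule
      public ExpectedException thrown = ExpectedException.none();

      @Test
      public void failsWithIOException() throws IOException {
        // The rule passes the test only if an IOException escapes it.
        thrown.expect(IOException.class);
        throw new IOException("boom");
      }
    }

Because the rule can only verify an exception after it escapes the
test method, nothing after the throwing statement runs; helpers and
tests that check several failing calls therefore keep their try/catch
blocks. Each converted class also gains a suite() method returning a
JUnit4TestAdapter so that Ant's JUnit 3 based runner still picks up
the annotated tests.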

(Boglarka Egyed via Attila Szabo)
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 26d087b..6f488ab 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -26,6 +26,7 @@
 import java.util.Arrays;
 import java.util.List;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -34,6 +35,7 @@
 import org.apache.hadoop.fs.Path;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
 
 import com.cloudera.sqoop.SqoopOptions;
@@ -46,6 +48,9 @@
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.tool.SqoopTool;
 import org.apache.commons.cli.ParseException;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
 import org.kitesdk.data.Datasets;
@@ -53,11 +58,15 @@
 /**
  * Test HiveImport capability after an import to HDFS.
  */
+@RunWith(JUnit4.class)
 public class TestHiveImport extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory.getLog(
       TestHiveImport.class.getName());
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
   public void setUp() {
     super.setUp();
@@ -406,20 +415,16 @@
    * Test hive create and --as-parquetfile options validation.
    */
   @Test
-  public void testCreateHiveImportAsParquet() throws ParseException {
+  public void testCreateHiveImportAsParquet() throws ParseException, InvalidOptionsException {
     final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
     setCurTableName(TABLE_NAME);
     setNumCols(3);
     String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
     ImportTool tool = new ImportTool();
 
-    try {
-      tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
-          null, true));
-      fail("Expected InvalidOptionsException");
-    } catch (InvalidOptionsException ex) {
-      /* success */
-    }
+    thrown.expect(InvalidOptionsException.class);
+    tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
+        null, true));
   }
 
 
@@ -449,7 +454,7 @@
 
   /** If bin/hive returns an error exit status, we should get an IOException. */
   @Test
-  public void testHiveExitFails() {
+  public void testHiveExitFails() throws IOException {
     // The expected script is different than the one which would be generated
     // by this, so we expect an IOException out.
     final String TABLE_NAME = "FAILING_HIVE_IMPORT";
@@ -457,14 +462,10 @@
     setNumCols(2);
     String [] types = { "NUMERIC", "CHAR(64)" };
     String [] vals = { "3.14159", "'foo'" };
-    try {
-      runImportTest(TABLE_NAME, types, vals, "failingImport.q",
-          getArgv(false, null), new ImportTool());
-      // If we get here, then the run succeeded -- which is incorrect.
-      fail("FAILING_HIVE_IMPORT test should have thrown IOException");
-    } catch (IOException ioe) {
-      // expected; ok.
-    }
+
+    thrown.expect(IOException.class);
+    runImportTest(TABLE_NAME, types, vals, "failingImport.q",
+        getArgv(false, null), new ImportTool());
   }
 
   /** Test that we can set delimiters how we want them. */
@@ -585,7 +586,7 @@
    * Test hive drop and replace option validation.
    */
   @Test
-  public void testHiveDropAndReplaceOptionValidation() throws ParseException {
+  public void testHiveDropAndReplaceOptionValidation() throws ParseException, InvalidOptionsException {
     LOG.info("Testing conflicting Hive delimiter drop/replace options");
 
     setNumCols(3);
@@ -593,13 +594,10 @@
       "--"+BaseSqoopTool.HIVE_DROP_DELIMS_ARG, };
 
     ImportTool tool = new ImportTool();
-    try {
-      tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
-          null, true));
-      fail("Expected InvalidOptionsException");
-    } catch (InvalidOptionsException ex) {
-      /* success */
-    }
+
+    thrown.expect(InvalidOptionsException.class);
+    tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
+        null, true));
   }
 
   /**
@@ -627,7 +625,7 @@
    * IOException.
    * */
   @Test
-  public void testImportWithBadPartitionKey() {
+  public void testImportWithBadPartitionKey() throws IOException {
     final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
 
     LOG.info("Doing import of single row into " + TABLE_NAME + " table");
@@ -654,30 +652,34 @@
     };
 
     // Test hive-import with the 1st args.
     try {
       runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
           getArgv(false, moreArgs1), new ImportTool());
       fail(TABLE_NAME + " test should have thrown IOException");
     } catch (IOException ioe) {
       // expected; ok.
     }
 
     // Test hive-import with the 2nd args.
     try {
       runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
           getArgv(false, moreArgs2), new ImportTool());
       fail(TABLE_NAME + " test should have thrown IOException");
     } catch (IOException ioe) {
       // expected; ok.
     }
 
     // Test create-hive-table with the 1st args.
+    // The two cases above keep their try/catch blocks: the
+    // ExpectedException rule verifies a single throw per test method,
+    // so only this last case uses it.
+    thrown.expect(IOException.class);
+    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+        getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
+  }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestHiveImport.class);
   }
 }
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
index 55e572e..8d6b9d5 100644
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
@@ -20,6 +20,7 @@
 
 import java.util.Map;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -30,26 +31,40 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
 import java.sql.Types;
 
 /**
  * Test Hive DDL statement generation.
  */
+@RunWith(JUnit4.class)
 public class TestTableDefWriter extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(
       TestTableDefWriter.class.getName());
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   // Test getHiveOctalCharCode and expect an IllegalArgumentException.
+  // The helper runs several times per test method, so it keeps its
+  // try/catch form; the ExpectedException rule would end the test at
+  // the first expected throw.
   private void expectExceptionInCharCode(int charCode) {
     try {
       TableDefWriter.getHiveOctalCharCode(charCode);
       fail("Expected IllegalArgumentException");
     } catch (IllegalArgumentException iae) {
       // Expected; ok.
     }
   }
 
+  @Test
   public void testHiveOctalCharCode() {
     assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
     assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
@@ -61,6 +76,7 @@
     expectExceptionInCharCode(254);
   }
 
+  @Test
   public void testDifferentTableNames() throws Exception {
     Configuration conf = new Configuration();
     SqoopOptions options = new SqoopOptions();
@@ -83,6 +99,7 @@
     assertTrue(loadData.indexOf("/inputTable'") != -1);
   }
 
+  @Test
   public void testDifferentTargetDirs() throws Exception {
     String targetDir = "targetDir";
     String inputTable = "inputTable";
@@ -111,6 +128,7 @@
     assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
   }
 
+  @Test
   public void testPartitions() throws Exception {
     String[] args = {
         "--hive-partition-key", "ds",
@@ -137,6 +155,7 @@
     assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
   }
 
+  @Test
   public void testLzoSplitting() throws Exception {
     String[] args = {
         "--compress",
@@ -165,6 +184,7 @@
         createTable);
   }
 
+  @Test
   public void testUserMapping() throws Exception {
     String[] args = {
         "--map-column-hive", "id=STRING,value=INTEGER",
@@ -191,6 +211,7 @@
     assertFalse(createTable.contains("`value` STRING"));
   }
 
+  @Test
   public void testUserMappingFailWhenCantBeApplied() throws Exception {
     String[] args = {
         "--map-column-hive", "id=STRING,value=INTEGER",
@@ -205,14 +226,11 @@
     colTypes.put("id", Types.INTEGER);
     writer.setColumnTypes(colTypes);
 
-    try {
-      String createTable = writer.getCreateTableStmt();
-      fail("Expected failure on non applied mapping.");
-    } catch(IllegalArgumentException iae) {
-      // Expected, ok
-    }
+    thrown.expect(IllegalArgumentException.class);
+    writer.getCreateTableStmt();
   }
 
+  @Test
   public void testHiveDatabase() throws Exception {
     String[] args = {
         "--hive-database", "db",
@@ -234,4 +252,9 @@
     assertNotNull(loadStmt);
     assertTrue(createTable.contains("`db`.`outputTable`"));
   }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestTableDefWriter.class);
+  }
 }
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
index 925c544..867f1d5 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
@@ -20,28 +20,40 @@
 
 import java.io.IOException;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 
 import junit.framework.TestCase;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test looking up codecs by name.
  */
+@RunWith(JUnit4.class)
 public class TestCodecMap extends TestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   private void verifyCodec(Class<?> c, String codecName)
       throws UnsupportedCodecException {
     CompressionCodec codec = CodecMap.getCodec(codecName, new Configuration());
     assertEquals(codec.getClass(), c);
   }
 
+  @Test
   public void testGetCodecNames() {
     // gzip is picked up from Hadoop defaults
     assertTrue(CodecMap.getCodecNames().contains("gzip"));
   }
 
+  @Test
   public void testGetCodec() throws IOException {
     verifyCodec(GzipCodec.class, "gzip");
     verifyCodec(GzipCodec.class, "Gzip");
@@ -52,15 +64,13 @@
     verifyCodec(GzipCodec.class, "org.apache.hadoop.io.compress.GzipCodec");
   }
 
+  @Test
   public void testGetShortName() throws UnsupportedCodecException {
     verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
     verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
-    try {
-      verifyShortName("NONE", "bogus");
-      fail("Expected IOException");
-    } catch (UnsupportedCodecException e) {
-      // Exception is expected
-    }
+
+    thrown.expect(UnsupportedCodecException.class);
+    verifyShortName("NONE", "bogus");
   }
 
   private void verifyShortName(String expected, String codecName)
@@ -69,12 +79,14 @@
       CodecMap.getCodecShortNameByName(codecName, new Configuration()));
   }
 
-  public void testUnrecognizedCodec() {
-    try {
-      CodecMap.getCodec("bogus", new Configuration());
-      fail("'bogus' codec should throw exception");
-    } catch (UnsupportedCodecException e) {
-      // expected
-    }
+  @Test
+  public void testUnrecognizedCodec() throws UnsupportedCodecException {
+    thrown.expect(UnsupportedCodecException.class);
+    CodecMap.getCodec("bogus", new Configuration());
+  }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestCodecMap.class);
   }
 }
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java
index 129b03a..d056750 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java
@@ -27,6 +27,7 @@
 import java.io.Writer;
 import java.nio.CharBuffer;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -34,10 +35,17 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test the LobFile reader/writer implementation.
  */
+@RunWith(JUnit4.class)
 public class TestLobFile extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(
@@ -57,6 +65,10 @@
   private Configuration conf;
   private FileSystem fs;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Before
   public void setUp() throws Exception {
     conf = new Configuration();
     conf.set("fs.default.name", "file:///");
@@ -130,12 +142,15 @@
 
     reader.close();
 
+    // The expected failure is caught locally: this helper is shared by
+    // many tests, and the ExpectedException rule would abort the calling
+    // test here, skipping its remaining checks and cleanup.
     try {
       reader.next();
       fail("Expected IOException calling next after close");
     } catch (IOException ioe) {
       // expected this.
     }
 
     // A second close shouldn't hurt anything. This should be a no-op.
     reader.close();
@@ -148,15 +163,18 @@
     fs.delete(p, false);
   }
 
+  @Test
   public void testEmptyRecord() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "empty.lob"), null);
   }
 
+  @Test
   public void testSingleRecord() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "single.lob"),
         null, "this is a single record!");
   }
 
+  @Test
   public void testMultiRecords() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "multi.lob"),
         CodecMap.NONE,
@@ -165,6 +183,7 @@
         "yet one more record graces this file.");
   }
 
+  @Test
   public void testMultiIndexSegments() throws Exception {
     // Test that we can use multiple IndexSegments.
     runClobFileTest(new Path(TEMP_BASE_DIR, "multi-index.lob"),
@@ -231,6 +250,7 @@
     assertFalse(reader.isRecordAvailable());
   }
 
+  @Test
   public void testVeryShortRead() throws Exception {
     // Read only a small fraction of a record, ensure that we can
     // read the next record, even when we've left more than a 16-byte
@@ -250,6 +270,7 @@
 
   }
 
+  @Test
   public void testIncompleteOverread() throws Exception {
     // Read most of the first record so that we partially consume the
     // next record start mark; make sure we realign properly.
@@ -266,6 +287,7 @@
         RECORD3);
   }
 
+  @Test
   public void testSeekToRecord() throws Exception {
     // Seek past the first two records and read the third.
 
@@ -342,6 +364,7 @@
     assertEquals(expectedRecord, finalRecord);
   }
 
+  @Test
   public void testManySeeks() throws Exception {
     // Test that we can do gymnastics with seeking between records.
 
@@ -505,6 +528,7 @@
     reader.close();
   }
 
+  @Test
   public void testBinaryRecords() throws Exception {
     // Write a BLOB file and read it all back.
 
@@ -523,6 +547,7 @@
     verifyBlobRecords(p, NUM_RECORDS, RECORD_LEN, RECORD_LEN);
   }
 
+  @Test
   public void testOverLengthBinaryRecord() throws Exception {
     // Write a record with a declared length shorter than the
     // actual length, and read it back.
@@ -556,6 +581,7 @@
     runClobFileTest(p, codec, records);
   }
 
+  @Test
   public void testCompressedFile() throws Exception {
     // Test all the various compression codecs.
 
@@ -564,15 +590,15 @@
     runCompressedTest(CodecMap.NONE);
     runCompressedTest(CodecMap.DEFLATE);
 
-    try {
-      // We expect this to throw UnsupportedCodecException
-      // because this class is not included in our package.
-      runCompressedTest(CodecMap.LZO);
-      fail("Expected unsupported codec exception for lzo");
-    } catch (UnsupportedCodecException uce) {
-      // We pass.
-      LOG.info("Got unsupported codec exception for lzo; expected -- good.");
-    }
+    // We expect this to throw UnsupportedCodecException
+    // because the LZO codec class is not bundled in our package.
+    thrown.expect(UnsupportedCodecException.class);
+    runCompressedTest(CodecMap.LZO);
+  }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestLobFile.class);
   }
 }
 
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 54b4552..d3337c7 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -18,6 +18,7 @@
 
 package org.apache.sqoop.hcat;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.junit.Before;
@@ -25,14 +26,23 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ExportTool;
 import com.cloudera.sqoop.tool.ImportTool;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test basic HCatalog related features.
  */
+@RunWith(JUnit4.class)
 public class TestHCatalogBasic extends TestCase {
   private static ImportTool importTool;
   private static ExportTool exportTool;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
   @Override
   public void setUp() {
@@ -49,6 +59,7 @@
     return opts;
   }
 
+  @Test
   public void testHCatalogHomeWithImport() throws Exception {
     String[] args = {
       "--hcatalog-home",
@@ -58,6 +69,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatalogHomeWithExport() throws Exception {
     String[] args = {
       "--hcatalog-home",
@@ -67,6 +79,7 @@
     SqoopOptions opts = parseExportArgs(args);
   }
 
+  @Test
   public void testHCatalogImport() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -76,6 +89,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatalogExport() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -85,6 +99,7 @@
     SqoopOptions opts = parseExportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithTargetDir() throws Exception {
     String[] args = {
       "--connect",
@@ -96,15 +111,13 @@
       "--target-dir",
       "/target/dir",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithWarehouseDir() throws Exception {
     String[] args = {
       "--connect",
@@ -116,15 +129,13 @@
       "--warehouse-dir",
       "/target/dir",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithHiveImport() throws Exception {
     String[] args = {
       "--connect",
@@ -135,15 +146,13 @@
       "table",
       "--hive-import",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatExportWithExportDir() throws Exception {
     String[] args = {
       "--connect",
@@ -155,15 +164,13 @@
       "--export-dir",
       "/export/dir",
     };
-    try {
-      SqoopOptions opts = parseExportArgs(args);
-      exportTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseExportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    exportTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatExportWithParquetFile() throws Exception {
     String[] args = {
       "--connect",
@@ -174,15 +181,13 @@
       "table",
       "--as-parquetfile",
     };
-    try {
-      SqoopOptions opts = parseExportArgs(args);
-      exportTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseExportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    exportTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithSequenceFile() throws Exception {
     String[] args = {
       "--connect",
@@ -193,15 +198,13 @@
       "table",
       "--as-sequencefile",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithParquetFile() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -215,15 +218,13 @@
       "table",
       "--as-parquetfile",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithAvroFile() throws Exception {
     String[] args = {
       "--connect",
@@ -234,14 +235,13 @@
       "table",
       "--as-avrodatafile",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
+
+  @Test
   public void testHCatImportWithCreateTable() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -251,6 +251,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithDropAndCreateTable() throws Exception {
     String[] args = {
             "--connect",
@@ -265,6 +266,7 @@
     importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithCreateTableAndDropAndCreateTable()
     throws Exception {
     String[] args = {
@@ -278,14 +280,12 @@
             "--drop-and-create-hcatalog-table",
     };
     SqoopOptions opts = parseImportArgs(args);
-    try {
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithStorageStanza() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -296,6 +296,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithDatabase() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -306,6 +307,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithPartKeys() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -318,6 +320,7 @@
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithOnlyHCatKeys() throws Exception {
     String[] args = {
       "--connect",
@@ -329,15 +332,13 @@
       "--hcatalog-partition-keys",
       "k1,k2",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithMismatchedKeysAndVals() throws Exception {
     String[] args = {
       "--connect",
@@ -351,14 +352,13 @@
       "--hcatalog-partition-values",
       "v1",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
+
+  @Test
   public void testHCatImportWithEmptyKeysAndVals() throws Exception {
     String[] args = {
       "--connect",
@@ -372,14 +372,13 @@
       "--hcatalog-partition-values",
       ",v1",
     };
-    try {
-      SqoopOptions opts = parseImportArgs(args);
-      importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    importTool.validateOptions(opts);
   }
+
+  @Test
   public void testHCatImportWithBothHCatAndHivePartOptions() throws Exception {
     String[] args = {
       "--connect",
@@ -400,4 +399,9 @@
     SqoopOptions opts = parseImportArgs(args);
     importTool.validateOptions(opts);
   }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestHCatalogBasic.class);
+  }
 }
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index 5cfb0a5..c402a54 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -23,15 +23,24 @@
 
 import com.cloudera.sqoop.mapreduce.db.TextSplitter;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
-import junit.framework.Test;
 import org.apache.sqoop.validation.ValidationException;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test that the TextSplitter implementation creates a sane set of splits.
  */
+@RunWith(JUnit4.class)
 public class TestTextSplitter extends TestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   public String formatArray(Object [] ar) {
     StringBuilder sb = new StringBuilder();
     sb.append("[");
@@ -70,30 +79,35 @@
     }
   }
 
+  @Test
   public void testStringConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
     assertEquals(BigDecimal.ZERO, emptyBigDec);
   }
 
+  @Test
   public void testBigDecConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
     assertEquals("", emptyStr);
   }
 
+  @Test
   public void testConvertA() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
     assertEquals("A", out);
   }
 
+  @Test
   public void testConvertZ() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
     assertEquals("Z", out);
   }
 
+  @Test
   public void testConvertThreeChars() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -101,6 +115,7 @@
     assertEquals("abc", out);
   }
 
+  @Test
   public void testConvertStr() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -108,6 +123,7 @@
     assertEquals("big str", out);
   }
 
+  @Test
   public void testConvertChomped() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -115,6 +131,7 @@
     assertEquals("AVeryLon", out);
   }
 
+  @Test
   public void testAlphabetSplit() throws SQLException, ValidationException {
     // This should give us 25 splits, one per letter.
     TextSplitter splitter = new TextSplitter();
@@ -125,17 +142,15 @@
     assertArrayEquals(expected, splits.toArray(new String [0]));
   }
 
-    public void testAlphabetSplitWhenMinStringGreaterThanMaxString() throws SQLException {
-        TextSplitter splitter = new TextSplitter();
-        try {
-            splitter.split(4, "Z", "A", "");
-            fail();
-        } catch (ValidationException e) {
-            // expected
-            assertTrue(true);
-        }
-    }
+  @Test
+  public void testAlphabetSplitWhenMinStringGreaterThanMaxString() throws SQLException, ValidationException {
+    TextSplitter splitter = new TextSplitter();
 
+    thrown.expect(ValidationException.class);
+    splitter.split(4, "Z", "A", "");
+  }
+
+  @Test
   public void testCommonPrefix() throws SQLException, ValidationException {
     // Splits between 'Hand' and 'Hardy'
     TextSplitter splitter = new TextSplitter();
@@ -148,6 +163,7 @@
     assertEquals(6, splits.size());
   }
 
+  @Test
   public void testNChar() throws SQLException {
     // Splits between 'Hand' and 'Hardy'
     NTextSplitter splitter = new NTextSplitter();
@@ -156,4 +172,9 @@
     assertEquals(false, splitter2.isUseNCharStrings());
   }
 
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestTextSplitter.class);
+  }
+
 }
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
index 043130e..32ebf45 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
@@ -20,6 +20,7 @@
 import java.sql.ResultSet;
 import java.util.List;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -29,24 +30,33 @@
 import com.cloudera.sqoop.testutil.MockResultSet;
 
 import junit.framework.TestCase;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
+@RunWith(JUnit4.class)
 public class TextSplitterHadoopConfIntegrationTest extends TestCase {
   private static final String TEXT_COL_NAME = "text_col_name";
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Test
   public void testDefaultValueOfUnsetBooleanParam() throws Exception {
     Configuration conf = Job.getInstance().getConfiguration();
     TextSplitter splitter = new TextSplitter();
     ResultSet rs = new MockResultSet();
-    try {
-      splitter.split(conf, rs, TEXT_COL_NAME);
-      fail();
-    } catch (ValidationException e) {
-      // expected to throw ValidationException with the a message about the
-      // "i-know-what-i-am-doing" prop
-      assertTrue(e.getMessage().contains(TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY));
-    }
+
+    // expectMessage matches a substring: the message must mention the
+    // ALLOW_TEXT_SPLITTER_PROPERTY ("i-know-what-i-am-doing") property.
+    thrown.expect(ValidationException.class);
+    thrown.expectMessage(TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY);
+    splitter.split(conf, rs, TEXT_COL_NAME);
   }
 
+  @Test
   public void testBooleanParamValue() throws Exception {
     Configuration conf = Job.getInstance().getConfiguration();
     conf.set(TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY, "true");
@@ -55,5 +65,10 @@
     List<InputSplit> splits = splitter.split(conf, rs, TEXT_COL_NAME);
     assertFalse(splits.isEmpty());
   }
+
+  // Workaround: Ant otherwise falls back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TextSplitterHadoopConfIntegrationTest.class);
+  }
 }