SQOOP-3091: Clean up expected exception logic in tests - Part I

(Boglarka Egyed via Attila Szabo)
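
This patch replaces JUnit 3 style try/fail/catch blocks with the JUnit 4
ExpectedException rule, adds @RunWith(JUnit4.class) and @Test annotations,
and exposes each class through a suite() adapter so Ant still picks it up.
A minimal, self-contained sketch of the pattern follows; the class and
method names are illustrative only and do not come from the Sqoop code base:

    import java.io.IOException;

    import junit.framework.JUnit4TestAdapter;
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;
    import org.junit.runner.RunWith;
    import org.junit.runners.JUnit4;

    @RunWith(JUnit4.class)
    public class ExpectedExceptionExampleTest {

      @Rule
      public ExpectedException thrown = ExpectedException.none();

      // Old style: wrap the call and fail() explicitly if nothing is thrown.
      @Test
      public void oldStyle() {
        try {
          throwingCall();
          org.junit.Assert.fail("Expected IOException");
        } catch (IOException e) {
          // expected
        }
      }

      // New style used throughout this patch: declare the expectation first,
      // then invoke the call; the rule fails the test if no exception occurs.
      @Test
      public void newStyle() throws IOException {
        thrown.expect(IOException.class);
        throwingCall();
      }

      private void throwingCall() throws IOException {
        throw new IOException("expected failure");
      }

      // Same workaround as in the patch: without this, Ant kept falling back
      // to the JUnit 3 runner for these classes.
      public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(ExpectedExceptionExampleTest.class);
      }
    }

Note that thrown.expect(...) must be set before the statement that is
expected to throw, and any assertions placed after that statement will not
execute.
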
diff --git a/src/test/com/cloudera/sqoop/TestAvroExport.java b/src/test/com/cloudera/sqoop/TestAvroExport.java
index b513138..750887a 100644
--- a/src/test/com/cloudera/sqoop/TestAvroExport.java
+++ b/src/test/com/cloudera/sqoop/TestAvroExport.java
@@ -37,6 +37,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.Conversions;
 import org.apache.avro.LogicalTypes;
 import org.apache.avro.Schema;
@@ -49,12 +50,22 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test that we can export Avro Data Files from HDFS into databases.
  */
+
+@RunWith(JUnit4.class)
 public class TestAvroExport extends ExportJobTestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   /**
    * @return an argv for the CodeGenTool to use when creating tables to export.
    */
@@ -342,6 +353,7 @@
     assertColValForRowId(maxId, colName, expectedMax);
   }
 
+  @Test
   public void testSupportedAvroTypes() throws IOException, SQLException {
     GenericData.get().addLogicalTypeConversion(new Conversions.DecimalConversion());
 
@@ -383,6 +395,7 @@
     }
   }
 
+  @Test
   public void testPathPatternInExportDir() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
@@ -403,6 +416,7 @@
     verifyExport(TOTAL_RECORDS);
   }
 
+  @Test
   public void testNullableField() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1 * 10;
@@ -421,6 +435,7 @@
     assertColMinAndMax(forIdx(1), gen1);
   }
 
+  @Test
   public void testAvroRecordsNotSupported() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -434,15 +449,12 @@
     ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
     createAvroFile(0, TOTAL_RECORDS,  gen);
     createTable(gen);
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-      fail("Avro records can not be exported.");
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
 
+  @Test
   public void testMissingDatabaseFields() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -458,6 +470,7 @@
   }
 
   // Test Case for Issue [SQOOP-2846]
+  @Test
   public void testAvroWithUpsert() throws IOException, SQLException {
     String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
     final int TOTAL_RECORDS = 2;
@@ -465,15 +478,13 @@
     // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
     createAvroFile(0, TOTAL_RECORDS, null);
     createTableWithInsert();
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
 
   // Test Case for Issue [SQOOP-2846]
+  @Test
   public void testAvroWithUpdateKey() throws IOException, SQLException {
     String[] argv = { "--update-key", "ID" };
     final int TOTAL_RECORDS = 1;
@@ -484,6 +495,8 @@
     runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
     verifyExport(getMsgPrefix() + "0");
   }
+
+  @Test
   public void testMissingAvroFields()  throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -492,15 +505,12 @@
     ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
     createAvroFile(0, TOTAL_RECORDS, gen);
     createTable(gen);
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-      fail("Missing Avro field.");
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
 
+  @Test
   public void testSpecifiedColumnsAsAvroFields()  throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
     ColumnGenerator[] gens = new ColumnGenerator[] {
@@ -523,4 +533,9 @@
     assertColValForRowId(9, "col3", null);
   }
 
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestAvroExport.class);
+  }
+
 }
diff --git a/src/test/com/cloudera/sqoop/TestConnFactory.java b/src/test/com/cloudera/sqoop/TestConnFactory.java
index 59c3455..ff41706 100644
--- a/src/test/com/cloudera/sqoop/TestConnFactory.java
+++ b/src/test/com/cloudera/sqoop/TestConnFactory.java
@@ -24,6 +24,7 @@
 import java.util.List;
 import java.util.Map;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
@@ -33,13 +34,23 @@
 import com.cloudera.sqoop.manager.ManagerFactory;
 import com.cloudera.sqoop.metastore.JobData;
 import com.cloudera.sqoop.tool.ImportTool;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test the ConnFactory implementation and its ability to delegate to multiple
  * different ManagerFactory implementations using reflection.
  */
+@RunWith(JUnit4.class)
 public class TestConnFactory extends TestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Test
   public void testCustomFactory() throws IOException {
     Configuration conf = new Configuration();
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
@@ -52,20 +63,19 @@
     assertTrue("Expected a DummyManager", manager instanceof DummyManager);
   }
 
-  public void testExceptionForNoManager() {
+  @Test
+  public void testExceptionForNoManager() throws IOException {
     Configuration conf = new Configuration();
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, EmptyFactory.class.getName());
 
     ConnFactory factory = new ConnFactory(conf);
-    try {
-      factory.getManager(
-          new JobData(new SqoopOptions(), new ImportTool()));
-      fail("factory.getManager() expected to throw IOException");
-    } catch (IOException ioe) {
-      // Expected this. Test passes.
-    }
+
+    thrown.expect(IOException.class);
+    factory.getManager(
+        new JobData(new SqoopOptions(), new ImportTool()));
   }
 
+  @Test
   public void testMultipleManagers() throws IOException {
     Configuration conf = new Configuration();
     // The AlwaysDummyFactory is second in this list. Nevertheless, since
@@ -185,4 +195,9 @@
     public void release() {
     }
   }
+
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestConnFactory.class);
+  }
 }
diff --git a/src/test/com/cloudera/sqoop/TestExportUpdate.java b/src/test/com/cloudera/sqoop/TestExportUpdate.java
index 95d7b6a..33a176a 100644
--- a/src/test/com/cloudera/sqoop/TestExportUpdate.java
+++ b/src/test/com/cloudera/sqoop/TestExportUpdate.java
@@ -27,6 +27,7 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,13 +35,22 @@
 
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test that we can update a copy of data in the database,
  * based on newer data in HDFS.
  */
+@RunWith(JUnit4.class)
 public class TestExportUpdate extends ExportJobTestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Override
   protected String getTablePrefix() {
     return "UPDATE_TABLE_";
@@ -405,6 +415,7 @@
         "--update-key", updateCol));
   }
 
+  @Test
   public void testBasicUpdate() throws Exception {
     // Test that we can do a single-task single-file update.
     // This updates the entire database.
@@ -424,6 +435,7 @@
    * and then modifies a subset of the rows via update.
    * @throws Exception
    */
+  @Test
   public void testMultiKeyUpdate() throws Exception {
     createMultiKeyTable(3);
 
@@ -461,6 +473,7 @@
    * and then modifies a subset of the rows via update.
    * @throws Exception
    */
+  @Test
   public void testMultiKeyUpdateMultipleFilesNoUpdate() throws Exception {
     createMultiKeyTable(4);
 
@@ -504,6 +517,7 @@
    * and then modifies a subset of the rows via update.
    * @throws Exception
    */
+  @Test
   public void testMultiKeyUpdateMultipleFilesFullUpdate() throws Exception {
     createMultiKeyTable(4);
 
@@ -542,7 +556,7 @@
         new int[] { 3, 2 }, 3, 2, "3bar2");
   }
 
-
+  @Test
   public void testEmptyTable() throws Exception {
     // Test that an empty table will "accept" updates that modify
     // no rows; no new data is injected into the database.
@@ -552,6 +566,7 @@
     verifyRowCount(0);
   }
 
+  @Test
   public void testEmptyFiles() throws Exception {
     // An empty input file results in no changes to a db table.
     populateDatabase(10);
@@ -564,6 +579,7 @@
     verifyRow("A", "9", "9", "foo9", "9");
   }
 
+  @Test
   public void testStringCol() throws Exception {
     // Test that we can do modifications based on the string "B" column.
     populateDatabase(10);
@@ -575,6 +591,7 @@
     verifyRow("B", "'foo9'", "18", "foo9", "18");
   }
 
+  @Test
   public void testLastCol() throws Exception {
     // Test that we can do modifications based on the third int column.
     populateDatabase(10);
@@ -586,6 +603,7 @@
     verifyRow("C", "9", "18", "foo18", "9");
   }
 
+  @Test
   public void testMultiMaps() throws Exception {
     // Test that we can handle multiple map tasks.
     populateDatabase(20);
@@ -600,6 +618,7 @@
     verifyRow("A", "19", "19", "foo38", "38");
   }
 
+  @Test
   public void testSubsetUpdate() throws Exception {
     // Update only a few rows in the middle of the table.
     populateDatabase(10);
@@ -619,6 +638,7 @@
     verifyRow("A", "7", "7", "foo14", "14");
   }
 
+  @Test
   public void testSubsetUpdate2() throws Exception {
     // Update only some of the rows in the db. Also include some
     // updates that do not affect actual rows in the table.
@@ -647,6 +667,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testUpdateColumnSubset() throws Exception {
     populateDatabase(4);
     createUpdateFiles(1, 3, 0);
@@ -675,15 +696,18 @@
    *
    * @throws Exception
    */
+  @Test
   public void testUpdateColumnNotInColumns() throws Exception {
     populateDatabase(1);
-    try {
-      runExport(getArgv(true, 2, 2, "-m", "1",
+
+    thrown.expect(IOException.class);
+    runExport(getArgv(true, 2, 2, "-m", "1",
         "--update-key", "A", "--columns", "B"));
-      fail("Expected IOException");
-    } catch (IOException e) {
-      assertTrue(true);
-    }
+  }
+
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestExportUpdate.class);
   }
 
 }
diff --git a/src/test/com/cloudera/sqoop/TestParquetExport.java b/src/test/com/cloudera/sqoop/TestParquetExport.java
index c6ddef6..94122f6 100644
--- a/src/test/com/cloudera/sqoop/TestParquetExport.java
+++ b/src/test/com/cloudera/sqoop/TestParquetExport.java
@@ -20,10 +20,16 @@
 
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
 import com.google.common.collect.Lists;
+import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericRecord;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 import org.kitesdk.data.*;
 
 import java.io.IOException;
@@ -41,8 +47,12 @@
 /**
  * Test that we can export Parquet Data Files from HDFS into databases.
  */
+@RunWith(JUnit4.class)
 public class TestParquetExport extends ExportJobTestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   /**
    * @return an argv for the CodeGenTool to use when creating tables to export.
    */
@@ -318,6 +328,7 @@
     assertColValForRowId(maxId, colName, expectedMax);
   }
 
+  @Test
   public void testSupportedParquetTypes() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1 * 10;
@@ -351,6 +362,7 @@
     }
   }
 
+  @Test
   public void testNullableField() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1 * 10;
@@ -369,6 +381,7 @@
     assertColMinAndMax(forIdx(1), gen1);
   }
 
+  @Test
   public void testParquetRecordsNotSupported() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -382,15 +395,12 @@
     ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
     createParquetFile(0, TOTAL_RECORDS,  gen);
     createTable(gen);
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-      fail("Parquet records can not be exported.");
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
 
+  @Test
   public void testMissingDatabaseFields() throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -405,6 +415,7 @@
     verifyExport(TOTAL_RECORDS);
   }
 
+  @Test
   public void testParquetWithUpdateKey() throws IOException, SQLException {
     String[] argv = { "--update-key", "ID" };
     final int TOTAL_RECORDS = 1;
@@ -415,6 +426,7 @@
   }
 
   // Test Case for Issue [SQOOP-2846]
+  @Test
   public void testParquetWithUpsert() throws IOException, SQLException {
     String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
     final int TOTAL_RECORDS = 2;
@@ -422,13 +434,12 @@
     // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
     createParquetFile(0, TOTAL_RECORDS, null);
     createTableWithInsert();
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
+
+  @Test
   public void testMissingParquetFields()  throws IOException, SQLException {
     String[] argv = {};
     final int TOTAL_RECORDS = 1;
@@ -437,13 +448,14 @@
     ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
     createParquetFile(0, TOTAL_RECORDS, gen);
     createTable(gen);
-    try {
-      runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-      fail("Missing Parquet field.");
-    } catch (Exception e) {
-      // expected
-      assertTrue(true);
-    }
+
+    thrown.expect(Exception.class);
+    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
+  }
+
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestParquetExport.class);
   }
 
 }
diff --git a/src/test/com/cloudera/sqoop/TestSqoopOptions.java b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
index d95f904..84dc7b0 100644
--- a/src/test/com/cloudera/sqoop/TestSqoopOptions.java
+++ b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
@@ -21,6 +21,7 @@
 import java.util.Properties;
 
 import com.cloudera.sqoop.tool.BaseSqoopTool;
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.commons.lang.ArrayUtils;
@@ -31,7 +32,9 @@
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 import org.junit.Before;
 import org.junit.After;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
@@ -48,6 +51,9 @@
 
   private Properties originalSystemProperties;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
   public void setup() {
    originalSystemProperties = System.getProperties();
@@ -66,22 +72,14 @@
 
   @Test
   public void testEmptyString() throws Exception {
-    try {
-      SqoopOptions.toChar("");
-      fail("Expected exception");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expect this.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    SqoopOptions.toChar("");
   }
 
   @Test
   public void testNullString() throws Exception {
-    try {
-      SqoopOptions.toChar(null);
-      fail("Expected exception");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expect this.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    SqoopOptions.toChar(null);
   }
 
   @Test
@@ -134,22 +132,14 @@
 
   @Test
   public void testUnknownEscape1() throws Exception {
-    try {
-      SqoopOptions.toChar("\\Q");
-      fail("Expected exception");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expect this.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    SqoopOptions.toChar("\\Q");
   }
 
   @Test
   public void testUnknownEscape2() throws Exception {
-    try {
-      SqoopOptions.toChar("\\nn");
-      fail("Expected exception");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expect this.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    SqoopOptions.toChar("\\nn");
   }
 
   @Test
@@ -184,22 +174,14 @@
 
   @Test
   public void testErrOctalChar() throws Exception {
-    try {
-      SqoopOptions.toChar("\\095");
-      fail("Expected exception");
-    } catch (NumberFormatException nfe) {
-      // expected.
-    }
+    thrown.expect(NumberFormatException.class);
+    SqoopOptions.toChar("\\095");
   }
 
   @Test
   public void testErrHexChar() throws Exception {
-    try {
-      SqoopOptions.toChar("\\0x9K5");
-      fail("Expected exception");
-    } catch (NumberFormatException nfe) {
-      // expected.
-    }
+    thrown.expect(NumberFormatException.class);
+    SqoopOptions.toChar("\\0x9K5");
   }
 
   private SqoopOptions parse(String [] argv) throws Exception {
@@ -258,12 +240,8 @@
       "x",
     };
 
-    try {
-      parse(args);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    parse(args);
   }
 
   @Test
@@ -273,12 +251,8 @@
       "x",
     };
 
-    try {
-      parse(args);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    parse(args);
   }
 
   @Test
@@ -719,12 +693,9 @@
       "--append",
       "--delete-target-dir",
     };
-    try {
-      validateImportOptions(extraArgs);
-      fail("Expected InvalidOptionsException");
-    } catch(SqoopOptions.InvalidOptionsException ioe) {
-      // Expected
-    }
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    validateImportOptions(extraArgs);
   }
 
   //test incompatability of --delete-target-dir with incremental import
@@ -734,12 +705,9 @@
       "--incremental", "append",
       "--delete-target-dir",
     };
-    try {
-      validateImportOptions(extraArgs);
-      fail("Expected InvalidOptionsException");
-    } catch(SqoopOptions.InvalidOptionsException ioe) {
-      // Expected
-    }
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    validateImportOptions(extraArgs);
   }
 
   // test that hbase bulk load import with table name and target dir
@@ -761,12 +729,9 @@
     String [] extraArgs = {
         longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
         longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
-    try {
-      validateImportOptions(extraArgs);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // Expected
-    }
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    validateImportOptions(extraArgs);
   }
 
   private static String longArgument(String argument) {
@@ -796,11 +761,13 @@
       "--split-by",
       "col0",
     };
-    try {
-      validateImportOptions(extraArgs);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // Expected
-    }
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    validateImportOptions(extraArgs);
+  }
+
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestSqoopOptions.class);
   }
 }
diff --git a/src/test/com/cloudera/sqoop/TestTargetDir.java b/src/test/com/cloudera/sqoop/TestTargetDir.java
index 7aad7e1..4816e70 100644
--- a/src/test/com/cloudera/sqoop/TestTargetDir.java
+++ b/src/test/com/cloudera/sqoop/TestTargetDir.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,15 +33,24 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test that --target-dir works.
  */
+@RunWith(JUnit4.class)
 public class TestTargetDir extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory
       .getLog(TestTargetDir.class.getName());
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   /**
    * Create the argv to pass to Sqoop.
    *
@@ -70,27 +80,22 @@
   }
 
   /** test invalid argument exception if several output options. */
+  @Test
   public void testSeveralOutputsIOException() throws IOException {
+    ArrayList args = getOutputArgv(true);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--target-dir");
+    args.add(getWarehouseDir());
 
-    try {
-      ArrayList args = getOutputArgv(true);
-      args.add("--warehouse-dir");
-      args.add(getWarehouseDir());
-      args.add("--target-dir");
-      args.add(getWarehouseDir());
+    String[] argv = (String[]) args.toArray(new String[0]);
 
-      String[] argv = (String[]) args.toArray(new String[0]);
-      runImport(argv);
-
-      fail("warehouse-dir & target-dir were set and run "
-          + "without problem reported");
-
-    } catch (IOException e) {
-      // expected
-    }
+    thrown.expect(IOException.class);
+    runImport(argv);
   }
 
   /** test target-dir contains imported files. */
+  @Test
   public void testTargetDir() throws IOException {
 
     try {
@@ -123,29 +128,29 @@
 
   /** test target-dir breaks if already existing
    * (only allowed in append mode). */
+  @Test
   public void testExistingTargetDir() throws IOException {
+    String targetDir = getWarehouseDir() + "/tempTargetDir";
 
-    try {
-      String targetDir = getWarehouseDir() + "/tempTargetDir";
+    ArrayList args = getOutputArgv(true);
+    args.add("--target-dir");
+    args.add(targetDir);
 
-      ArrayList args = getOutputArgv(true);
-      args.add("--target-dir");
-      args.add(targetDir);
-
-      // delete target-dir if exists and recreate it
-      FileSystem fs = FileSystem.get(getConf());
-      Path outputPath = new Path(targetDir);
-      if (!fs.exists(outputPath)) {
-        fs.mkdirs(outputPath);
-      }
-
-      String[] argv = (String[]) args.toArray(new String[0]);
-      runImport(argv);
-
-      fail("Existing target-dir run without problem report");
-
-    } catch (IOException e) {
-      // expected
+    // Ensure the target dir already exists so the import must fail.
+    FileSystem fs = FileSystem.get(getConf());
+    Path outputPath = new Path(targetDir);
+    if (!fs.exists(outputPath)) {
+      fs.mkdirs(outputPath);
     }
+
+    String[] argv = (String[]) args.toArray(new String[0]);
+
+    thrown.expect(IOException.class);
+    runImport(argv);
+  }
+
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestTargetDir.class);
   }
 }
diff --git a/src/test/org/apache/sqoop/TestExportUsingProcedure.java b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
index cf5e2cd..8182c8d 100644
--- a/src/test/org/apache/sqoop/TestExportUsingProcedure.java
+++ b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
@@ -29,6 +29,7 @@
 import java.sql.Time;
 import java.sql.Types;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.manager.GenericJdbcManager;
@@ -39,11 +40,18 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * We'll use H2 as a database as the version of HSQLDB we currently depend on
  * (1.8) doesn't include support for stored procedures.
  */
+@RunWith(JUnit4.class)
 public class TestExportUsingProcedure extends TestExport {
   private static final String PROCEDURE_NAME = "INSERT_PROCEDURE";
   /**
@@ -55,6 +63,17 @@
   private String[] types;
   private Connection connection;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Rule
+  public TestName testName = new TestName();
+
+  @Override
+  public String getName() {
+    return testName.getMethodName();
+  }
+
   @Override
   @Before
   public void setUp() {
@@ -193,31 +212,24 @@
   // TEST OVERRIDES
 
   @Override
-  public void testMultiMapTextExportWithStaging() throws IOException,
-      SQLException {
-    try {
-      super.testMultiMapTextExportWithStaging();
-      fail("staging tables not compatible with --call");
-    } catch (IOException e) {
-      // expected
-    }
+  @Test
+  public void testMultiMapTextExportWithStaging() throws IOException, SQLException {
+    thrown.expect(IOException.class);
+    super.testMultiMapTextExportWithStaging();
   }
 
   @Override
-  public void testMultiTransactionWithStaging() throws IOException,
-      SQLException {
-    try {
-      super.testMultiTransactionWithStaging();
-      fail("staging tables not compatible with --call");
-    } catch (IOException e) {
-      // expected
-    }
+  @Test
+  public void testMultiTransactionWithStaging() throws IOException, SQLException {
+    thrown.expect(IOException.class);
+    super.testMultiTransactionWithStaging();
   }
 
   /**
    * H2 renames the stored procedure arguments P1, P2, ..., Pn.
    */
   @Override
+  @Test
   public void testColumnsExport() throws IOException, SQLException {
     super.testColumnsExport("P1,P2,P3,P4");
   }
@@ -326,4 +338,9 @@
     });
   }
 
+  // Workaround: Ant kept falling back to the JUnit 3 runner.
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestExportUsingProcedure.class);
+  }
+
 }