SQOOP-3099: Remove all JUnit 3 TestCase extensions and test suites

(Anna Szonyi via Attila Szabo)
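
Every affected test class follows the same migration pattern: drop the
junit.framework.TestCase superclass, the TestSuite aggregators, and the
JUnit4TestAdapter suite() workaround; mark lifecycle methods with
@Before/@After; mark each test method with @Test; and statically import
assertions from org.junit.Assert. A minimal before/after sketch of that
pattern (FooJUnit3Test/FooJUnit4Test are hypothetical classes, not part
of Sqoop):

  // Before (JUnit 3): the class extends TestCase, tests are picked up by
  // the "test" name prefix, and a suite()/JUnit4TestAdapter workaround
  // keeps ant from falling back to the JUnit 3 runner.
  import junit.framework.TestCase;

  public class FooJUnit3Test extends TestCase {
      @Override
      public void setUp() { /* fixture setup */ }

      public void testAddition() {
          assertEquals(4, 2 + 2);
      }

      public static junit.framework.Test suite() {
          return new junit.framework.JUnit4TestAdapter(FooJUnit3Test.class);
      }
  }

  // After (JUnit 4): no TestCase superclass, lifecycle methods annotated
  // with @Before, each test annotated with @Test, and assertions
  // statically imported from org.junit.Assert.
  import org.junit.Before;
  import org.junit.Test;
  import static org.junit.Assert.assertEquals;

  public class FooJUnit4Test {
      @Before
      public void setUp() { /* fixture setup */ }

      @Test
      public void testAddition() {
          assertEquals(4, 2 + 2);
      }
  }
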
diff --git a/src/test/com/cloudera/sqoop/AllTests.java b/src/test/com/cloudera/sqoop/AllTests.java
deleted file mode 100644
index 1dfb0fc..0000000
--- a/src/test/com/cloudera/sqoop/AllTests.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * All tests for Sqoop (com.cloudera.sqoop).
- */
-public final class AllTests {
-
-  private AllTests() { }
-
-  public static Test suite() {
-    TestSuite suite = new TestSuite("All tests for com.cloudera.sqoop");
-
-    suite.addTest(SmokeTests.suite());
-    suite.addTest(ThirdPartyTests.suite());
-
-    return suite;
-  }
-
-}
-
diff --git a/src/test/com/cloudera/sqoop/SmokeTests.java b/src/test/com/cloudera/sqoop/SmokeTests.java
deleted file mode 100644
index c5dc860..0000000
--- a/src/test/com/cloudera/sqoop/SmokeTests.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import org.apache.sqoop.TestExportUsingProcedure;
-
-import com.cloudera.sqoop.hive.TestHiveImport;
-import com.cloudera.sqoop.hive.TestTableDefWriter;
-import com.cloudera.sqoop.io.TestLobFile;
-import com.cloudera.sqoop.io.TestNamedFifo;
-import com.cloudera.sqoop.io.TestSplittableBufferedWriter;
-import com.cloudera.sqoop.lib.TestBooleanParser;
-import com.cloudera.sqoop.lib.TestFieldFormatter;
-import com.cloudera.sqoop.lib.TestRecordParser;
-import com.cloudera.sqoop.lib.TestBlobRef;
-import com.cloudera.sqoop.lib.TestClobRef;
-import com.cloudera.sqoop.lib.TestLargeObjectLoader;
-import com.cloudera.sqoop.manager.TestHsqldbManager;
-import com.cloudera.sqoop.manager.TestSqlManager;
-import com.cloudera.sqoop.mapreduce.MapreduceTests;
-import com.cloudera.sqoop.metastore.TestSavedJobs;
-import com.cloudera.sqoop.orm.TestClassWriter;
-import com.cloudera.sqoop.orm.TestParseMethods;
-
-import com.cloudera.sqoop.tool.TestToolPlugin;
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * Smoke tests for Sqoop (com.cloudera.sqoop).
- */
-public final class SmokeTests {
-
-  private SmokeTests() { }
-
-  public static Test suite() {
-    TestSuite suite = new TestSuite("Smoke tests for com.cloudera.sqoop");
-
-    suite.addTestSuite(TestAllTables.class);
-    suite.addTestSuite(TestHsqldbManager.class);
-    suite.addTestSuite(TestSqlManager.class);
-    suite.addTestSuite(TestClassWriter.class);
-    suite.addTestSuite(TestColumnTypes.class);
-    suite.addTestSuite(TestExport.class);
-    suite.addTestSuite(TestMultiCols.class);
-    suite.addTestSuite(TestMultiMaps.class);
-    suite.addTestSuite(TestSplitBy.class);
-    suite.addTestSuite(TestQuery.class);
-    suite.addTestSuite(TestWhere.class);
-    suite.addTestSuite(TestTargetDir.class);
-    suite.addTestSuite(TestAppendUtils.class);
-    suite.addTestSuite(TestHiveImport.class);
-    suite.addTestSuite(TestRecordParser.class);
-    suite.addTestSuite(TestFieldFormatter.class);
-    suite.addTestSuite(TestSqoopOptions.class);
-    suite.addTestSuite(TestParseMethods.class);
-    suite.addTestSuite(TestConnFactory.class);
-    suite.addTestSuite(TestSplittableBufferedWriter.class);
-    suite.addTestSuite(TestTableDefWriter.class);
-    suite.addTestSuite(TestBlobRef.class);
-    suite.addTestSuite(TestClobRef.class);
-    suite.addTestSuite(TestLargeObjectLoader.class);
-    suite.addTestSuite(TestLobFile.class);
-    suite.addTestSuite(TestExportUpdate.class);
-    suite.addTestSuite(TestSavedJobs.class);
-    suite.addTestSuite(TestNamedFifo.class);
-    suite.addTestSuite(TestBooleanParser.class);
-    suite.addTestSuite(TestMerge.class);
-    suite.addTestSuite(TestToolPlugin.class);
-    suite.addTestSuite(TestExportUsingProcedure.class);
-    suite.addTest(MapreduceTests.suite());
-
-    return suite;
-  }
-
-}
-
diff --git a/src/test/com/cloudera/sqoop/TestAllTables.java b/src/test/com/cloudera/sqoop/TestAllTables.java
index f981024..232b82f 100644
--- a/src/test/com/cloudera/sqoop/TestAllTables.java
+++ b/src/test/com/cloudera/sqoop/TestAllTables.java
@@ -35,10 +35,15 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.tool.ImportAllTablesTool;
+import org.junit.Test;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
 import org.kitesdk.data.Datasets;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+
 /**
  * Test the --all-tables functionality that can import multiple tables.
  */
@@ -129,6 +134,7 @@
     super.tearDown();
   }
 
+  @Test
   public void testMultiTableImport() throws IOException {
     String [] argv = getArgv(null, null);
     runImport(new ImportAllTablesTool(), argv);
@@ -165,6 +171,7 @@
     }
   }
 
+  @Test
   public void testMultiTableImportAsParquetFormat() throws IOException {
     String [] argv = getArgv(new String[]{"--as-parquetfile"}, null);
     runImport(new ImportAllTablesTool(), argv);
@@ -194,6 +201,7 @@
     }
   }
 
+  @Test
   public void testMultiTableImportWithExclude() throws IOException {
     String exclude = this.tableNames.get(0);
     String [] argv = getArgv(null, new String[]{ exclude });
diff --git a/src/test/com/cloudera/sqoop/TestAppendUtils.java b/src/test/com/cloudera/sqoop/TestAppendUtils.java
index b09dfc6..486afee 100644
--- a/src/test/com/cloudera/sqoop/TestAppendUtils.java
+++ b/src/test/com/cloudera/sqoop/TestAppendUtils.java
@@ -41,6 +41,12 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.AppendUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --append works.
@@ -245,6 +251,7 @@
   }
 
   /** independent to target-dir. */
+  @Test
   public void testAppend() throws IOException {
     ArrayList args = getOutputlessArgv(false, false, HsqldbTestServer.getFieldNames(), getConf());
     args.add("--warehouse-dir");
@@ -255,6 +262,7 @@
   }
 
   /** working with target-dir. */
+  @Test
   public void testAppendToTargetDir() throws IOException {
     ArrayList args = getOutputlessArgv(false, false, HsqldbTestServer.getFieldNames(), getConf());
     String targetDir = getWarehouseDir() + "/tempTargetDir";
@@ -272,6 +280,7 @@
    *
    * @throws IOException
    */
+  @Test
   public void testAppendWithQuery() throws IOException {
     ArrayList args = getOutputlessArgv(false, true, HsqldbTestServer.getFieldNames(), getConf());
     String targetDir = getWarehouseDir() + "/tempTargetDir";
@@ -285,6 +294,7 @@
   /**
    * If the append source does not exist, don't crash.
    */
+  @Test
   public void testAppendSrcDoesNotExist() throws IOException {
     Configuration conf = new Configuration();
     if (!isOnPhysicalCluster()) {
diff --git a/src/test/com/cloudera/sqoop/TestAvroExport.java b/src/test/com/cloudera/sqoop/TestAvroExport.java
index 750887a..487c12b 100644
--- a/src/test/com/cloudera/sqoop/TestAvroExport.java
+++ b/src/test/com/cloudera/sqoop/TestAvroExport.java
@@ -19,6 +19,7 @@
 package com.cloudera.sqoop;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
 
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.testutil.CommonArgs;
@@ -37,7 +38,6 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.Conversions;
 import org.apache.avro.LogicalTypes;
 import org.apache.avro.Schema;
@@ -50,17 +50,14 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.Rule;
 import org.junit.Test;
+import org.junit.Rule;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test that we can export Avro Data Files from HDFS into databases.
  */
 
-@RunWith(JUnit4.class)
 public class TestAvroExport extends ExportJobTestCase {
 
   @Rule
@@ -533,9 +530,4 @@
     assertColValForRowId(9, "col3", null);
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestAvroExport.class);
-  }
-
 }
diff --git a/src/test/com/cloudera/sqoop/TestAvroImport.java b/src/test/com/cloudera/sqoop/TestAvroImport.java
index 0a64b52..26edd4c 100644
--- a/src/test/com/cloudera/sqoop/TestAvroImport.java
+++ b/src/test/com/cloudera/sqoop/TestAvroImport.java
@@ -45,6 +45,12 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Tests --as-avrodatafile.
@@ -84,22 +90,26 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testAvroImport() throws IOException {
     this.setCurTableName("Avro_Import_Test");
     avroImportTestHelper(null, null);
   }
 
+  @Test
   public void testDeflateCompressedAvroImport() throws IOException {
     this.setCurTableName("Deflate_Compressed_Avro_Import_Test_1");
     avroImportTestHelper(new String[] {"--compression-codec",
       "org.apache.hadoop.io.compress.DefaultCodec", }, "deflate");
   }
 
+  @Test
   public void testDefaultCompressedAvroImport() throws IOException {
     this.setCurTableName("Deflate_Compressed_Avro_Import_Test_2");
     avroImportTestHelper(new String[] {"--compress", }, "deflate");
   }
 
+  @Test
   public void testUnsupportedCodec() throws IOException {
     try {
       this.setCurTableName("Deflate_Compressed_Avro_Import_Test_3");
@@ -169,6 +179,7 @@
     checkSchemaFile(schema);
   }
 
+  @Test
   public void testOverrideTypeMapping() throws IOException {
     String [] types = { "INT" };
     String [] vals = { "10" };
@@ -191,6 +202,7 @@
     assertEquals("DATA_COL0", new Utf8("10"), record1.get("DATA_COL0"));
   }
 
+  @Test
   public void testFirstUnderscoreInColumnName() throws IOException {
     String [] names = { "_NAME" };
     String [] types = { "INT" };
@@ -212,6 +224,7 @@
     assertEquals("__NAME", 1987, record1.get("__NAME"));
   }
 
+  @Test
   public void testNonstandardCharactersInColumnName() throws IOException {
     String [] names = { "avro\uC3A11" };
     String [] types = { "INT" };
@@ -234,6 +247,7 @@
     assertEquals("AVRO\uC3A11", 1987, record1.get("AVRO\uC3A11"));
   }
 
+  @Test
   public void testNonIdentCharactersInColumnName() throws IOException {
     String [] names = { "test_a-v+r/o" };
     String [] types = { "INT" };
@@ -258,6 +272,7 @@
   /*
    * Test Case For checking multiple columns having non standard characters in multiple columns
    */
+  @Test
   public void testNonstandardCharactersInMultipleColumns() throws IOException {
     String[] names = { "id$1", "id1$" };
     String[] types = { "INT", "INT" };
@@ -289,6 +304,7 @@
     assertEquals(type, field.schema().getTypes().get(1).getType());
   }
 
+  @Test
   public void testNullableAvroImport() throws IOException, SQLException {
     String [] types = { "INT" };
     String [] vals = { null };
diff --git a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java b/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
index fd4275d..8e718c3 100644
--- a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
+++ b/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
@@ -34,6 +34,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * Tests importing a database table as an Avro Data File then back to the
@@ -44,6 +47,7 @@
   public static final Log LOG = LogFactory
       .getLog(TestAvroImportExportRoundtrip.class.getName());
 
+  @Test
   public void testRoundtripQuery() throws IOException, SQLException {
     String[] argv = {};
 
@@ -55,6 +59,7 @@
     checkFirstColumnSum();
   }
 
+  @Test
   public void testRoundtrip() throws IOException, SQLException {
     String[] argv = {};
 
@@ -158,13 +163,6 @@
     return args.toArray(new String[0]);
   }
 
-  /**
-   * Create the argv to pass to Sqoop.
-   * @param includeHadoopFlags if true, then include -D various.settings=values
-   * @param rowsPerStmt number of rows to export in a single INSERT statement.
-   * @param statementsPerTx ## of statements to use in a transaction.
-   * @return the argv as an array of strings.
-   */
   protected ArrayList<String> formatAdditionalArgs(String... additionalArgv) {
     ArrayList<String> args = new ArrayList<String>();
 
diff --git a/src/test/com/cloudera/sqoop/TestBoundaryQuery.java b/src/test/com/cloudera/sqoop/TestBoundaryQuery.java
index 99f652e..925bec3 100644
--- a/src/test/com/cloudera/sqoop/TestBoundaryQuery.java
+++ b/src/test/com/cloudera/sqoop/TestBoundaryQuery.java
@@ -34,6 +34,10 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --boundary-query works in Sqoop.
@@ -164,6 +168,7 @@
     }
   }
 
+  @Test
   public void testBoundaryQuery() throws IOException {
     System.out.println("PCYO");
     String query = "select min(intfield1), max(intfield1) from "
@@ -173,6 +178,7 @@
       "--m", "1", "--split-by", "INTFIELD1");
   }
 
+  @Test
   public void testNoBoundaryQuerySingleMapper() throws IOException {
 
       runQueryTest(null, false, 4, 16, getTablePath().toString(),
diff --git a/src/test/com/cloudera/sqoop/TestCompression.java b/src/test/com/cloudera/sqoop/TestCompression.java
index 841f054..7110510 100644
--- a/src/test/com/cloudera/sqoop/TestCompression.java
+++ b/src/test/com/cloudera/sqoop/TestCompression.java
@@ -42,6 +42,10 @@
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test that compression options (--compress, --compression-codec) work.
@@ -181,14 +185,17 @@
     assertEquals(expectedNum, numLines);
   }
 
+  @Test
   public void testDefaultTextCompression() throws IOException {
     runTextCompressionTest(null, 4);
   }
 
+  @Test
   public void testBzip2TextCompression() throws IOException {
     runTextCompressionTest(new BZip2Codec(), 4);
   }
 
+  @Test
   public void testBzip2SequenceFileCompression() throws Exception {
     runSequenceFileCompressionTest(new BZip2Codec(), 4);
   }
diff --git a/src/test/com/cloudera/sqoop/TestConnFactory.java b/src/test/com/cloudera/sqoop/TestConnFactory.java
index ff41706..62035cb 100644
--- a/src/test/com/cloudera/sqoop/TestConnFactory.java
+++ b/src/test/com/cloudera/sqoop/TestConnFactory.java
@@ -24,9 +24,6 @@
 import java.util.List;
 import java.util.Map;
 
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 
 import com.cloudera.sqoop.manager.ConnManager;
@@ -34,22 +31,22 @@
 import com.cloudera.sqoop.manager.ManagerFactory;
 import com.cloudera.sqoop.metastore.JobData;
 import com.cloudera.sqoop.tool.ImportTool;
-import org.junit.Rule;
 import org.junit.Test;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import org.junit.Rule;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test the ConnFactory implementation and its ability to delegate to multiple
  * different ManagerFactory implementations using reflection.
  */
-@RunWith(JUnit4.class)
-public class TestConnFactory extends TestCase {
+
+public class TestConnFactory {
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
-
   @Test
   public void testCustomFactory() throws IOException {
     Configuration conf = new Configuration();
@@ -64,6 +61,7 @@
   }
 
   @Test
+
   public void testExceptionForNoManager() throws IOException {
     Configuration conf = new Configuration();
     conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, EmptyFactory.class.getName());
@@ -196,8 +194,4 @@
     }
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestConnFactory.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/TestDirectImport.java b/src/test/com/cloudera/sqoop/TestDirectImport.java
index f48c112..927b1fe 100644
--- a/src/test/com/cloudera/sqoop/TestDirectImport.java
+++ b/src/test/com/cloudera/sqoop/TestDirectImport.java
@@ -21,17 +21,13 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import junit.framework.JUnit4TestAdapter;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-
 
 import java.io.IOException;
 import java.util.ArrayList;
 
-@RunWith(value = org.junit.runners.JUnit4.class)
 public class TestDirectImport extends ImportJobTestCase {
 
   @Rule
@@ -81,8 +77,5 @@
     runImport(argv);
 
   }
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestDirectImport.class);
-  }
+
 }
diff --git a/src/test/com/cloudera/sqoop/TestExport.java b/src/test/com/cloudera/sqoop/TestExport.java
index 00d17c2..df5a663 100644
--- a/src/test/com/cloudera/sqoop/TestExport.java
+++ b/src/test/com/cloudera/sqoop/TestExport.java
@@ -50,6 +50,11 @@
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
 import com.cloudera.sqoop.tool.CodeGenTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test that we can export data from HDFS into databases.
@@ -454,11 +459,13 @@
   /**
    * Run an "export" on an empty file.
    */
+  @Test
   public void testEmptyExport() throws IOException, SQLException {
     multiFileTest(1, 0, 1);
   }
 
   /** Export 10 rows, make sure they load in correctly. */
+  @Test
   public void testTextExport() throws IOException, SQLException {
     multiFileTest(1, 10, 1);
   }
@@ -466,6 +473,7 @@
   /** Make sure we can use CombineFileInputFormat to handle multiple
    * files in a single mapper.
    */
+  @Test
   public void testMultiFilesOneMapper() throws IOException, SQLException {
     multiFileTest(2, 10, 1);
   }
@@ -473,11 +481,13 @@
   /** Make sure we can use CombineFileInputFormat to handle multiple
    * files and multiple maps.
    */
+  @Test
   public void testMultiFilesMultiMaps() throws IOException, SQLException {
     multiFileTest(2, 10, 2);
   }
 
   /** Export 10 rows from gzipped text files. */
+  @Test
   public void testGzipExport() throws IOException, SQLException {
 
     LOG.info("Beginning gzip export test");
@@ -494,6 +504,7 @@
   /**
    * Ensure that we use multiple statements in a transaction.
    */
+  @Test
   public void testMultiStatement() throws IOException, SQLException {
     final int TOTAL_RECORDS = 20;
     createTextFile(0, TOTAL_RECORDS, true);
@@ -505,6 +516,7 @@
   /**
    * Ensure that we use multiple transactions in a single mapper.
    */
+  @Test
   public void testMultiTransaction() throws IOException, SQLException {
     final int TOTAL_RECORDS = 20;
     createTextFile(0, TOTAL_RECORDS, true);
@@ -518,6 +530,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testMultiTransactionWithStaging()
     throws IOException, SQLException {
     final int TOTAL_RECORDS = 20;
@@ -532,6 +545,7 @@
    * Ensure that when we don't force a commit with a statement cap,
    * it happens anyway.
    */
+  @Test
   public void testUnlimitedTransactionSize() throws IOException, SQLException {
     final int TOTAL_RECORDS = 20;
     createTextFile(0, TOTAL_RECORDS, true);
@@ -541,6 +555,7 @@
   }
 
   /** Run 2 mappers, make sure all records load in correctly. */
+  @Test
   public void testMultiMapTextExport() throws IOException, SQLException {
 
     final int RECORDS_PER_MAP = 10;
@@ -559,6 +574,7 @@
    * Run 2 mappers with staging enabled,
    * make sure all records load in correctly.
    */
+  @Test
   public void testMultiMapTextExportWithStaging()
   throws IOException, SQLException {
 
@@ -576,6 +592,7 @@
   }
 
   /** Export some rows from a SequenceFile, make sure they import correctly. */
+  @Test
   public void testSequenceFileExport() throws Exception {
 
     final int TOTAL_RECORDS = 10;
@@ -640,6 +657,7 @@
     }
   }
 
+  @Test
   public void testIntCol() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
@@ -668,6 +686,7 @@
     return "BIGINT";
   }
 
+  @Test
   public void testBigIntCol() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
@@ -737,6 +756,7 @@
     };
   }
 
+  @Test
   public void testDatesAndTimes() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
@@ -751,6 +771,7 @@
     assertColMinAndMax(forIdx(1), genTime);
   }
 
+  @Test
   public void testNumericTypes() throws IOException, SQLException {
     final int TOTAL_RECORDS = 9;
 
@@ -794,6 +815,7 @@
     assertColMinAndMax(forIdx(1), genNumeric);
   }
 
+  @Test
   public void testColumnsExport() throws IOException, SQLException {
     testColumnsExport("ID,MSG," + forIdx(0) + "," + forIdx(2));
   }
diff --git a/src/test/com/cloudera/sqoop/TestExportUpdate.java b/src/test/com/cloudera/sqoop/TestExportUpdate.java
index 33a176a..81f3a68 100644
--- a/src/test/com/cloudera/sqoop/TestExportUpdate.java
+++ b/src/test/com/cloudera/sqoop/TestExportUpdate.java
@@ -27,7 +27,6 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -35,17 +34,19 @@
 
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test that we can update a copy of data in the database,
  * based on newer data in HDFS.
  */
-@RunWith(JUnit4.class)
 public class TestExportUpdate extends ExportJobTestCase {
 
   @Rule
@@ -705,9 +706,4 @@
         "--update-key", "A", "--columns", "B"));
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestExportUpdate.class);
-  }
-
 }
diff --git a/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java b/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java
index 81f1c59..4f9e652 100644
--- a/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java
+++ b/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java
@@ -36,6 +36,10 @@
 
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.junit.After;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * Test free form query import.
@@ -58,7 +62,7 @@
   /** the names of the tables we're creating. */
   private List<String> tableNames;
 
-  @Override
+  @After
   public void tearDown() {
     // Clean up the database on our way out.
     for (String tableName : tableNames) {
@@ -102,6 +106,7 @@
    * import on the result table that is created by joining the two tables on
    * the id column.
    */
+  @Test
   public void testSimpleJoin() throws IOException {
     tableNames = new ArrayList<String>();
 
diff --git a/src/test/com/cloudera/sqoop/TestIncrementalImport.java b/src/test/com/cloudera/sqoop/TestIncrementalImport.java
index d62532c..57f4433 100644
--- a/src/test/com/cloudera/sqoop/TestIncrementalImport.java
+++ b/src/test/com/cloudera/sqoop/TestIncrementalImport.java
@@ -30,7 +30,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import junit.framework.TestCase;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -48,8 +48,11 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.tool.JobTool;
+import org.junit.Before;
+import org.junit.Test;
 
-import javax.management.Query;
+
+import static org.junit.Assert.*;
 
 /**
  * Test the incremental import functionality.
@@ -58,7 +61,8 @@
  * The metastore URL is configured to be in-memory, and drop all
  * state between individual tests.
  */
-public class TestIncrementalImport extends TestCase {
+
+public class TestIncrementalImport  {
 
   public static final Log LOG = LogFactory.getLog(
       TestIncrementalImport.class.getName());
@@ -66,7 +70,7 @@
   // What database do we read from.
   public static final String SOURCE_DB_URL = "jdbc:hsqldb:mem:incremental";
 
-  @Override
+  @Before
   public void setUp() throws Exception {
     // Delete db state between tests.
     TestSavedJobs.resetJobSchema();
@@ -645,6 +649,7 @@
   }
 
   // Incremental import of an empty table, no metastore.
+  @Test
   public void testEmptyAppendImport() throws Exception {
     final String TABLE_NAME = "emptyAppend1";
     createIdTable(TABLE_NAME, 0);
@@ -659,6 +664,7 @@
   }
 
   // Incremental import of a filled table, no metastore.
+  @Test
   public void testFullAppendImport() throws Exception {
     final String TABLE_NAME = "fullAppend1";
     createIdTable(TABLE_NAME, 10);
@@ -672,6 +678,7 @@
     assertDirOfNumbers(TABLE_NAME, 10);
   }
 
+  @Test
   public void testEmptyJobAppend() throws Exception {
     // Create a job and run an import on an empty table.
     // Nothing should happen.
@@ -690,6 +697,7 @@
     assertDirOfNumbers(TABLE_NAME, 0);
   }
 
+  @Test
   public void testEmptyThenFullJobAppend() throws Exception {
     // Create an empty table. Import it; nothing happens.
     // Add some rows. Verify they are appended.
@@ -717,6 +725,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testEmptyThenFullJobAppendWithQuery() throws Exception {
     // Create an empty table. Import it; nothing happens.
     // Add some rows. Verify they are appended.
@@ -748,6 +757,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testAppend() throws Exception {
     // Create a table with data in it; import it.
     // Then add more data, verify that only the incremental data is pulled.
@@ -768,6 +778,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testEmptyLastModified() throws Exception {
     final String TABLE_NAME = "emptyLastModified";
     createTimestampTable(TABLE_NAME, 0, null);
@@ -781,6 +792,7 @@
     assertDirOfNumbers(TABLE_NAME, 0);
   }
 
+  @Test
   public void testEmptyLastModifiedWithNonExistingParentDirectory() throws Exception {
     final String TABLE_NAME = "emptyLastModifiedNoParent";
     final String QUERY = "SELECT id, last_modified FROM \"" + TABLE_NAME + "\" WHERE $CONDITIONS";
@@ -796,6 +808,7 @@
     assertDirOfNumbers(DIRECTORY, 0);
   }
 
+  @Test
   public void testFullLastModifiedImport() throws Exception {
     // Given a table of rows imported in the past,
     // see that they are imported.
@@ -813,6 +826,7 @@
     assertDirOfNumbers(TABLE_NAME, 10);
   }
 
+  @Test
   public void testNoImportFromTheFuture() throws Exception {
     // If last-modified dates for writes are serialized to be in the
     // future w.r.t. an import, do not import these rows.
@@ -831,6 +845,7 @@
     assertDirOfNumbers(TABLE_NAME, 0);
   }
 
+  @Test
   public void testEmptyJobLastMod() throws Exception {
     // Create a job and run an import on an empty table.
     // Nothing should happen.
@@ -850,6 +865,7 @@
     assertDirOfNumbers(TABLE_NAME, 0);
   }
 
+  @Test
   public void testEmptyThenFullJobLastMod() throws Exception {
     // Create an empty table. Import it; nothing happens.
     // Add some rows. Verify they are appended.
@@ -894,6 +910,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testAppendWithTimestamp() throws Exception {
     // Create a table with data in it; import it.
     // Then add more data, verify that only the incremental data is pulled.
@@ -921,6 +938,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testAppendWithString() throws Exception {
     // Create a table with string column in it;
     // incrementally import it on the string column - it should fail.
@@ -939,7 +957,7 @@
       //expected
     }
   }
-
+  @Test
   public void testModifyWithTimestamp() throws Exception {
     // Create a table with data in it; import it.
     // Then modify some existing rows, and verify that we only grab
@@ -981,7 +999,7 @@
     runJob(TABLE_NAME);
     assertFirstSpecificNumber(TABLE_NAME, 4000);
   }
-
+  @Test
   public void testUpdateModifyWithTimestamp() throws Exception {
     // Create a table with data in it; import it.
     // Then modify some existing rows, and verify that we only grab
@@ -1033,6 +1051,7 @@
     assertSpecificNumber(TABLE_NAME, 4000);
   }
 
+  @Test
   public void testUpdateModifyWithTimestampWithQuery() throws Exception {
     // Create an empty table. Import it; nothing happens.
     // Add some rows. Verify they are appended.
@@ -1086,6 +1105,7 @@
     assertSpecificNumber(TABLE_NAME, 4000);
   }
 
+  @Test
   public void testUpdateModifyWithTimestampJob() throws Exception {
     // Create a table with data in it; import it.
     // Then modify some existing rows, and verify that we only grab
@@ -1162,6 +1182,7 @@
     }
   }
 
+  @Test
   public void testTimestampBoundary() throws Exception {
     // Run an import, and then insert rows with the last-modified timestamp
     // set to the exact time when the first import runs. Run a second import
@@ -1200,6 +1221,7 @@
     assertDirOfNumbers(TABLE_NAME, 20);
   }
 
+  @Test
   public void testIncrementalAppendTimestamp() throws Exception {
     // Run an import, and then insert rows with the last-modified timestamp
     // set to the exact time when the first import runs. Run a second import
@@ -1236,7 +1258,7 @@
     runJob(TABLE_NAME);
     assertDirOfNumbers(TABLE_NAME, 20);
   }
-  
+  @Test
 	public void testIncrementalHiveAppendEmptyThenFull() throws Exception {
 		// This is to test Incremental Hive append feature. SQOOP-2470
 		final String TABLE_NAME = "incrementalHiveAppendEmptyThenFull";
@@ -1296,6 +1318,7 @@
 	}
 
   // SQOOP-1890
+  @Test
   public void testTableNameWithSpecialCharacters() throws Exception {
     // Table name with special characters to verify proper table name escaping
     final String TABLE_NAME = "my-table.ext";
diff --git a/src/test/com/cloudera/sqoop/TestMerge.java b/src/test/com/cloudera/sqoop/TestMerge.java
index 1709419..114e934 100644
--- a/src/test/com/cloudera/sqoop/TestMerge.java
+++ b/src/test/com/cloudera/sqoop/TestMerge.java
@@ -52,6 +52,10 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 /**
  * Test that the merge tool works.
@@ -79,7 +83,7 @@
       Arrays.asList(new Integer(1), new Integer(43)),
       Arrays.asList(new Integer(3), new Integer(313)));
 
-  @Override
+  @Before
   public void setUp() {
     super.setUp();
     manager = getManager();
@@ -145,10 +149,12 @@
     conn.commit();
   }
 
+  @Test
   public void testTextFileMerge() throws Exception {
     runMergeTest(SqoopOptions.FileLayout.TextFile);
   }
 
+  @Test
   public void testAvroFileMerge() throws Exception {
     runMergeTest(SqoopOptions.FileLayout.AvroDataFile);
   }
diff --git a/src/test/com/cloudera/sqoop/TestMultiCols.java b/src/test/com/cloudera/sqoop/TestMultiCols.java
index e165cfe..94721b8 100644
--- a/src/test/com/cloudera/sqoop/TestMultiCols.java
+++ b/src/test/com/cloudera/sqoop/TestMultiCols.java
@@ -24,6 +24,7 @@
 import org.apache.commons.logging.LogFactory;
 
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.junit.Test;
 
 /**
  * Test cases that import rows containing multiple columns,
@@ -40,11 +41,8 @@
    * Do a full import verification test on a table containing one row.
    * @param types the types of the columns to insert
    * @param insertVals the SQL text to use to insert each value
-   * @param validateVals the text to expect when retrieving each value from
-   * the db
    * @param validateLine the text to expect as a toString() of the entire row,
    * as imported by the tool
-   * @param importColumns The list of columns to import
    */
   private void verifyTypes(String [] types , String [] insertVals,
       String validateLine) {
@@ -59,6 +57,7 @@
     LOG.debug("Verified input line as " + validateLine + " -- ok!");
   }
 
+  @Test
   public void testThreeStrings() {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "'foo'", "'bar'", "'baz'" };
@@ -67,6 +66,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testStringsWithNull1() {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "'foo'", "null", "'baz'" };
@@ -75,6 +75,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testStringsWithNull2() {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "null", "'foo'", "'baz'" };
@@ -83,6 +84,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testStringsWithNull3() {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "'foo'", "'baz'", "null"};
@@ -91,6 +93,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testThreeInts() {
     String [] types = { "INTEGER", "INTEGER", "INTEGER" };
     String [] insertVals = { "1", "2", "3" };
@@ -99,6 +102,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testIntsWithNulls() {
     String [] types = { "INTEGER", "INTEGER", "INTEGER" };
     String [] insertVals = { "1", "null", "3" };
@@ -107,6 +111,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed1() {
     String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
     String [] insertVals = { "1", "'meep'", "'2009-12-31'" };
@@ -115,6 +120,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed2() {
     String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
     String [] insertVals = { "null", "'meep'", "'2009-12-31'" };
@@ -123,6 +129,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed3() {
     String [] types = { "INTEGER", "VARCHAR(32)", "DATE" };
     String [] insertVals = { "1", "'meep'", "null" };
@@ -131,6 +138,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed4() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "-42", "17", "33333333333333333333333.1714" };
@@ -139,6 +147,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed5() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "null", "17", "33333333333333333333333.0" };
@@ -147,6 +156,7 @@
     verifyTypes(types, insertVals, validateLine);
   }
 
+  @Test
   public void testMixed6() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "33333333333333333333333", "17", "-42"};
@@ -160,6 +170,7 @@
   // we can selectively import only certain columns.
   //////////////////////////////////////////////////////////////////////////
 
+  @Test
   public void testSkipFirstCol() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "33333333333333333333333", "17", "-42"};
@@ -170,6 +181,7 @@
     verifyTypes(types, insertVals, validateLine, loadCols);
   }
 
+  @Test
   public void testSkipSecondCol() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "33333333333333333333333", "17", "-42"};
@@ -180,6 +192,7 @@
     verifyTypes(types, insertVals, validateLine, loadCols);
   }
 
+  @Test
   public void testSkipThirdCol() {
     String [] types = { "NUMERIC", "INTEGER", "NUMERIC" };
     String [] insertVals = { "33333333333333333333333", "17", "-42"};
@@ -198,6 +211,7 @@
    *
    * @throws IOException
    */
+  @Test
   public void testSingleColumnsArg() throws IOException {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "'foo'", "'bar'", "'baz'" };
@@ -215,6 +229,7 @@
    *
    * @throws IOException
    */
+  @Test
   public void testColumnsWithSpaces() throws IOException {
     String [] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)" };
     String [] insertVals = { "'foo'", "'bar'", "'baz'" };
diff --git a/src/test/com/cloudera/sqoop/TestMultiMaps.java b/src/test/com/cloudera/sqoop/TestMultiMaps.java
index ac23229..c5f487b 100644
--- a/src/test/com/cloudera/sqoop/TestMultiMaps.java
+++ b/src/test/com/cloudera/sqoop/TestMultiMaps.java
@@ -37,6 +37,10 @@
 import com.cloudera.sqoop.orm.CompilationManager;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that using multiple mapper splits works.
@@ -177,6 +181,7 @@
     }
   }
 
+  @Test
   public void testSplitByFirstCol() throws IOException {
     runMultiMapTest("INTFIELD1", HsqldbTestServer.getFirstColSum());
   }
diff --git a/src/test/com/cloudera/sqoop/TestParquetExport.java b/src/test/com/cloudera/sqoop/TestParquetExport.java
index 94122f6..3c3db33 100644
--- a/src/test/com/cloudera/sqoop/TestParquetExport.java
+++ b/src/test/com/cloudera/sqoop/TestParquetExport.java
@@ -20,16 +20,14 @@
 
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
 import com.google.common.collect.Lists;
-import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericRecord;
 import org.junit.Rule;
+
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 import org.kitesdk.data.*;
 
 import java.io.IOException;
@@ -43,11 +41,12 @@
 import java.util.List;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
 
 /**
  * Test that we can export Parquet Data Files from HDFS into databases.
  */
-@RunWith(JUnit4.class)
 public class TestParquetExport extends ExportJobTestCase {
 
   @Rule
@@ -453,9 +452,5 @@
     runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestParquetExport.class);
-  }
 
 }
diff --git a/src/test/com/cloudera/sqoop/TestParquetImport.java b/src/test/com/cloudera/sqoop/TestParquetImport.java
index 2a6dd6c..4ff025b 100644
--- a/src/test/com/cloudera/sqoop/TestParquetImport.java
+++ b/src/test/com/cloudera/sqoop/TestParquetImport.java
@@ -30,6 +30,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.junit.Test;
 import org.kitesdk.data.CompressionType;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
@@ -42,6 +43,13 @@
 import java.util.Arrays;
 import java.util.List;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Tests --as-parquetfile.
  */
@@ -106,10 +114,12 @@
     return args.toArray(new String[args.size()]);
   }
 
+  @Test
   public void testSnappyCompression() throws IOException {
     runParquetImportTest("snappy");
   }
 
+  @Test
   public void testDeflateCompression() throws IOException {
     runParquetImportTest("deflate");
   }
@@ -158,6 +168,7 @@
     }
   }
 
+  @Test
   public void testOverrideTypeMapping() throws IOException {
     String [] types = { "INT" };
     String [] vals = { "10" };
@@ -183,6 +194,7 @@
     }
   }
 
+  @Test
   public void testFirstUnderscoreInColumnName() throws IOException {
     String [] names = { "_NAME" };
     String [] types = { "INT" };
@@ -208,6 +220,7 @@
     }
   }
 
+  @Test
   public void testNonIdentCharactersInColumnName() throws IOException {
     String [] names = { "test_p-a+r/quet" };
     String [] types = { "INT" };
@@ -233,6 +246,7 @@
     }
   }
 
+  @Test
   public void testNullableParquetImport() throws IOException, SQLException {
     String [] types = { "INT" };
     String [] vals = { null };
@@ -251,6 +265,7 @@
     }
   }
 
+  @Test
   public void testQueryImport() throws IOException, SQLException {
     String [] types = { "INT" };
     String [] vals = { "1" };
@@ -269,6 +284,7 @@
     }
   }
 
+  @Test
   public void testIncrementalParquetImport() throws IOException, SQLException {
     String [] types = { "INT" };
     String [] vals = { "1" };
@@ -290,6 +306,7 @@
     }
   }
 
+  @Test
   public void testOverwriteParquetDatasetFail() throws IOException, SQLException {
     String [] types = { "INT" };
     String [] vals = {};
diff --git a/src/test/com/cloudera/sqoop/TestQuery.java b/src/test/com/cloudera/sqoop/TestQuery.java
index 04d6ec4..0836b8d 100644
--- a/src/test/com/cloudera/sqoop/TestQuery.java
+++ b/src/test/com/cloudera/sqoop/TestQuery.java
@@ -35,6 +35,10 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --query works in Sqoop.
@@ -161,18 +165,21 @@
     }
   }
 
+  @Test
   public void testSelectStar() throws IOException {
     runQueryTest("SELECT * FROM " + getTableName()
         + " WHERE INTFIELD2 > 4 AND $CONDITIONS",
         "1,8\n", 2, 4, getTablePath().toString());
   }
 
+  @Test
   public void testCompoundWhere() throws IOException {
     runQueryTest("SELECT * FROM " + getTableName()
         + " WHERE INTFIELD1 > 4 AND INTFIELD2 < 3 AND $CONDITIONS",
         "7,2\n", 1, 7, getTablePath().toString());
   }
 
+  @Test
   public void testFailNoConditions() throws IOException {
     String [] argv = getArgv(true, "SELECT * FROM " + getTableName(),
         getTablePath().toString(), true);
diff --git a/src/test/com/cloudera/sqoop/TestSplitBy.java b/src/test/com/cloudera/sqoop/TestSplitBy.java
index 8ab1eef..c13fbcc 100644
--- a/src/test/com/cloudera/sqoop/TestSplitBy.java
+++ b/src/test/com/cloudera/sqoop/TestSplitBy.java
@@ -35,6 +35,10 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --split-by works.
@@ -148,11 +152,13 @@
     }
   }
 
+  @Test
   public void testSplitByFirstCol() throws IOException {
     String splitByCol = "INTFIELD1";
     runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
   }
 
+  @Test
   public void testSplitBySecondCol() throws IOException {
     String splitByCol = "INTFIELD2";
     runSplitByTest(splitByCol, HsqldbTestServer.getFirstColSum());
diff --git a/src/test/com/cloudera/sqoop/TestSqoopOptions.java b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
index 84dc7b0..7d34d33 100644
--- a/src/test/com/cloudera/sqoop/TestSqoopOptions.java
+++ b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
@@ -21,8 +21,6 @@
 import java.util.Properties;
 
 import com.cloudera.sqoop.tool.BaseSqoopTool;
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.sqoop.manager.oracle.OracleUtils;
@@ -41,13 +39,16 @@
 import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test aspects of the SqoopOptions class.
  */
-@RunWith(JUnit4.class)
-public class TestSqoopOptions extends TestCase {
+public class TestSqoopOptions {
 
   private Properties originalSystemProperties;
 
@@ -766,8 +767,4 @@
     validateImportOptions(extraArgs);
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestSqoopOptions.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/TestTargetDir.java b/src/test/com/cloudera/sqoop/TestTargetDir.java
index 4816e70..ded5392 100644
--- a/src/test/com/cloudera/sqoop/TestTargetDir.java
+++ b/src/test/com/cloudera/sqoop/TestTargetDir.java
@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -33,16 +32,19 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
+
 
 /**
  * Test that --target-dir works.
  */
-@RunWith(JUnit4.class)
 public class TestTargetDir extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory
@@ -149,8 +151,4 @@
     runImport(argv);
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestTargetDir.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/TestWhere.java b/src/test/com/cloudera/sqoop/TestWhere.java
index c989d20..340be9c 100644
--- a/src/test/com/cloudera/sqoop/TestWhere.java
+++ b/src/test/com/cloudera/sqoop/TestWhere.java
@@ -35,6 +35,10 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --where works in Sqoop.
@@ -165,11 +169,13 @@
     }
   }
 
+  @Test
   public void testSingleClauseWhere() throws IOException {
     String whereClause = "INTFIELD2 > 4";
     runWhereTest(whereClause, "1,8\n", 2, 4);
   }
 
+  @Test
   public void testMultiClauseWhere() throws IOException {
     String whereClause = "INTFIELD1 > 4 AND INTFIELD2 < 3";
     runWhereTest(whereClause, "7,2\n", 1, 7);
diff --git a/src/test/com/cloudera/sqoop/ThirdPartyTests.java b/src/test/com/cloudera/sqoop/ThirdPartyTests.java
deleted file mode 100644
index 7e10c68..0000000
--- a/src/test/com/cloudera/sqoop/ThirdPartyTests.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import com.cloudera.sqoop.hbase.HBaseImportAddRowKeyTest;
-import com.cloudera.sqoop.hbase.HBaseImportNullTest;
-import com.cloudera.sqoop.hbase.HBaseImportTypesTest;
-import com.cloudera.sqoop.manager.DB2ManagerImportManualTest;
-
-import org.apache.sqoop.hcat.HCatalogExportTest;
-import org.apache.sqoop.hcat.HCatalogImportTest;
-
-import com.cloudera.sqoop.hbase.HBaseImportTest;
-import com.cloudera.sqoop.hbase.HBaseQueryImportTest;
-import com.cloudera.sqoop.hbase.HBaseUtilTest;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-import com.cloudera.sqoop.manager.CubridManagerExportTest;
-import com.cloudera.sqoop.manager.CubridManagerImportTest;
-import com.cloudera.sqoop.manager.DirectMySQLTest;
-import com.cloudera.sqoop.manager.DirectMySQLExportTest;
-import com.cloudera.sqoop.manager.JdbcMySQLExportTest;
-import com.cloudera.sqoop.manager.MySQLAuthTest;
-import com.cloudera.sqoop.manager.MySQLCompatTest;
-import com.cloudera.sqoop.manager.OracleExportTest;
-import com.cloudera.sqoop.manager.OracleManagerTest;
-import com.cloudera.sqoop.manager.OracleCompatTest;
-import com.cloudera.sqoop.manager.PostgresqlExportTest;
-import com.cloudera.sqoop.manager.PostgresqlImportTest;
-
-import org.apache.sqoop.manager.cubrid.CubridAuthTest;
-import org.apache.sqoop.manager.cubrid.CubridCompatTest;
-import org.apache.sqoop.manager.mysql.MySqlCallExportTest;
-import org.apache.sqoop.manager.mysql.MySqlColumnEscapeImportTest;
-import org.apache.sqoop.manager.netezza.DirectNetezzaExportManualTest;
-import org.apache.sqoop.manager.netezza.DirectNetezzaHCatExportManualTest;
-import org.apache.sqoop.manager.netezza.DirectNetezzaHCatImportManualTest;
-import org.apache.sqoop.manager.netezza.NetezzaExportManualTest;
-import org.apache.sqoop.manager.netezza.NetezzaImportManualTest;
-import org.apache.sqoop.manager.oracle.OraOopDataDrivenDBInputFormatConnectionCloseTest;
-import org.apache.sqoop.manager.oracle.OracleCallExportTest;
-import org.apache.sqoop.manager.oracle.OracleColumnEscapeImportTest;
-import org.apache.sqoop.manager.oracle.OracleIncrementalImportTest;
-import org.apache.sqoop.manager.oracle.OracleSplitterTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeExportDelimitedFileManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeExportSequenceFileManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeImportDelimitedFileManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerDatatypeImportSequenceFileManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerHiveImportManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerManagerManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerMultiColsManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerMultiMapsManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerParseMethodsManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerQueryManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerSplitByManualTest;
-import org.apache.sqoop.manager.sqlserver.SQLServerWhereManualTest;
-
-/**
- * Test battery including all tests of vendor-specific ConnManager
- * implementations.  These tests likely aren't run by Apache Hudson, because
- * they require configuring and using Oracle, MySQL, etc., which may have
- * incompatible licenses with Apache.
- */
-public final class ThirdPartyTests extends TestCase {
-
-  private ThirdPartyTests() { }
-
-  public static Test suite() {
-    TestSuite suite = new TestSuite("Tests vendor-specific ConnManager "
-      + "implementations in Sqoop and tests with third party dependencies");
-
-    // MySQL
-    suite.addTestSuite(DirectMySQLTest.class);
-    suite.addTestSuite(DirectMySQLExportTest.class);
-    suite.addTestSuite(JdbcMySQLExportTest.class);
-    suite.addTestSuite(MySQLAuthTest.class);
-    suite.addTestSuite(MySQLCompatTest.class);
-    suite.addTestSuite(MySqlColumnEscapeImportTest.class);
-
-    // Oracle
-    suite.addTestSuite(OracleExportTest.class);
-    suite.addTestSuite(OracleManagerTest.class);
-    suite.addTestSuite(OracleCompatTest.class);
-    suite.addTestSuite(OracleIncrementalImportTest.class);
-    suite.addTestSuite(OracleSplitterTest.class);
-    suite.addTestSuite(OraOopDataDrivenDBInputFormatConnectionCloseTest.class);
-    suite.addTestSuite(OracleColumnEscapeImportTest.class);
-
-    // SQL Server
-    suite.addTestSuite(SQLServerDatatypeExportDelimitedFileManualTest.class);
-    suite.addTestSuite(SQLServerDatatypeExportSequenceFileManualTest.class);
-    suite.addTestSuite(SQLServerDatatypeImportDelimitedFileManualTest.class);
-    suite.addTestSuite(SQLServerDatatypeImportSequenceFileManualTest.class);
-    suite.addTestSuite(SQLServerHiveImportManualTest.class);
-    suite.addTestSuite(SQLServerManagerManualTest.class);
-    suite.addTestSuite(SQLServerMultiColsManualTest.class);
-    suite.addTestSuite(SQLServerMultiMapsManualTest.class);
-    suite.addTestSuite(SQLServerParseMethodsManualTest.class);
-    suite.addTestSuite(SQLServerQueryManualTest.class);
-    suite.addTestSuite(SQLServerSplitByManualTest.class);
-    suite.addTestSuite(SQLServerWhereManualTest.class);
-
-    // PostgreSQL
-    suite.addTestSuite(PostgresqlImportTest.class);
-    suite.addTestSuite(PostgresqlExportTest.class);
-
-    // Cubrid
-    suite.addTestSuite(CubridManagerImportTest.class);
-    suite.addTestSuite(CubridManagerExportTest.class);
-    suite.addTestSuite(CubridAuthTest.class);
-    suite.addTestSuite(CubridCompatTest.class);
-
-    // DB2
-    suite.addTestSuite(DB2ManagerImportManualTest.class);
-
-    // Hbase
-    suite.addTestSuite(HBaseImportTest.class);
-    suite.addTestSuite(HBaseImportAddRowKeyTest.class);
-    suite.addTestSuite(HBaseImportNullTest.class);
-    suite.addTestSuite(HBaseImportTypesTest.class);
-    suite.addTestSuite(HBaseQueryImportTest.class);
-    suite.addTestSuite(HBaseUtilTest.class);
-
-    // HCatalog
-    suite.addTestSuite(HCatalogImportTest.class);
-    suite.addTestSuite(HCatalogExportTest.class);
-
-    // Call Export tests
-    suite.addTestSuite(MySqlCallExportTest.class);
-    suite.addTestSuite(OracleCallExportTest.class);
-
-    // Netezza
-    suite.addTestSuite(NetezzaExportManualTest.class);
-    suite.addTestSuite(NetezzaImportManualTest.class);
-    suite.addTestSuite(DirectNetezzaExportManualTest.class);
-    suite.addTestSuite(DirectNetezzaHCatExportManualTest.class);
-    suite.addTestSuite(DirectNetezzaHCatImportManualTest.class);
-
-    return suite;
-  }
-
-}
-
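(Illustrative sketch, not part of the patch: every per-file change below applies the same JUnit3-to-JUnit4 conversion that removing this suite makes possible — drop the TestCase superclass and the JUnit4TestAdapter suite() workaround, import org.junit.Test plus the static org.junit.Assert methods, and annotate each test method with @Test. The class and method names here are hypothetical.)

  // Before (JUnit3 style being removed):
  //   public class ExampleTest extends junit.framework.TestCase {
  //     public void testAddition() { assertEquals(4, 2 + 2); }
  //     // workaround: ant kept falling back to JUnit3
  //     public static junit.framework.Test suite() {
  //       return new junit.framework.JUnit4TestAdapter(ExampleTest.class);
  //     }
  //   }

  // After (JUnit4 style used throughout this patch):
  import org.junit.Test;

  import static org.junit.Assert.assertEquals;

  public class ExampleTest {

    @Test
    public void testAddition() {
      assertEquals(4, 2 + 2);
    }
  }
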
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
index abf9f1c..fd00498 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
@@ -18,7 +18,6 @@
 
 package com.cloudera.sqoop.hbase;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.commons.lang.StringUtils;
 import org.junit.Before;
 import org.junit.Test;
@@ -136,8 +135,4 @@
     return result.toArray(new String[result.size()]);
   }
 
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(HBaseImportAddRowKeyTest.class);
-  }
-
 }
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
index 58566fc..dd88fe7 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
@@ -22,6 +22,8 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  *
  */
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
index a5e1bea..fa14a01 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
@@ -22,6 +22,8 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test imports of tables into HBase.
  */
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
index 28b9026..d71d4e3 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
@@ -22,6 +22,8 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test import of free-form query into HBase.
  */
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
index ad92a07..a054eb6 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
@@ -52,6 +52,10 @@
 import java.util.UUID;
 import org.apache.commons.io.FileUtils;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
 /**
  * Utility methods that facilitate HBase import tests.
  */
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java b/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
index 166eef3..4201139 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
@@ -20,13 +20,15 @@
 
 import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 
 /**
  * This test verifies that HBase is present (the default when running test cases)
  * and that, when in fake not-present mode, the method returns false.
  */
-public class HBaseUtilTest extends TestCase {
+public class HBaseUtilTest {
 
   @Test
   public void testHBasePresent() {
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 1253e8d..1d67a2d 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -27,7 +27,7 @@
 import java.util.List;
 
 import com.cloudera.sqoop.Sqoop;
-import junit.framework.JUnit4TestAdapter;
+
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaBuilder;
 import org.apache.avro.generic.GenericRecord;
@@ -54,18 +54,21 @@
 import com.cloudera.sqoop.tool.SqoopTool;
 import org.apache.commons.cli.ParseException;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
 import org.kitesdk.data.Datasets;
 import org.kitesdk.data.Formats;
-import org.kitesdk.data.spi.DefaultConfiguration;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test HiveImport capability after an import to HDFS.
  */
-@RunWith(JUnit4.class)
+
 public class TestHiveImport extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory.getLog(
@@ -273,7 +276,6 @@
     assertFalse("Import actually happened!", fs.exists(hiveImportPath));
   }
 
-
   /** Test that strings and ints are handled in the normal fashion. */
   @Test
   public void testNormalHiveImport() throws IOException {
@@ -710,8 +712,4 @@
         getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestHiveImport.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
index 8d6b9d5..4db629f 100644
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
@@ -20,9 +20,6 @@
 
 import java.util.Map;
 
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -31,19 +28,23 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 import java.sql.Types;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+
 /**
  * Test Hive DDL statement generation.
  */
-@RunWith(JUnit4.class)
-public class TestTableDefWriter extends TestCase {
+public class TestTableDefWriter {
 
   public static final Log LOG = LogFactory.getLog(
       TestTableDefWriter.class.getName());
@@ -246,8 +247,4 @@
     assertTrue(createTable.contains("`db`.`outputTable`"));
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestTableDefWriter.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
index 867f1d5..982b6ad 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
@@ -20,23 +20,24 @@
 
 import java.io.IOException;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 
-import junit.framework.TestCase;
-import org.junit.Rule;
 import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Rule;
+
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test looking up codecs by name.
  */
-@RunWith(JUnit4.class)
-public class TestCodecMap extends TestCase {
+public class TestCodecMap {
+
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
@@ -85,8 +86,4 @@
     CodecMap.getCodec("bogus", new Configuration());
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestCodecMap.class);
-  }
 }
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java
index d056750..7f8ca6d 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java
@@ -27,29 +27,29 @@
 import java.io.Writer;
 import java.nio.CharBuffer;
 
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.junit.Before;
-import org.junit.Rule;
+
 import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Rule;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test the LobFile reader/writer implementation.
  */
-@RunWith(JUnit4.class)
-public class TestLobFile extends TestCase {
+public class TestLobFile {
 
   public static final Log LOG = LogFactory.getLog(
-        TestLobFile.class.getName());
+      TestLobFile.class.getName());
 
   public static final Path TEMP_BASE_DIR;
 
@@ -77,14 +77,14 @@
     fs.mkdirs(TEMP_BASE_DIR);
   }
 
-  private long [] writeClobFile(Path p, String codec,
-      String... records) throws Exception {
+  private long[] writeClobFile(Path p, String codec,
+                               String... records) throws Exception {
     if (fs.exists(p)) {
       fs.delete(p, false);
     }
 
     // memorize the offsets of each record we write.
-    long [] offsets = new long[records.length];
+    long[] offsets = new long[records.length];
 
     // Create files with four entries per index segment.
     LobFile.Writer writer = LobFile.create(p, conf, true, codec, 4);
@@ -130,7 +130,7 @@
 
       LOG.info("Got record of " + bytesRead + " chars");
       assertEquals(expected.length(), bytesRead);
-      char [] charData = buf.array();
+      char[] charData = buf.array();
       String finalRecord = new String(charData);
       assertEquals(expected, finalRecord);
 
@@ -150,7 +150,7 @@
   }
 
   private void runClobFileTest(Path p, String codec,
-      String... records) throws Exception {
+                               String... records) throws Exception {
     writeClobFile(p, codec, records);
     verifyClobFile(p, records);
     fs.delete(p, false);
@@ -194,12 +194,13 @@
    * but then read the second record completely. Verify that we
    * can re-align on a record boundary correctly. This test requires
    * at least 3 records.
-   * @param p the path to the file to create.
+   *
+   * @param p         the path to the file to create.
    * @param firstLine the first line of the first record
-   * @param records All of the records to write to the file.
+   * @param records   All of the records to write to the file.
    */
   private void runLineAndRecordTest(Path p, String firstLine,
-      String... records) throws Exception {
+                                    String... records) throws Exception {
 
     assertTrue("This test requires 3+ records", records.length > 2);
 
@@ -234,7 +235,7 @@
     CharBuffer buf = CharBuffer.allocate(records[1].length());
     r.read(buf);
     r.close();
-    char [] chars = buf.array();
+    char[] chars = buf.array();
     String s = new String(chars);
     assertEquals(records[1], s);
 
@@ -285,10 +286,10 @@
     // Seek past the first two records and read the third.
 
     Path p = new Path(TEMP_BASE_DIR, "seek.lob");
-    String [] records = {
-      "this is the first record!",
-      "here comes record number two. It is a bit longer.",
-      "this is the third record. we can read it.",
+    String[] records = {
+        "this is the first record!",
+        "here comes record number two. It is a bit longer.",
+        "this is the third record. we can read it.",
     };
 
     // Write the file and memorize when the third record starts.
@@ -321,7 +322,7 @@
     CharBuffer buf = CharBuffer.allocate(records[2].length());
     r.read(buf);
     r.close();
-    char [] chars = buf.array();
+    char[] chars = buf.array();
     String s = new String(chars);
     assertEquals(records[2], s);
 
@@ -330,9 +331,11 @@
   }
 
 
-  /** Verifies that the next record in the LobFile is the expected one. */
+  /**
+   * Verifies that the next record in the LobFile is the expected one.
+   */
   private void verifyNextRecord(LobFile.Reader reader, long expectedId,
-      String expectedRecord) throws Exception {
+                                String expectedRecord) throws Exception {
     assertTrue(reader.next());
     assertTrue(reader.isRecordAvailable());
     assertEquals(expectedId, reader.getRecordId());
@@ -352,7 +355,7 @@
     LOG.info("Got record of " + bytesRead + " chars");
     assertEquals(expectedRecord.length(), bytesRead);
 
-    char [] charData = buf.array();
+    char[] charData = buf.array();
     String finalRecord = new String(charData);
     assertEquals(expectedRecord, finalRecord);
   }
@@ -363,20 +366,20 @@
 
     Path p = new Path(TEMP_BASE_DIR, "manyseeks.lob");
 
-    String [] records = {
-      "first record",
-      "second record",
-      "the third record",
-      "rec4 is the last in IndexSeg 0",
-      "rec5 is first in IndexSeg 1",
-      "rec6 is yet another record",
-      "rec7 is starting to feel boring",
-      "rec8 is at the end of seg 1",
-      "rec9 is all by itself in seg 2",
+    String[] records = {
+        "first record",
+        "second record",
+        "the third record",
+        "rec4 is the last in IndexSeg 0",
+        "rec5 is first in IndexSeg 1",
+        "rec6 is yet another record",
+        "rec7 is starting to feel boring",
+        "rec8 is at the end of seg 1",
+        "rec9 is all by itself in seg 2",
     };
 
     // Write the records to a file, save their offsets.
-    long [] offsets = writeClobFile(p, null, records);
+    long[] offsets = writeClobFile(p, null, records);
 
     // Sanity check that we can stream the file.
     verifyClobFile(p, records);
@@ -453,16 +456,17 @@
    * as many bytes as we expect, and that the bytes are what we
    * expect them to be. Assumes that the bytes are such that
    * input[i] == i + offset.
-   * @param reader the LobFile reader to consume data from
+   *
+   * @param reader              the LobFile reader to consume data from
    * @param expectedDeclaredLen the size we expect the LobFile to declare
-   * its record length as.
-   * @param expectedActualLen the true number of bytes we expect to read in
-   * the record.
-   * @param offset the offset amount for each of the elements of the array.
+   *                            its record length as.
+   * @param expectedActualLen   the true number of bytes we expect to read in
+   *                            the record.
+   * @param offset              the offset amount for each of the elements of the array.
    */
   private void verifyBlobRecord(LobFile.Reader reader,
-      long expectedDeclaredLen, long expectedActualLen,
-      int offset) throws Exception {
+                                long expectedDeclaredLen, long expectedActualLen,
+                                int offset) throws Exception {
 
     assertTrue(reader.next());
     assertTrue(reader.isRecordAvailable());
@@ -470,7 +474,7 @@
 
     InputStream is = reader.readBlobRecord();
 
-    byte [] bytes = new byte[(int) expectedActualLen];
+    byte[] bytes = new byte[(int) expectedActualLen];
     int numRead = is.read(bytes);
     assertEquals(expectedActualLen, numRead);
 
@@ -486,13 +490,14 @@
    * of the record to disagree with the actual length (the actual length
    * should be &gt;= the declared length).
    * The record written will have values v[i] = i + offset.
-   * @param writer the LobFile writer to put the record into
+   *
+   * @param writer      the LobFile writer to put the record into
    * @param declaredLen the length value written into the file itself
-   * @param actualLen the true number of bytes to write
-   * @param offset an amount to adjust each record's byte values by.
+   * @param actualLen   the true number of bytes to write
+   * @param offset      an amount to adjust each record's byte values by.
    */
   private void writeBlobRecord(LobFile.Writer writer, long declaredLen,
-      long actualLen, int offset) throws Exception {
+                               long actualLen, int offset) throws Exception {
     OutputStream os = writer.writeBlobRecord(declaredLen);
     for (int i = 0; i < actualLen; i++) {
       os.write(i + offset);
@@ -505,13 +510,14 @@
   /**
    * Verifies a number of records that all have the same declared
    * and actual record lengths.
-   * @param p the path to the LobFile to open
-   * @param numRecords the number of records to expect
+   *
+   * @param p           the path to the LobFile to open
+   * @param numRecords  the number of records to expect
    * @param declaredLen the declared length of each record in the file
-   * @param actualLen the true number of bytes we expect to read per record.
+   * @param actualLen   the true number of bytes we expect to read per record.
    */
   private void verifyBlobRecords(Path p, int numRecords,
-      long declaredLen, long actualLen) throws Exception {
+                                 long declaredLen, long actualLen) throws Exception {
 
     LobFile.Reader reader = LobFile.open(p, conf);
     for (int i = 0; i < numRecords; i++) {
@@ -565,10 +571,10 @@
   private void runCompressedTest(String codec) throws Exception {
     LOG.info("Testing with codec: " + codec);
     Path p = new Path(TEMP_BASE_DIR, "compressed-" + codec + ".lob");
-    String [] records = {
-      "this is the first record, It should be compressed a lot!",
-      "record 2 record 2 record 2 record 2 2 2 2 2 2 2 2 2 2 2 2",
-      "and a third and a third yes this is the third",
+    String[] records = {
+        "this is the first record, It should be compressed a lot!",
+        "record 2 record 2 record 2 record 2 2 2 2 2 2 2 2 2 2 2 2",
+        "and a third and a third yes this is the third",
     };
 
     runClobFileTest(p, codec, records);
@@ -587,9 +593,5 @@
     runCompressedTest(CodecMap.LZO);
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestLobFile.class);
-  }
 }
 
diff --git a/src/test/com/cloudera/sqoop/io/TestNamedFifo.java b/src/test/com/cloudera/sqoop/io/TestNamedFifo.java
index 40f9b3b..b11bbee 100644
--- a/src/test/com/cloudera/sqoop/io/TestNamedFifo.java
+++ b/src/test/com/cloudera/sqoop/io/TestNamedFifo.java
@@ -29,19 +29,22 @@
 
 import org.apache.hadoop.util.StringUtils;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 /**
  * Test the named fifo utility.
  */
-public class TestNamedFifo extends TestCase {
+public class TestNamedFifo {
 
   public static final Log LOG = LogFactory.getLog(
         TestNamedFifo.class.getName());
@@ -60,6 +63,7 @@
   private Configuration conf;
   private FileSystem fs;
 
+  @Before
   public void setUp() throws Exception {
     conf = new Configuration();
     conf.set("fs.default.name", "file:///");
@@ -157,6 +161,7 @@
     }
   }
 
+  @Test
   public void testNamedFifo() throws Exception {
 
     if (Shell.WINDOWS) {
diff --git a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java b/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java
index c00b6b3..8b2b1e5 100644
--- a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java
+++ b/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java
@@ -37,12 +37,20 @@
 
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test that the splittable buffered writer system works.
  */
-public class TestSplittableBufferedWriter extends TestCase {
+public class TestSplittableBufferedWriter {
 
   public static final Log LOG = LogFactory.getLog(
       TestSplittableBufferedWriter.class.getName());
@@ -84,6 +92,7 @@
     }
   }
 
+  @Before
   public void setUp() throws IOException {
     ensureEmptyWriteDir();
   }
@@ -130,6 +139,7 @@
     assertFalse("File found: " + p + " and we did not expect it", fs.exists(p));
   }
 
+  @Test
   public void testNonSplittingTextFile() throws IOException {
     SplittingOutputStream os  = new SplittingOutputStream(getConf(),
         getWritePath(), "nonsplit-", 0, null);
@@ -177,6 +187,7 @@
     }
   }
 
+  @Test
   public void testNonSplittingGzipFile() throws IOException {
     SplittingOutputStream os  = new SplittingOutputStream(getConf(),
         getWritePath(), "nonsplit-", 0, new GzipCodec());
@@ -207,6 +218,7 @@
         "nonsplit-00000.gz"))), expectedLines);
   }
 
+  @Test
   public void testSplittingTextFile() throws IOException {
     SplittingOutputStream os  = new SplittingOutputStream(getConf(),
         getWritePath(), "split-", 10, null);
@@ -267,6 +279,7 @@
     }
   }
 
+  @Test
   public void testSplittingGzipFile() throws IOException {
     SplittingOutputStream os = new SplittingOutputStream(getConf(),
         getWritePath(), "splitz-", 3, new GzipCodec());
diff --git a/src/test/com/cloudera/sqoop/lib/TestBlobRef.java b/src/test/com/cloudera/sqoop/lib/TestBlobRef.java
index d19b769..0d010b0 100644
--- a/src/test/com/cloudera/sqoop/lib/TestBlobRef.java
+++ b/src/test/com/cloudera/sqoop/lib/TestBlobRef.java
@@ -22,30 +22,38 @@
 
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.testutil.CommonArgs;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import com.cloudera.sqoop.io.LobFile;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test that the BlobRef.parse() method does the right thing.
  * Note that we don't support inline parsing here; we only expect this to
  * really work for external BLOBs.
  */
-public class TestBlobRef extends TestCase {
+public class TestBlobRef {
 
+  @Test
   public void testEmptyStr() {
     BlobRef r = BlobRef.parse("");
     assertFalse(r.isExternal());
   }
 
+  @Test
   public void testInline() throws IOException {
     BlobRef r = BlobRef.parse("foo");
     assertFalse(r.isExternal());
   }
 
+  @Test
   public void testEmptyFile() {
     BlobRef r = BlobRef.parse("externalLob()");
     assertFalse(r.isExternal());
@@ -55,6 +63,7 @@
     assertEquals("externalLob(lf,,0,0)", r.toString());
   }
 
+  @Test
   public void testInlineNearMatch() {
     BlobRef r = BlobRef.parse("externalLob(foo)bar");
     assertFalse(r.isExternal());
@@ -69,6 +78,7 @@
     assertFalse(r.isExternal());
   }
 
+  @Test
   public void testExternal() throws IOException {
     final byte [] DATA = { 1, 2, 3, 4, 5 };
     final String FILENAME = "blobdata";
@@ -76,6 +86,7 @@
     doExternalTest(DATA, FILENAME);
   }
 
+  @Test
   public void testExternalSubdir() throws IOException {
     final byte [] DATA = { 1, 2, 3, 4, 5 };
     final String FILENAME = "_lob/blobdata";
diff --git a/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java b/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java
index 7449c39..c1ab7e5 100644
--- a/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java
+++ b/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java
@@ -18,12 +18,18 @@
 
 package com.cloudera.sqoop.lib;
 
-import junit.framework.TestCase;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test the boolean parser.
  */
-public class TestBooleanParser extends TestCase {
+public class TestBooleanParser {
+
+  @Test
   public void testBoolParser() {
     assertTrue(BooleanParser.valueOf("true"));
     assertTrue(BooleanParser.valueOf("TRUE"));
diff --git a/src/test/com/cloudera/sqoop/lib/TestClobRef.java b/src/test/com/cloudera/sqoop/lib/TestClobRef.java
index 7e961c0..7efc3b6 100644
--- a/src/test/com/cloudera/sqoop/lib/TestClobRef.java
+++ b/src/test/com/cloudera/sqoop/lib/TestClobRef.java
@@ -22,24 +22,31 @@
 
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.testutil.CommonArgs;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import com.cloudera.sqoop.io.LobFile;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test parsing of ClobRef objects.
  */
-public class TestClobRef extends TestCase {
+public class TestClobRef {
 
+  @Test
   public void testEmptyStr() {
     ClobRef r = ClobRef.parse("");
     assertFalse(r.isExternal());
     assertEquals("", r.toString());
   }
 
+  @Test
   public void testInline() throws IOException {
     ClobRef r = ClobRef.parse("foo");
     assertFalse(r.isExternal());
@@ -55,6 +62,7 @@
     assertEquals("foo", str);
   }
 
+  @Test
   public void testEmptyFile() {
     ClobRef r = ClobRef.parse("externalLob()");
     assertFalse(r.isExternal());
@@ -65,6 +73,7 @@
     assertEquals("externalLob(lf,,0,0)", r.toString());
   }
 
+  @Test
   public void testInlineNearMatch() {
     ClobRef r = ClobRef.parse("externalLob(foo)bar");
     assertFalse(r.isExternal());
@@ -83,6 +92,7 @@
     assertEquals("externalLob(lf,foo,1,2)x", r.getData());
   }
 
+  @Test
   public void testExternal() throws IOException {
     final String DATA = "This is the clob data!";
     final String FILENAME = "clobdata";
@@ -90,6 +100,7 @@
     doExternalTest(DATA, FILENAME);
   }
 
+  @Test
   public void testExternalSubdir() throws IOException {
     final String DATA = "This is the clob data!";
     final String FILENAME = "_lob/clobdata";
diff --git a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java b/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java
index 1d2d29a..1fc9cd2 100644
--- a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java
+++ b/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java
@@ -18,14 +18,18 @@
 
 package com.cloudera.sqoop.lib;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 
 /**
  * Test that the field formatter works in a variety of configurations.
  */
-public class TestFieldFormatter extends TestCase {
+public class TestFieldFormatter {
 
+  @Test
   public void testAllEmpty() {
     String result = FieldFormatter.escapeAndEnclose("",
         new DelimiterSet(DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
@@ -33,41 +37,48 @@
     assertEquals("", result);
   }
 
+  @Test
   public void testNullArgs() {
     assertNull(FieldFormatter.escapeAndEnclose(null,
       new DelimiterSet('\"', DelimiterSet.NULL_CHAR, '\"', '\\', false)));
   }
 
+  @Test
   public void testBasicStr() {
     String result = FieldFormatter.escapeAndEnclose("foo",
         DelimiterSet.DEFAULT_DELIMITERS);
     assertEquals("foo", result);
   }
 
+  @Test
   public void testEscapeSlash() {
     String result = FieldFormatter.escapeAndEnclose("foo\\bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("foo\\\\bar", result);
   }
 
+  @Test
   public void testMustEnclose() {
     String result = FieldFormatter.escapeAndEnclose("foo",
         new DelimiterSet(',', '\n', '\"', DelimiterSet.NULL_CHAR, true));
     assertEquals("\"foo\"", result);
   }
 
+  @Test
   public void testEncloseComma1() {
     String result = FieldFormatter.escapeAndEnclose("foo,bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("\"foo,bar\"", result);
   }
 
+  @Test
   public void testEncloseComma2() {
     String result = FieldFormatter.escapeAndEnclose("foo,bar",
         new DelimiterSet(',', ',', '\"', '\\', false));
     assertEquals("\"foo,bar\"", result);
   }
 
+  @Test
   public void testNoNeedToEnclose() {
     String result = FieldFormatter.escapeAndEnclose(
         "just another string",
@@ -75,6 +86,7 @@
     assertEquals("just another string", result);
   }
 
+  @Test
   public void testCannotEnclose() {
     // Can't enclose because encloser is nul.
     // This should escape the comma instead.
@@ -84,6 +96,7 @@
     assertEquals("foo\\,bar", result);
   }
 
+  @Test
   public void testEmptyCharToEscapeString() {
     // test what happens when the escape char is null. It should encode the
     // null char.
@@ -93,36 +106,42 @@
     assertEquals("\000", s);
   }
 
+  @Test
   public void testEscapeCentralQuote() {
     String result = FieldFormatter.escapeAndEnclose("foo\"bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("foo\\\"bar", result);
   }
 
+  @Test
   public void testEscapeMultiCentralQuote() {
     String result = FieldFormatter.escapeAndEnclose("foo\"\"bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("foo\\\"\\\"bar", result);
   }
 
+  @Test
   public void testDoubleEscape() {
     String result = FieldFormatter.escapeAndEnclose("foo\\\"bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("foo\\\\\\\"bar", result);
   }
 
+  @Test
   public void testReverseEscape() {
     String result = FieldFormatter.escapeAndEnclose("foo\"\\bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("foo\\\"\\\\bar", result);
   }
 
+  @Test
   public void testQuotedEncloser() {
     String result = FieldFormatter.escapeAndEnclose("foo\",bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
     assertEquals("\"foo\\\",bar\"", result);
   }
 
+  @Test
   public void testQuotedEscape() {
     String result = FieldFormatter.escapeAndEnclose("foo\\,bar",
         new DelimiterSet(',', '\n', '\"', '\\', false));
diff --git a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java b/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
index e0ca67c..c55f14b 100644
--- a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
+++ b/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
@@ -26,23 +26,30 @@
 
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.testutil.CommonArgs;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 import com.cloudera.sqoop.testutil.MockResultSet;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test deserialization of ClobRef and BlobRef fields.
  */
-public class TestLargeObjectLoader extends TestCase {
+public class TestLargeObjectLoader {
 
   protected Configuration conf;
   protected LargeObjectLoader loader;
   protected Path outDir;
 
+  @Before
   public void setUp() throws IOException, InterruptedException {
     conf = new Configuration();
     if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
@@ -59,6 +66,7 @@
     loader = new LargeObjectLoader(conf, outDir);
   }
 
+  @Test
   public void testReadClobRef()
       throws IOException, InterruptedException, SQLException {
     // This should give us an inline CLOB.
@@ -82,6 +90,7 @@
     assertEquals(MockResultSet.CLOB_DATA, str);
   }
 
+  @Test
   public void testReadBlobRef()
       throws IOException, InterruptedException, SQLException {
     // This should give us an inline BLOB.
diff --git a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java b/src/test/com/cloudera/sqoop/lib/TestRecordParser.java
index 8b11d39..57bdb5f 100644
--- a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java
+++ b/src/test/com/cloudera/sqoop/lib/TestRecordParser.java
@@ -20,13 +20,15 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 
 /**
  * Test that the record parser works in a variety of configurations.
  */
-public class TestRecordParser extends TestCase {
+public class TestRecordParser {
 
   private void assertListsEqual(String msg, List<String> expected,
       List<String> actual) {
@@ -100,6 +102,7 @@
     return asList;
   }
 
+  @Test
   public void testEmptyLine() throws RecordParser.ParseError {
     // an empty line should return no fields.
 
@@ -109,6 +112,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(""));
   }
 
+  @Test
   public void testJustEOR() throws RecordParser.ParseError {
     // a line with just a newline char should return a single zero-length field.
 
@@ -118,6 +122,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("\n"));
   }
 
+  @Test
   public void testOneField() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -125,6 +130,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("the field"));
   }
 
+  @Test
   public void testOneField2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -132,6 +138,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("the field\n"));
   }
 
+  @Test
   public void testQuotedField1() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -140,6 +147,7 @@
         parser.parseRecord("\"the field\"\n"));
   }
 
+  @Test
   public void testQuotedField2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -148,6 +156,7 @@
         parser.parseRecord("\"the field\""));
   }
 
+  @Test
   public void testQuotedField3() throws RecordParser.ParseError {
     // quoted containing EOF
     RecordParser parser = new RecordParser(
@@ -157,6 +166,7 @@
         parser.parseRecord("\"the ,field\""));
   }
 
+  @Test
   public void testQuotedField4() throws RecordParser.ParseError {
     // quoted containing multiple EOFs
     RecordParser parser = new RecordParser(
@@ -166,6 +176,7 @@
         parser.parseRecord("\"the ,,field\""));
   }
 
+  @Test
   public void testQuotedField5() throws RecordParser.ParseError {
     // quoted containing EOF and EOR
     RecordParser parser = new RecordParser(
@@ -175,6 +186,7 @@
         parser.parseRecord("\"the ,\nfield\""));
   }
 
+  @Test
   public void testQuotedField6() throws RecordParser.ParseError {
     // quoted containing EOR
     RecordParser parser = new RecordParser(
@@ -184,6 +196,7 @@
         parser.parseRecord("\"the \nfield\""));
   }
 
+  @Test
   public void testQuotedField7() throws RecordParser.ParseError {
     // quoted containing multiple EORs
     RecordParser parser = new RecordParser(
@@ -193,6 +206,7 @@
         parser.parseRecord("\"the \n\nfield\""));
   }
 
+  @Test
   public void testQuotedField8() throws RecordParser.ParseError {
     // quoted containing escaped quoted char
     RecordParser parser = new RecordParser(
@@ -202,6 +216,7 @@
         parser.parseRecord("\"the \\\"field\""));
   }
 
+  @Test
   public void testUnquotedEscape1() throws RecordParser.ParseError {
     // field without quotes with an escaped EOF char.
     RecordParser parser = new RecordParser(
@@ -210,6 +225,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("the \\,field"));
   }
 
+  @Test
   public void testUnquotedEscape2() throws RecordParser.ParseError {
     // field without quotes with an escaped escape char.
     RecordParser parser = new RecordParser(
@@ -218,6 +234,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("the \\\\field"));
   }
 
+  @Test
   public void testTwoFields1() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -225,6 +242,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("field1,field2"));
   }
 
+  @Test
   public void testTwoFields2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -233,6 +251,7 @@
         parser.parseRecord("field1,field2\n"));
   }
 
+  @Test
   public void testTwoFields3() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -241,6 +260,7 @@
         parser.parseRecord("\"field1\",field2\n"));
   }
 
+  @Test
   public void testTwoFields4() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -249,6 +269,7 @@
         parser.parseRecord("field1,\"field2\"\n"));
   }
 
+  @Test
   public void testTwoFields5() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -257,6 +278,7 @@
         parser.parseRecord("field1,\"field2\""));
   }
 
+  @Test
   public void testRequiredQuotes0() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -265,6 +287,7 @@
         parser.parseRecord("\"field1\",\"field2\"\n"));
   }
 
+  @Test
   public void testRequiredQuotes1() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -273,6 +296,7 @@
         parser.parseRecord("\"field1\",\"field2\""));
   }
 
+  @Test
   public void testRequiredQuotes2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -284,6 +308,7 @@
     }
   }
 
+  @Test
   public void testRequiredQuotes3() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -295,6 +320,7 @@
     }
   }
 
+  @Test
   public void testRequiredQuotes4() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -306,6 +332,7 @@
     }
   }
 
+  @Test
   public void testNull() {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', true));
@@ -319,6 +346,7 @@
   }
 
 
+  @Test
   public void testEmptyFields1() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -326,6 +354,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(","));
   }
 
+  @Test
   public void testEmptyFields2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -333,6 +362,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(",\n"));
   }
 
+  @Test
   public void testEmptyFields3() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -340,6 +370,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(",,\n"));
   }
 
+  @Test
   public void testEmptyFields4() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -347,6 +378,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(",foo,\n"));
   }
 
+  @Test
   public void testEmptyFields5() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -354,6 +386,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord(",foo,"));
   }
 
+  @Test
   public void testEmptyFields6() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -361,6 +394,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("foo,"));
   }
 
+  @Test
   public void testTrailingText() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -368,6 +402,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("foo,bar\nbaz"));
   }
 
+  @Test
   public void testTrailingText2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -375,6 +410,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("\nbaz"));
   }
 
+  @Test
   public void testLeadingEscape() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', '\n', '\"', '\\', false));
@@ -382,6 +418,7 @@
     assertListsEqual(null, list(strings), parser.parseRecord("\\\nbaz"));
   }
 
+  @Test
   public void testEofIsEor() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', ',', '\"', '\\', false));
@@ -390,6 +427,7 @@
         parser.parseRecord("three,different,fields"));
   }
 
+  @Test
   public void testEofIsEor2() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', ',', '\"', '\\', false));
@@ -398,6 +436,7 @@
         parser.parseRecord("three,\"different\",fields"));
   }
 
+  @Test
   public void testRepeatedParse() throws RecordParser.ParseError {
     RecordParser parser = new RecordParser(
         new DelimiterSet(',', ',', '\"', '\\', false));
diff --git a/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java b/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java
index 4fa74fb..36aa821 100644
--- a/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java
@@ -38,6 +38,10 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test the CubridManager implementation.
@@ -104,6 +108,7 @@
    * Cubrid does not support --staging-table; disable this test case.
    */
   @Override
+  @Test
   public void testMultiTransactionWithStaging() throws IOException,
       SQLException {
     return;
@@ -113,6 +118,7 @@
    * Cubrid does not support --staging-table; disable this test case.
    */
   @Override
+  @Test
   public void testMultiMapTextExportWithStaging() throws IOException,
       SQLException {
     return;
@@ -282,6 +288,7 @@
   }
 
   /** Make sure mixed update/insert export work correctly. */
+  @Test
   public void testUpsertTextExport() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
     createTextFile(0, TOTAL_RECORDS, false);
diff --git a/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java b/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java
index a683e20..03763ca 100644
--- a/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java
@@ -45,6 +45,10 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the CubridManager implementation.
  *
diff --git a/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java b/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java
index 3d9f817..2bc5c54 100644
--- a/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java
+++ b/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java
@@ -40,6 +40,10 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the DB2Manager implementation.
  *
diff --git a/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java b/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java
index 7f08675..ec7b84a 100644
--- a/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java
@@ -38,6 +38,11 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
 import com.cloudera.sqoop.mapreduce.MySQLExportMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test the DirectMySQLManager implementation's exportJob() functionality.
@@ -136,6 +141,7 @@
   /**
    * Test a single mapper that runs several transactions serially.
    */
+  @Test
   public void testMultiTxExport() throws IOException, SQLException {
     multiFileTest(1, 20, 1,
         "-D", MySQLExportMapper.MYSQL_CHECKPOINT_BYTES_KEY + "=10");
@@ -144,6 +150,7 @@
   /**
    * Test an authenticated export using mysqlimport.
    */
+  @Test
   public void testAuthExport() throws IOException, SQLException {
     SqoopOptions options = new SqoopOptions(MySQLAuthTest.AUTH_CONNECT_STRING,
         getTableName());
@@ -213,6 +220,7 @@
   /**
    * Test an authenticated export using mysqlimport.
    */
+  @Test
   public void testEscapedByExport() throws IOException, SQLException {
     SqoopOptions options = new SqoopOptions(MySQLAuthTest.AUTH_CONNECT_STRING,
         getTableName());
@@ -295,12 +303,14 @@
   }
 
   @Override
+  @Test
   public void testMultiMapTextExportWithStaging()
     throws IOException, SQLException {
     // disable this test as staging is not supported in direct mode
   }
 
   @Override
+  @Test
   public void testMultiTransactionWithStaging()
     throws IOException, SQLException {
     // disable this test as staging is not supported in direct mode
diff --git a/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java b/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java
index faa0a3e..a58fa17 100644
--- a/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java
+++ b/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java
@@ -41,6 +41,10 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the DirectMySQLManager implementation.
  * This differs from MySQLManager only in its importTable() method, which
diff --git a/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java b/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java
index 5c6e567..6bf890b 100644
--- a/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java
@@ -31,6 +31,9 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 /**
  * Test the MySQLManager implementation's exportJob() functionality.
@@ -133,6 +136,7 @@
         statementsPerTx, subArgv);
   }
 
+  @Test
   public void testIntColInBatchMode() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
@@ -156,6 +160,7 @@
     assertColMinAndMax(forIdx(0), gen);
   }
 
+  @Test
   public void testUpsert() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
 
diff --git a/src/test/com/cloudera/sqoop/manager/ManualMySQLTests.java b/src/test/com/cloudera/sqoop/manager/ManualMySQLTests.java
deleted file mode 100644
index 4d06dd9..0000000
--- a/src/test/com/cloudera/sqoop/manager/ManualMySQLTests.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.manager;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Manual test case with all MySQL related tests.
- */
-public final class ManualMySQLTests extends TestCase {
-
-  private ManualMySQLTests() { }
-
-  public static Test suite() {
-    TestSuite suite = new TestSuite("All MySQL test cases");
-    suite.addTestSuite(DirectMySQLTest.class);
-    suite.addTestSuite(DirectMySQLExportTest.class);
-    suite.addTestSuite(JdbcMySQLExportTest.class);
-    suite.addTestSuite(MySQLAuthTest.class);
-    suite.addTestSuite(MySQLCompatTest.class);
-
-    return suite;
-  }
-
-}
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java b/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java
index 57900ee..d5cca5d 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java
+++ b/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java
@@ -40,6 +40,11 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test authentication and remote access to direct mysqldump-based imports.
  *
diff --git a/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java b/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java
index 5bf920a..0d615e3 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java
+++ b/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java
@@ -28,6 +28,9 @@
 import org.apache.hadoop.conf.Configuration;
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 /**
  * Test the basic Oracle connection manager with the various column types.
@@ -194,6 +197,7 @@
 
   // Disable this test since Oracle isn't ANSI compliant.
   @Override
+  @Test
   public void testEmptyStringCol() {
     this.skipped = true;
     LOG.info(
@@ -201,6 +205,7 @@
   }
 
   @Override
+  @Test
   public void testTimestamp1() {
     verifyType(getTimestampType(),
         getTimestampInsertStr("'2009-04-24 18:24:00'"),
@@ -208,6 +213,7 @@
   }
 
   @Override
+  @Test
   public void testTimestamp2() {
     verifyType(getTimestampType(),
         getTimestampInsertStr("'2009-04-24 18:24:00.0002'"),
@@ -215,21 +221,25 @@
   }
 
   @Override
+  @Test
   public void testDate1() {
     verifyType("DATE", getDateInsertStr("'2009-01-12'"),
         getDateSeqOutput("2009-01-12"));
   }
 
   @Override
+  @Test
   public void testDate2() {
     verifyType("DATE", getDateInsertStr("'2009-04-24'"),
         getDateSeqOutput("2009-04-24"));
   }
 
+  @Test
   public void testRawVal() {
     verifyType("RAW(8)", "'12ABCD'", getVarBinarySeqOutput("12ABCD"), true);
   }
 
+  @Test
   public void testBinaryFloat() {
     verifyType("BINARY_FLOAT", getBinaryFloatInsertStr(25f), "25.0");
     verifyType("BINARY_FLOAT", getBinaryFloatInsertStr(+6.34f), "6.34");
@@ -241,6 +251,7 @@
     verifyType("BINARY_FLOAT", getBinaryFloatInsertStr(min), "1.17549E-38");
   }
 
+  @Test
   public void testBinaryDouble() {
     verifyType("BINARY_DOUBLE", getBinaryDoubleInsertStr(0.5d), "0.5");
     verifyType("BINARY_DOUBLE", getBinaryDoubleInsertStr(-1d), "-1.0");
diff --git a/src/test/com/cloudera/sqoop/manager/OracleExportTest.java b/src/test/com/cloudera/sqoop/manager/OracleExportTest.java
index 07d672f..ec56cbe 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleExportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/OracleExportTest.java
@@ -30,8 +30,9 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
+import org.junit.Test;
 
-import junit.framework.AssertionFailedError;
+import static org.junit.Assert.fail;
 
 /**
  * Test the OracleManager implementation's exportJob() functionality.
@@ -251,7 +252,7 @@
       verifyExport(TOTAL_RECORDS);
       assertColMinAndMax(forIdx(0), genDate);
       assertColMinAndMax(forIdx(1), genTime);
-    } catch (AssertionFailedError afe) {
+    } catch (AssertionError afe) {
       genDate = getNewDateColGenerator();
       genTime = getNewTimeColGenerator();
 
@@ -265,6 +266,7 @@
   }
 
   /** Make sure mixed update/insert export work correctly. */
+  @Test
   public void testUpsertTextExport() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
     createTextFile(0, TOTAL_RECORDS, false);
@@ -279,6 +281,7 @@
   }
 
   /** Make sure mixed update/insert export work correctly. */
+  @Test
   public void testUpsertTextExportWithEscapingDisabled() throws IOException, SQLException {
     final int TOTAL_RECORDS = 10;
     createTextFile(0, TOTAL_RECORDS, false);
diff --git a/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java b/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java
index b79f87c..a23f088 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java
@@ -29,6 +29,8 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.testutil.LobAvroImportTestCase;
 
+import static org.junit.Assert.fail;
+
 /**
  * Tests BLOB/CLOB import for Avro with Oracle Db.
  */
diff --git a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java b/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java
index 396f897..817141b 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java
+++ b/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java
@@ -49,6 +49,11 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the OracleManager implementation.
  *
@@ -473,6 +478,7 @@
     }
   }
 
+  @Test
   public void testPurgeClosedConnections() throws Exception {
     // Ensure that after an Oracle ConnManager releases any connections
     // back into the cache (or closes them as redundant), it does not
@@ -533,6 +539,7 @@
     m2.close(); // Close the manager's active connection again.
   }
 
+  @Test
   public void testSessionUserName() throws Exception {
     SqoopOptions options = new SqoopOptions(OracleUtils.CONNECT_STRING,
       TABLE_NAME);
diff --git a/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java b/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java
index a93da71..da354bb 100644
--- a/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java
+++ b/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java
@@ -20,13 +20,16 @@
 
 import java.io.IOException;
 import java.sql.Connection;
-import java.sql.SQLException;
 import java.sql.PreparedStatement;
-import java.util.Arrays;
+import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+
 import com.cloudera.sqoop.TestExport;
 import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
 
@@ -185,18 +188,18 @@
     // PGBulkloadManager does not support --columns option.
   }
 
-
+  @Test
   public void testMultiReduceExport() throws IOException, SQLException {
     multiFileTest(2, 10, 2, "-D", "mapred.reduce.tasks=2");
   }
 
-
+  @Test
   public void testMultiReduceExportWithNewProp()
       throws IOException, SQLException {
     multiFileTest(2, 10, 2, "-D", "mapreduce.job.reduces=2");
   }
 
-
+  @Test
   public void testExportWithTablespace() throws IOException, SQLException {
     multiFileTest(1, 10, 1,
                   "-D", "pgbulkload.staging.tablespace=" + TABLESPACE);
diff --git a/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java b/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java
index 0ac4599..ed5917f 100644
--- a/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java
@@ -23,6 +23,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.Before;
+import org.junit.Test;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -36,6 +37,9 @@
 import java.sql.Statement;
 import java.util.ArrayList;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  *
  */
@@ -291,6 +295,7 @@
     output.close();
   }
 
+  @Test
   public void testExport() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
@@ -302,6 +307,7 @@
     assertRowCount(2, escapeTableOrSchemaName(TABLE_NAME), connection);
   }
 
+  @Test
   public void testExportUsingProcedure() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
         "2,Bob,2009-04-20,400,sales",
@@ -313,6 +319,7 @@
     assertRowCount(2, escapeTableOrSchemaName(TABLE_NAME), connection);
   }
 
+  @Test
   public void testExportStaging() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
@@ -326,6 +333,7 @@
     assertRowCount(2, escapeTableOrSchemaName(TABLE_NAME), connection);
   }
 
+  @Test
   public void testExportDirect() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
@@ -339,6 +347,7 @@
     assertRowCount(2, escapeTableOrSchemaName(TABLE_NAME), connection);
   }
 
+  @Test
   public void testExportCustomSchema() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
@@ -358,6 +367,7 @@
       connection);
   }
 
+  @Test
   public void testExportCustomSchemaStaging() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
@@ -380,6 +390,7 @@
       connection);
   }
 
+  @Test
   public void testExportCustomSchemaStagingClear()
     throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
@@ -404,6 +415,7 @@
       connection);
   }
 
+  @Test
   public void testExportCustomSchemaDirect() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,2009-04-20,400,sales",
diff --git a/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java b/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java
index 5fddd58..70ee640 100644
--- a/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java
+++ b/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java
@@ -42,6 +42,10 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the PostgresqlManager and DirectPostgresqlManager implementations.
  * The former uses the postgres JDBC driver to perform an import;
@@ -334,6 +338,7 @@
     doImportAndVerify(false, expectedResults, TABLE_NAME, extraArgs);
   }
 
+  @Test
   public void testDirectIncrementalImport() throws IOException {
     String [] expectedResults = { };
 
@@ -344,6 +349,7 @@
     doImportAndVerify(true, expectedResults, TABLE_NAME, extraArgs);
   }
 
+  @Test
   public void testDirectIncrementalImportMerge() throws IOException {
     String [] expectedResults = { };
 
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
index 5f934c3..9a92479 100644
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
+++ b/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportManualTest.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -38,6 +39,9 @@
 import java.sql.Statement;
 import java.util.ArrayList;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  * Please see instructions in SQLServerManagerImportManualTest.
  */
@@ -296,6 +300,7 @@
     output.close();
   }
 
+  @Test
   public void testExport() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,400,sales",
@@ -307,6 +312,7 @@
     assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
   }
 
+  @Test
   public void testExportCustomSchema() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,400,sales",
@@ -327,6 +333,7 @@
     );
   }
 
+  @Test
   public void testExportTableHints() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,400,sales",
@@ -340,6 +347,7 @@
     assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
   }
 
+  @Test
   public void testExportTableHintsMultiple() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,400,sales",
@@ -353,6 +361,7 @@
     assertRowCount(2, escapeObjectName(DBO_TABLE_NAME), conn);
   }
 
+  @Test
   public void testSQLServerBinaryType() throws IOException, SQLException {
     createSQLServerBinaryTypeTable(SCHEMA_DBO, DBO_BINARY_TABLE_NAME);
     createTestFile("inputFile", new String[] {
@@ -365,6 +374,7 @@
   }
 
   /** Make sure mixed update/insert export work correctly. */
+  @Test
   public void testUpsertTextExport() throws IOException, SQLException {
     createTestFile("inputFile", new String[] {
       "2,Bob,400,sales",
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java b/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
index 09f1e6b..1f69725 100644
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
+++ b/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportManualTest.java
@@ -43,6 +43,10 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the SQLServerManager implementation.
  *
diff --git a/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java b/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java
index d64a2cc..8a6bb26 100644
--- a/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java
+++ b/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java
@@ -20,8 +20,6 @@
 
 import java.sql.SQLException;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
@@ -30,11 +28,15 @@
 
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
 /**
  * Test HsqldbManager-specific functionality that overrides SqlManager
  * behavior.
  */
-public class TestHsqldbManager extends TestCase {
+public class TestHsqldbManager {
 
   public static final Log LOG = LogFactory.getLog(
       TestHsqldbManager.class.getName());
diff --git a/src/test/com/cloudera/sqoop/manager/TestSqlManager.java b/src/test/com/cloudera/sqoop/manager/TestSqlManager.java
index 57855fa..bffff4a 100644
--- a/src/test/com/cloudera/sqoop/manager/TestSqlManager.java
+++ b/src/test/com/cloudera/sqoop/manager/TestSqlManager.java
@@ -26,8 +26,6 @@
 import java.sql.Types;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
@@ -36,10 +34,15 @@
 
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
 /**
  * Test methods of the generic SqlManager implementation.
  */
-public class TestSqlManager extends TestCase {
+public class TestSqlManager {
 
   public static final Log LOG = LogFactory.getLog(
       TestSqlManager.class.getName());
diff --git a/src/test/com/cloudera/sqoop/mapreduce/MapreduceTests.java b/src/test/com/cloudera/sqoop/mapreduce/MapreduceTests.java
deleted file mode 100644
index 3f97798..0000000
--- a/src/test/com/cloudera/sqoop/mapreduce/MapreduceTests.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.mapreduce.db.*;
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * All tests for Sqoop new mapreduce-api (com.cloudera.sqoop.mapreduce).
- */
-public final class MapreduceTests {
-
-  private MapreduceTests() { }
-
-  public static Test suite() {
-    TestSuite suite = new TestSuite(
-        "Tests for com.cloudera.sqoop.mapreduce");
-    suite.addTestSuite(TestImportJob.class);
-    suite.addTestSuite(TestDataDrivenDBInputFormat.class);
-    suite.addTestSuite(TestIntegerSplitter.class);
-    suite.addTestSuite(TestTextSplitter.class);
-
-    return suite;
-  }
-}
-
diff --git a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java b/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java
index d3f5549..6377ccd 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java
+++ b/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java
@@ -48,6 +48,10 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test aspects of the DataDrivenImportJob class' failure reporting.
@@ -58,6 +62,8 @@
  * SQOOP_RETHROW_PROPERTY = "sqoop.throwOnError".
  */
 public class TestImportJob extends ImportJobTestCase {
+
+  @Test
   public void testFailedImportDueToIOException() throws IOException {
     // Make sure that if a MapReduce job to do the import fails due
     // to an IOException, we tell the user about it.
@@ -129,6 +135,7 @@
     }
   }
 
+  @Test
   public void testFailedImportDueToJobFail() throws IOException {
     // Test that if the job returns 'false' it still fails and informs
     // the user.
@@ -162,6 +169,7 @@
     }
   }
 
+  @Test
   public void testFailedNoColumns() throws IOException {
     // Make sure that if a MapReduce job to do the import fails due
     // to an IOException, we tell the user about it.
@@ -189,6 +197,7 @@
     }
   }
 
+  @Test
   public void testFailedIllegalColumns() throws IOException {
     // Make sure that if a MapReduce job to do the import fails due
     // to an IOException, we tell the user about it.
@@ -219,6 +228,7 @@
     }
   }
 
+  @Test
   public void testDuplicateColumns() throws IOException {
     // Make sure that if a MapReduce job to do the import fails due
     // to an IOException, we tell the user about it.
@@ -280,6 +290,7 @@
     return strings.toArray(new String[0]);
   }
 
+  @Test
   public void testDeleteTargetDir() throws Exception {
     // Make sure that if a MapReduce job to do the import fails due
     // to an IOException, we tell the user about it.
@@ -328,6 +339,7 @@
     }
   }
 
+  @Test
   public void testManyColumns() throws Exception {
     int numberOfColumns = 7500;
 
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
index fed22b8..c07a38b 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
+++ b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
@@ -23,8 +23,6 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.*;
@@ -37,11 +35,17 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.mapreduce.DBWritable;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test aspects of DataDrivenDBInputFormat.
  */
-public class TestDataDrivenDBInputFormat extends TestCase {
+public class TestDataDrivenDBInputFormat {
 
   private static final Log LOG = LogFactory.getLog(
       TestDataDrivenDBInputFormat.class);
@@ -83,13 +87,13 @@
     createConnection(driverClassName, url);
   }
 
+  @Before
   public void setUp() throws Exception {
     initialize(DRIVER_CLASS, DB_URL);
-    super.setUp();
   }
 
+  @After
   public void tearDown() throws Exception {
-    super.tearDown();
     shutdown();
   }
 
@@ -166,6 +170,7 @@
     }
   }
 
+  @Test
   public void testDateSplits() throws Exception {
     Statement s = connection.createStatement();
     final String DATE_TABLE = "datetable";
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java
index c072fa0..e5a7777 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java
+++ b/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java
@@ -18,6 +18,8 @@
 package com.cloudera.sqoop.mapreduce.db;
 
 
+import org.junit.Test;
+
 /**
  * Test that the IntegerSplitter generates sane splits.
  *
@@ -27,6 +29,7 @@
 public class TestIntegerSplitter
   extends org.apache.sqoop.mapreduce.db.TestIntegerSplitter {
 
+  @Test
   public void testDummy() {
     // Nothing to do
   }
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java
index 4927c74..e585310 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java
@@ -18,6 +18,8 @@
 package com.cloudera.sqoop.mapreduce.db;
 
 
+import org.junit.Test;
+
 /**
  * Test that the TextSplitter implementation creates a sane set of splits.
  * @deprecated use org.apache.sqoop.mapreduce.db.TestTextSplitter instead.
@@ -26,6 +28,7 @@
 public class TestTextSplitter extends
   org.apache.sqoop.mapreduce.db.TestTextSplitter {
 
+  @Test
   public void testDummy() {
     // Nothing to do
   }
diff --git a/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java b/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java
index 0ac6bdc..1fb7324 100644
--- a/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java
+++ b/src/test/com/cloudera/sqoop/metastore/TestSavedJobs.java
@@ -32,11 +32,15 @@
 import com.cloudera.sqoop.metastore.hsqldb.AutoHsqldbStorage;
 import com.cloudera.sqoop.tool.VersionTool;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.sql.Connection;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  * Test the metastore and job-handling features.
  *
@@ -44,14 +48,14 @@
  * The metastore URL is configured to be in-memory, and drop all
  * state between individual tests.
  */
-public class TestSavedJobs extends TestCase {
+public class TestSavedJobs {
 
   public static final String TEST_AUTOCONNECT_URL =
       "jdbc:hsqldb:mem:sqoopmetastore";
   public static final String TEST_AUTOCONNECT_USER = "SA";
   public static final String TEST_AUTOCONNECT_PASS = "";
 
-  @Override
+  @Before
   public void setUp() throws Exception {
     // Delete db state between tests.
     resetJobSchema();
@@ -95,6 +99,7 @@
     return conf;
   }
 
+  @Test
   public void testAutoConnect() throws IOException {
     // By default, we should be able to auto-connect with an
     // empty connection descriptor. We should see an empty
@@ -112,6 +117,7 @@
     storage.close();
   }
 
+  @Test
   public void testCreateDeleteJob() throws IOException {
     Configuration conf = newConf();
     JobStorageFactory ssf = new JobStorageFactory(conf);
@@ -167,6 +173,7 @@
     storage.close();
   }
 
+  @Test
     public void testCreateJobWithExtraArgs() throws IOException {
         Configuration conf = newConf();
         JobStorageFactory ssf = new JobStorageFactory(conf);
@@ -207,6 +214,7 @@
         storage.close();
     }
 
+  @Test
   public void testMultiConnections() throws IOException {
     // Ensure that a job can be retrieved when the storage is
     // closed and reopened.
diff --git a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java b/src/test/com/cloudera/sqoop/orm/TestClassWriter.java
index 10a0969..ea31e9d 100644
--- a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java
+++ b/src/test/com/cloudera/sqoop/orm/TestClassWriter.java
@@ -30,7 +30,6 @@
 import java.util.jar.JarEntry;
 import java.util.jar.JarInputStream;
 
-import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,11 +49,17 @@
 
 import java.lang.reflect.Field;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test that the ClassWriter generates Java classes based on the given table,
  * which compile.
  */
-public class TestClassWriter extends TestCase {
+public class TestClassWriter {
 
   public static final Log LOG =
       LogFactory.getLog(TestClassWriter.class.getName());
diff --git a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java b/src/test/com/cloudera/sqoop/orm/TestParseMethods.java
index cec7614..017fb9f 100644
--- a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java
+++ b/src/test/com/cloudera/sqoop/orm/TestParseMethods.java
@@ -39,6 +39,10 @@
 import com.cloudera.sqoop.testutil.ExplicitSetMapper;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that the parse() methods generated in user SqoopRecord implementations
@@ -87,6 +91,7 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testTemporaryRootDirParse() throws Exception {
     String customRoot = "customroot";
     String[] args = new String[] {"--"+BaseSqoopTool.TEMP_ROOTDIR_ARG, customRoot};
@@ -156,6 +161,7 @@
     }
   }
 
+  @Test
   public void testDefaults() throws IOException {
     String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
     String [] vals = { "64", "'foo'", "128" };
@@ -164,6 +170,7 @@
     runParseTest(",", "\\n", "\\\"", "\\", false);
   }
 
+  @Test
   public void testRequiredEnclose() throws IOException {
     String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
     String [] vals = { "64", "'foo'", "128" };
@@ -172,6 +179,7 @@
     runParseTest(",", "\\n", "\\\"", "\\", true);
   }
 
+  @Test
   public void testStringEscapes() throws IOException {
     String [] types = {
       "VARCHAR(32)",
@@ -192,6 +200,7 @@
     runParseTest(",", "\\n", "\\\'", "\\", false);
   }
 
+  @Test
   public void testNumericTypes() throws IOException {
     String [] types = {
       "INTEGER",
@@ -218,6 +227,7 @@
     runParseTest(",", "\\n", "\\\'", "\\", false);
   }
 
+  @Test
   public void testFieldSetter() throws IOException {
     ClassLoader prevClassLoader = null;
 
diff --git a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
index f8021be..6310a39 100644
--- a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
@@ -24,7 +24,6 @@
 import com.cloudera.sqoop.metastore.JobData;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.google.common.collect.ObjectArrays;
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -44,10 +43,12 @@
 import java.sql.SQLException;
 import java.util.Arrays;
 
+import static org.junit.Assert.fail;
+
 /**
  * Class that implements common methods required for tests.
  */
-public abstract class BaseSqoopTestCase extends TestCase {
+public abstract class BaseSqoopTestCase {
 
   public static class DummyDataPublisher extends SqoopJobDataPublisher {
     public static String hiveTable;
diff --git a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java b/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java
index e2adc52..2433275 100644
--- a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java
@@ -37,6 +37,9 @@
 import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
 import com.cloudera.sqoop.tool.ExportTool;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  * Class that implements common methods required for tests which export data
  * from HDFS to databases, to verify correct export.
diff --git a/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java b/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java
index 08408a5..6368980 100644
--- a/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java
@@ -23,6 +23,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +38,9 @@
 import com.cloudera.sqoop.util.ClassLoaderStack;
 import org.junit.Before;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  * Class that implements common methods required for tests which import data
  * from SQL into HDFS and verify correct import.
diff --git a/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java b/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java
index 468673f..7469799 100644
--- a/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java
@@ -18,7 +18,9 @@
 
 package com.cloudera.sqoop.testutil;
 
+import org.junit.After;
 import org.junit.FixMethodOrder;
+import org.junit.Test;
 import org.junit.runners.MethodSorters;
 import java.io.IOException;
 import java.io.InputStream;
@@ -39,6 +41,10 @@
 import org.apache.sqoop.io.CodecMap;
 import org.apache.sqoop.lib.BlobRef;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 /**
  * Tests BLOB/CLOB import for Avro.
  */
@@ -72,7 +78,7 @@
     return false;
   }
 
-  @Override
+  @After
   public void tearDown() {
     try {
       // Clean up the database on our way out.
@@ -158,6 +164,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testBlobAvroImportInline() throws IOException, SQLException {
     String [] types = { getBlobType() };
     String expectedVal = "This is short BLOB data";
@@ -185,6 +192,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testBlobAvroImportExternal() throws IOException, SQLException {
     String [] types = { getBlobType() };
     String data = "This is short BLOB data";
@@ -234,6 +242,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testBlobCompressedAvroImportInline()
       throws IOException, SQLException {
     String [] types = { getBlobType() };
@@ -267,6 +276,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testBlobCompressedAvroImportExternal()
       throws IOException, SQLException {
     String [] types = { getBlobType() };
@@ -323,6 +333,7 @@
    * @throws IOException
    * @throws SQLException
    */
+  @Test
   public void testBlobAvroImportMultiCols() throws IOException, SQLException {
     String [] types = { getBlobType(), getBlobType(), getBlobType(), };
     String expectedVal1 = "This is short BLOB data1";
@@ -357,24 +368,29 @@
     assertEquals(getColName(2), expectedVal3, returnVal);
   }
 
+  @Test
   public void testClobAvroImportInline() throws IOException, SQLException {
     // TODO: add tests for CLOB support for Avro import
   }
 
+  @Test
   public void testClobAvroImportExternal() throws IOException, SQLException {
     // TODO: add tests for CLOB support for Avro import
   }
 
+  @Test
   public void testClobCompressedAvroImportInline()
       throws IOException, SQLException {
     // TODO: add tests for CLOB support for Avro import
   }
 
+  @Test
   public void testClobCompressedAvroImportExternal()
       throws IOException, SQLException {
     // TODO: add tests for CLOB support for Avro import
   }
 
+  @Test
   public void testClobAvroImportMultiCols() throws IOException, SQLException {
     // TODO: add tests for CLOB support for Avro import
   }
diff --git a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java b/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java
index ecbaa83..7db044c 100644
--- a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java
+++ b/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java
@@ -29,6 +29,12 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Class that implements common tests that should be applied to all jdbc
  * drivers that we want to interop with.
diff --git a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java b/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java
index 1793626..da1ef65 100644
--- a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java
+++ b/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java
@@ -38,12 +38,15 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that tool plugins work.
  */
-public class TestToolPlugin extends TestCase {
+public class TestToolPlugin {
 
   public static final Log LOG = LogFactory
       .getLog(TestToolPlugin.class.getName());
@@ -102,6 +105,7 @@
     }
   }
 
+  @Test
   public void testPlugin() {
     // Register the plugin with SqoopTool.
     Configuration pluginConf = new Configuration();
@@ -132,6 +136,7 @@
     }
   }
 
+  @Test
   public void testNoOverrideTools() {
     // Test that you can't override an existing tool definition. First
     // registration of a tool name wins.
diff --git a/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java b/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java
index 3f0bfb9..6d3f0f3 100644
--- a/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java
+++ b/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java
@@ -22,15 +22,15 @@
 import java.io.FileWriter;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.junit.Assert;
 
 import com.cloudera.sqoop.Sqoop;
+import org.junit.Test;
+
 /**
  * Tests various options file loading scenarios.
  */
-public class TestOptionsFileExpansion extends TestCase {
+public class TestOptionsFileExpansion {
 
   /**
    * Text from options file 1. Each string represents a new line.
@@ -141,17 +141,20 @@
     "--efgh",
   };
 
+  @Test
   public void testOptionsFiles() throws Exception {
     checkOptionsFile(OPTIONS_FILE_TEXT1, OPTIONS_FILE_TEXT1_OUTPUT);
     checkOptionsFile(OPTIONS_FILE_TEXT2, OPTIONS_FILE_TEXT2_OUTPUT);
     checkOptionsFile(OPTIONS_FILE_TEXT3, OPTIONS_FILE_TEXT3_OUTPUT);
   }
 
+  @Test
   public void testInvalidOptionsFile() {
     checkInvalidOptionsFile(OPTIONS_FILE_TEXT4);
     checkInvalidOptionsFile(OPTIONS_FILE_TEXT5);
   }
 
+  @Test
   public void testMultilineQuotedText() {
     try {
       checkOptionsFile(OPTIONS_FILE_TEXT6, new String[] {});
diff --git a/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java b/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java
index cd13adc..b6b072f 100644
--- a/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java
+++ b/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java
@@ -17,14 +17,17 @@
  */
 package com.cloudera.sqoop.util;
 
-import junit.framework.TestCase;
 import org.apache.sqoop.util.SubstitutionUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  *
  */
-public class TestSubstitutionUtils extends TestCase {
+public class TestSubstitutionUtils {
 
+  @Test
   public void testRemoveEscapeCharacters() {
     assertEquals("\\N", SubstitutionUtils.removeEscapeCharacters("\\\\N"));
     assertEquals("\n", SubstitutionUtils.removeEscapeCharacters("\\n"));
diff --git a/src/test/org/apache/sqoop/TestAutoResetMapper.java b/src/test/org/apache/sqoop/TestAutoResetMapper.java
index 4c8282c..fd29c2d 100644
--- a/src/test/org/apache/sqoop/TestAutoResetMapper.java
+++ b/src/test/org/apache/sqoop/TestAutoResetMapper.java
@@ -37,6 +37,10 @@
 import org.junit.Before;
 
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 public class TestAutoResetMapper extends ImportJobTestCase {
 
@@ -140,6 +144,7 @@
     super.tearDown();
   }
 
+  @Test
   public void testMultiTableImportWithAutoMapperReset() throws IOException {
 
     String[] argv = getArgv();
diff --git a/src/test/org/apache/sqoop/TestBigDecimalExport.java b/src/test/org/apache/sqoop/TestBigDecimalExport.java
index 80cdad5..414e3d9 100644
--- a/src/test/org/apache/sqoop/TestBigDecimalExport.java
+++ b/src/test/org/apache/sqoop/TestBigDecimalExport.java
@@ -35,6 +35,9 @@
 
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * Test exporting lines that are created via both options of
@@ -101,10 +104,12 @@
     assertEquals(expected2, actual2);
   }
 
+  @Test
   public void testBigDecimalDefault() throws IOException, SQLException {
     runBigDecimalExport("0.000001,0.0000001");
   }
 
+  @Test
   public void testBigDecimalNoFormat() throws IOException, SQLException {
     runBigDecimalExport("0.000001,1E-7");
   }
diff --git a/src/test/org/apache/sqoop/TestBigDecimalImport.java b/src/test/org/apache/sqoop/TestBigDecimalImport.java
index 76e4704..d265d17 100644
--- a/src/test/org/apache/sqoop/TestBigDecimalImport.java
+++ b/src/test/org/apache/sqoop/TestBigDecimalImport.java
@@ -30,6 +30,9 @@
 
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * Test the sqoop.bigdecimal.format.string parameter default behavior and when
@@ -70,11 +73,13 @@
     return line;
   }
 
+  @Test
   public void testBigDecimalDefault() throws IOException {
     String line = runBigDecimalImport(null);
     assertEquals("0.000001,0.0000001", line);
   }
 
+  @Test
   public void testBigDecimalNoFormat() throws IOException {
     List<String> args = new ArrayList<String>();
     args.add("-Dsqoop.bigdecimal.format.string=false");
diff --git a/src/test/org/apache/sqoop/TestExportUsingProcedure.java b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
index 8182c8d..b4b46f2 100644
--- a/src/test/org/apache/sqoop/TestExportUsingProcedure.java
+++ b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
@@ -29,8 +29,6 @@
 import java.sql.Time;
 import java.sql.Types;
 
-import junit.framework.JUnit4TestAdapter;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.manager.GenericJdbcManager;
 import org.apache.sqoop.tool.ExportTool;
@@ -41,17 +39,18 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestExport;
 import org.junit.Rule;
+import org.junit.rules.TestName;
+
+import static org.junit.Assert.assertEquals;
+
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
+
 
 /**
  * We'll use H2 as a database as the version of HSQLDB we currently depend on
  * (1.8) doesn't include support for stored procedures.
  */
-@RunWith(JUnit4.class)
 public class TestExportUsingProcedure extends TestExport {
   private static final String PROCEDURE_NAME = "INSERT_PROCEDURE";
   /**
@@ -62,6 +61,8 @@
   private String[] names;
   private String[] types;
   private Connection connection;
+  @Rule
+  public TestName name = new TestName();
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
@@ -70,11 +71,6 @@
   public TestName testName = new TestName();
 
   @Override
-  public String getName() {
-    return testName.getMethodName();
-  }
-
-  @Override
   @Before
   public void setUp() {
     super.setUp();
@@ -104,7 +100,7 @@
     create.append(getClass().getName());
     create.append(".insertFunction");
     if (extraNames.length > 0) {
-      create.append(getName());
+      create.append(name.getMethodName());
     }
     create.append('"');
 
@@ -121,7 +117,7 @@
 
   @Override
   protected String getConnectString() {
-    return "jdbc:h2:mem:" + getName();
+    return "jdbc:h2:mem:" + name.getMethodName();
   }
 
   @Override
@@ -139,7 +135,7 @@
     // just use the old class definition even though we've compiled a
     // new one!
     String[] args = newStrArray(additionalArgv, "--" + ExportTool.CALL_ARG,
-        PROCEDURE_NAME, "--" + ExportTool.CLASS_NAME_ARG, getName(), "--"
+        PROCEDURE_NAME, "--" + ExportTool.CLASS_NAME_ARG, name.getMethodName(), "--"
             + ExportTool.CONN_MANAGER_CLASS_NAME,
         GenericJdbcManager.class.getName(), "--" + ExportTool.DRIVER_ARG,
         Driver.class.getName());
@@ -338,9 +335,4 @@
     });
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestExportUsingProcedure.class);
-  }
-
 }
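
For reference, a minimal sketch (not part of this patch; the class and method names are hypothetical) of the JUnit 4 TestName rule pattern that replaces the JUnit 3 getName() call removed above:

import static org.junit.Assert.assertEquals;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

// Illustrative only: a TestName rule exposes the running method's name,
// which junit.framework.TestCase#getName() used to provide.
public class TestNameRuleSketch {

  @Rule
  public TestName name = new TestName();

  @Test
  public void testMethodNameIsAvailable() {
    // With JUnit 3 this value came from getName(); the rule supplies it now.
    assertEquals("testMethodNameIsAvailable", name.getMethodName());
  }
}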
diff --git a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
index e9698be..fb89a0b 100644
--- a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
+++ b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
@@ -27,21 +27,28 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.sqoop.config.ConfigurationConstants;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
 public class TestSqoopJobDataPublisher extends ImportJobTestCase {
 
     public static final Log LOG = LogFactory.getLog(TestSqoopJobDataPublisher.class.getName());
 
+    @Before
     public void setUp() {
         super.setUp();
         HiveImport.setTestMode(true);
     }
 
+    @After
     public void tearDown() {
         super.tearDown();
         HiveImport.setTestMode(false);
diff --git a/src/test/org/apache/sqoop/TestSqoopOptions.java b/src/test/org/apache/sqoop/TestSqoopOptions.java
index fdb8c8d..6d55c33 100644
--- a/src/test/org/apache/sqoop/TestSqoopOptions.java
+++ b/src/test/org/apache/sqoop/TestSqoopOptions.java
@@ -20,11 +20,16 @@
 
 import java.util.Properties;
 
-import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestSqoopOptions extends TestCase {
+import static org.junit.Assert.assertEquals;
+
+public class TestSqoopOptions {
+
+  @Test
   public void testParseColumnParsing() {
     new SqoopOptions() {
+      @Test
       public void testParseColumnMapping() {
         Properties result = new Properties();
         parseColumnMapping("test=INTEGER,test1=DECIMAL(1%2C1),test2=NUMERIC(1%2C%202)", result);
@@ -35,6 +40,7 @@
     }.testParseColumnMapping();
   }
 
+  @Test
   public void testColumnNameCaseInsensitive() {
     SqoopOptions opts = new SqoopOptions();
     opts.setColumns(new String[]{ "AAA", "bbb" });
diff --git a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
index 9000f5d..95c9b56 100644
--- a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
+++ b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
@@ -52,6 +52,10 @@
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
 /**
  * Utility methods that facilitate Accumulo import tests.
  * These test use the MiniAccumuloCluster.  They are
diff --git a/src/test/org/apache/sqoop/accumulo/TestAccumuloImport.java b/src/test/org/apache/sqoop/accumulo/TestAccumuloImport.java
index d52f0f0..db78a19 100644
--- a/src/test/org/apache/sqoop/accumulo/TestAccumuloImport.java
+++ b/src/test/org/apache/sqoop/accumulo/TestAccumuloImport.java
@@ -22,6 +22,8 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test imports of tables into Accumulo.
  */
diff --git a/src/test/org/apache/sqoop/accumulo/TestAccumuloQueryImport.java b/src/test/org/apache/sqoop/accumulo/TestAccumuloQueryImport.java
index be73594..633ce06 100644
--- a/src/test/org/apache/sqoop/accumulo/TestAccumuloQueryImport.java
+++ b/src/test/org/apache/sqoop/accumulo/TestAccumuloQueryImport.java
@@ -22,6 +22,8 @@
 
 import org.junit.Test;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test import of free-form query into Accumulo.
  */
diff --git a/src/test/org/apache/sqoop/accumulo/TestAccumuloUtil.java b/src/test/org/apache/sqoop/accumulo/TestAccumuloUtil.java
index c236b8a..631eeff 100644
--- a/src/test/org/apache/sqoop/accumulo/TestAccumuloUtil.java
+++ b/src/test/org/apache/sqoop/accumulo/TestAccumuloUtil.java
@@ -20,14 +20,15 @@
 
 import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * This tests to verify that Accumulo is present (default when running
  * test cases) and that when in fake not present mode, the method returns
  * false.
  */
-public class TestAccumuloUtil extends TestCase {
+public class TestAccumuloUtil {
 
   @Test
   public void testAccumuloPresent() {
diff --git a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
index 5b170b6..bd911f2 100644
--- a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
+++ b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
@@ -34,6 +34,7 @@
 import org.apache.sqoop.util.password.CredentialProviderPasswordLoader;
 import org.apache.sqoop.util.password.CryptoFileLoader;
 import org.apache.sqoop.util.password.PasswordLoader;
+import org.junit.Test;
 
 import javax.crypto.Cipher;
 import javax.crypto.SecretKey;
@@ -50,6 +51,12 @@
 import java.util.Collections;
 import java.util.Properties;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Set of tests for securing passwords.
  */
@@ -67,6 +74,7 @@
     }
   }
 
+  @Test
   public void testPasswordFilePathInOptionIsEnabled() throws Exception {
     String passwordFilePath = TEMP_BASE_DIR + ".pwd";
     createTempFile(passwordFilePath);
@@ -87,6 +95,7 @@
     }
   }
 
+  @Test
   public void testPasswordFileDoesNotExist() throws Exception {
     try {
       ArrayList<String> extraArgs = new ArrayList<String>();
@@ -105,6 +114,7 @@
     }
   }
 
+  @Test
   public void testPasswordFileIsADirectory() throws Exception {
     try {
       ArrayList<String> extraArgs = new ArrayList<String>();
@@ -123,6 +133,7 @@
     }
   }
 
+  @Test
   public void testBothPasswordOptions() throws Exception {
     String passwordFilePath = TEMP_BASE_DIR + ".pwd";
     createTempFile(passwordFilePath);
@@ -151,6 +162,7 @@
     }
   }
 
+  @Test
   public void testPasswordFilePath() throws Exception {
     String passwordFilePath = TEMP_BASE_DIR + ".pwd";
     createTempFile(passwordFilePath);
@@ -176,6 +188,7 @@
     }
   }
 
+  @Test
   public void testPasswordInDBConfiguration() throws Exception {
     JobConf jobConf = new JobConf(getConf());
     DBConfiguration.configureDB(jobConf, "org.hsqldb.jdbcDriver",
@@ -195,6 +208,7 @@
     assertNotNull(connection);
   }
 
+  @Test
   public void testPasswordNotInJobConf() throws Exception {
     JobConf jobConf = new JobConf(getConf());
     DBConfiguration.configureDB(jobConf, "org.hsqldb.jdbcDriver",
@@ -203,6 +217,7 @@
     assertNull(jobConf.get(DBConfiguration.PASSWORD_PROPERTY, null));
   }
 
+  @Test
   public void testPasswordInMetastoreWithRecordEnabledAndSecureOption()
     throws Exception {
     String passwordFilePath = TEMP_BASE_DIR + ".pwd";
@@ -239,6 +254,7 @@
     assertEquals(passwordFilePath, optionsFromMetastore.getPasswordFilePath());
   }
 
+  @Test
   public void testPasswordInMetastoreWithRecordDisabledAndSecureOption()
     throws Exception {
     String passwordFilePath = TEMP_BASE_DIR + ".pwd";
@@ -274,6 +290,7 @@
     assertEquals(passwordFilePath, optionsFromMetastore.getPasswordFilePath());
   }
 
+  @Test
   public void testPasswordInMetastoreWithRecordEnabledAndNonSecureOption()
     throws Exception {
     ArrayList<String> extraArgs = new ArrayList<String>();
@@ -328,6 +345,7 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testCryptoFileLoader() throws Exception {
     // Current implementation is limited to ECB mode
     Object[][] ciphers = {
@@ -358,6 +376,7 @@
     }
   }
 
+  @Test
   public void testCredentialProviderLoader() throws Exception {
     CredentialProviderPasswordLoader pl =
         new CredentialProviderPasswordLoader();
@@ -403,6 +422,7 @@
     }
   }
 
+  @Test
   public void testPasswordAliasOption() throws Exception {
     CredentialProviderPasswordLoader pl =
         new CredentialProviderPasswordLoader();
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
index 5ef9b2b..32add56 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
@@ -33,7 +33,6 @@
 import java.util.TimeZone;
 
 import com.cloudera.sqoop.SqoopOptions;
-import junit.framework.JUnit4TestAdapter;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -52,13 +51,13 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test that we can export HCatalog tables into databases.
  */
-@RunWith(JUnit4.class)
 public class HCatalogExportTest extends ExportJobTestCase {
   private static final Log LOG =
     LogFactory.getLog(HCatalogExportTest.class);
@@ -68,7 +67,6 @@
   public ExpectedException exception = ExpectedException.none();
 
   @Before
-  @Override
   public void setUp() {
     super.setUp();
     try {
@@ -563,8 +561,4 @@
     utils.launchHCatCli(createViewCmd);
   }
 
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(HCatalogImportTest.class);
-  }
-
 }
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
index 28f83f2..d784a20 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
@@ -35,7 +35,7 @@
 import java.util.Map;
 import java.util.TimeZone;
 
-import junit.framework.JUnit4TestAdapter;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -61,13 +61,14 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Test that we can export HCatalog tables into databases.
  */
-@RunWith(JUnit4.class)
 public class HCatalogImportTest extends ImportJobTestCase {
   private static final Log LOG =
     LogFactory.getLog(HCatalogImportTest.class);
@@ -1070,7 +1071,4 @@
     utils.launchHCatCli(createViewCmd);
   }
 
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(HCatalogImportTest.class);
-  }
 }
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index d3337c7..71a371d 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -18,25 +18,22 @@
 
 package org.apache.sqoop.hcat;
 
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
-
 import org.junit.Before;
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ExportTool;
 import com.cloudera.sqoop.tool.ImportTool;
-import org.junit.Rule;
+
 import org.junit.Test;
+
+import org.junit.Rule;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
 /**
  * Test basic HCatalog related features.
  */
-@RunWith(JUnit4.class)
-public class TestHCatalogBasic extends TestCase {
+public class TestHCatalogBasic {
+
   private static ImportTool importTool;
   private static ExportTool exportTool;
 
@@ -44,7 +41,6 @@
   public ExpectedException thrown = ExpectedException.none();
 
   @Before
-  @Override
   public void setUp() {
     importTool = new ImportTool();
     exportTool = new ExportTool();
@@ -400,8 +396,4 @@
     importTool.validateOptions(opts);
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestHCatalogBasic.class);
-  }
 }
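
For reference, a minimal sketch (not part of this patch; the class name is hypothetical) of the JUnit4TestAdapter workaround deleted above: it wrapped a JUnit 4 class in a JUnit 3 Test so that an Ant build still discovering tests through the JUnit 3 API could run it. Once the build invokes a JUnit 4 runner directly, the static suite() method is unnecessary.

import static org.junit.Assert.assertEquals;

import junit.framework.JUnit4TestAdapter;

import org.junit.Test;

// Illustrative only: the bridge pattern removed throughout this patch.
public class AdapterWorkaroundSketch {

  @Test
  public void testAddition() {
    assertEquals(2, 1 + 1);
  }

  // A JUnit 3 runner looks for this static suite() method; the adapter
  // forwards execution to the JUnit 4 @Test methods above.
  public static junit.framework.Test suite() {
    return new JUnit4TestAdapter(AdapterWorkaroundSketch.class);
  }
}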
diff --git a/src/test/org/apache/sqoop/manager/TestMainframeManager.java b/src/test/org/apache/sqoop/manager/TestMainframeManager.java
index 79cbcb1..9359ac4 100644
--- a/src/test/org/apache/sqoop/manager/TestMainframeManager.java
+++ b/src/test/org/apache/sqoop/manager/TestMainframeManager.java
@@ -44,6 +44,11 @@
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.util.ImportException;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
 /**
  * Test methods of the generic SqlManager implementation.
  */
diff --git a/src/test/org/apache/sqoop/manager/TestSqlManager.java b/src/test/org/apache/sqoop/manager/TestSqlManager.java
index 08413b0..571ed50 100644
--- a/src/test/org/apache/sqoop/manager/TestSqlManager.java
+++ b/src/test/org/apache/sqoop/manager/TestSqlManager.java
@@ -26,13 +26,10 @@
 import org.junit.Test;
 
 import com.cloudera.sqoop.SqoopOptions;
-
-import junit.framework.TestCase;
-
 /**
  * Test methods of the generic SqlManager implementation.
  */
-public class TestSqlManager extends TestCase {
+public class TestSqlManager {
 
   @Test
   public void testFilteringSpecifiedColumnNamesWhenNoneSpecified() {
diff --git a/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java b/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
index ed6ba3b..78103ec 100644
--- a/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
+++ b/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
@@ -36,6 +36,9 @@
 import com.cloudera.sqoop.manager.ConnManager;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test authentication.
  *
diff --git a/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java b/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
index 420ab19..90dff97 100644
--- a/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
@@ -38,6 +38,9 @@
 import com.cloudera.sqoop.manager.MySQLTestUtils;
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Test free form query import with the MySQL db.
@@ -51,13 +54,13 @@
   private final String procName = "MYSQL_CALL_EXPORT_PROC";
   private MySQLTestUtils mySQLTestUtils = new MySQLTestUtils();
 
-  @Override
+  @Before
   public void setUp() {
     super.setUp();
     createObjects();
   }
 
-  @Override
+  @After
   public void tearDown() {
     try {
       Statement stmt = getManager().getConnection().createStatement();
@@ -194,6 +197,7 @@
     }
   }
 
+  @Test
   public void testExportUsingProcedure() throws IOException, SQLException {
     String[] lines = {
       "0,textfield0,2002-12-29,3300",
diff --git a/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java b/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
index 8a6b6fe..7ecc929 100644
--- a/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
@@ -28,12 +28,16 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
 
+import static org.junit.Assert.assertEquals;
+
 public class MySqlColumnEscapeImportTest extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory.getLog(
@@ -63,7 +67,7 @@
     return "DROP TABLE IF EXISTS " + getManager().escapeTableName(table);
   }
 
-    @Override
+  @After
   public void tearDown() {
       try {
         dropTableIfExists(getTableName());
@@ -93,6 +97,7 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testEscapeColumnWithDoubleQuote() throws IOException {
     String[] colNames = { "column\"withdoublequote" };
     String[] types = { "VARCHAR(50)"};
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
index 92012c4..b48b379 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
@@ -29,6 +29,8 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test the DirectNetezzaManager implementation's exportJob() functionality.
  */
@@ -179,6 +181,7 @@
   }
 
 
+  @Test
   public void testDifferentNullStrings() throws IOException, SQLException {
     ColumnGenerator[] extraCols = new ColumnGenerator[] {
         new NullColumnGenerator(),
@@ -218,23 +221,27 @@
 
 
   @Override
+  @Test
   public void testMultiMapTextExportWithStaging() throws IOException,
       SQLException {
     // disable this test as staging is not supported in direct mode
   }
 
   @Override
+  @Test
   public void testMultiTransactionWithStaging() throws IOException,
       SQLException {
     // disable this test as staging is not supported in direct mode
   }
 
   @Override
+  @Test
   public void testColumnsExport() throws IOException, SQLException {
     // disable this test as it is not supported in direct mode
   }
 
   @Override
+  @Test
   public void testSequenceFileExport() throws IOException, SQLException {
     // disable this test as it is not supported in direct mode
   }
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
index dbf9988..03cef89 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
@@ -34,6 +34,7 @@
 import org.junit.Before;
 
 import com.cloudera.sqoop.SqoopOptions;
+import org.junit.Test;
 
 /**
  * Test the DirectNetezzaManager implementation's hcatalog export functionality.
@@ -88,6 +89,7 @@
     setUpNZ();
   }
 
+  @Test
   public void testIntTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -110,6 +112,7 @@
     runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
   }
 
+  @Test
   public void testStringTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -125,6 +128,7 @@
     runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
   }
 
+  @Test
   public void testNumberTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -142,24 +146,31 @@
 
   // Disable the following tests for direct mode netezza connector
 
+  @Test
   public void testBinaryTypes() throws Exception {
   }
 
+  @Test
   public void testColumnProjection() throws Exception {
   }
 
+  @Test
   public void testStaticPartitioning() throws Exception {
   }
 
+  @Test
   public void testDynamicPartitioning() throws Exception {
   }
 
+  @Test
   public void testStaticAndDynamicPartitioning() throws Exception {
   }
 
+  @Test
   public void testSequenceFile() throws Exception {
   }
 
+  @Test
   public void testTextFile() throws Exception {
   }
 }
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
index cf56714..ed4ae19 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
@@ -34,6 +34,7 @@
 import org.junit.Before;
 
 import com.cloudera.sqoop.SqoopOptions;
+import org.junit.Test;
 
 /**
  * Test the DirectNetezzaManager implementation's hcatalog import functionality.
@@ -88,6 +89,7 @@
     setUpNZ();
   }
 
+  @Test
   public void testIntTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -111,6 +113,7 @@
     super.runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
   }
 
+  @Test
   public void testStringTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -127,6 +130,7 @@
     runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
   }
 
+  @Test
   public void testNumberTypes() throws Exception {
     final int TOTAL_RECORDS = 1 * 10;
     String table = getTableName().toUpperCase();
@@ -144,48 +148,63 @@
   }
 
   // Disable the following for direct mode tests
+  @Test
   public void testBinaryTypes() throws Exception {
   }
 
+  @Test
   public void testColumnProjection() throws Exception {
   }
 
+  @Test
   public void testColumnProjectionMissingPartKeys() throws Exception {
   }
 
+  @Test
   public void testStaticPartitioning() throws Exception {
   }
 
+  @Test
   public void testDynamicPartitioning() throws Exception {
   }
 
+  @Test
   public void testStaticAndDynamicPartitioning() throws Exception {
   }
 
+  @Test
   public void testSequenceFile() throws Exception {
   }
 
+  @Test
   public void testTextFile() throws Exception {
   }
 
+  @Test
   public void testTableCreation() throws Exception {
   }
 
+  @Test
   public void testTableCreationWithPartition() throws Exception {
   }
 
+  @Test
   public void testTableCreationWithStorageStanza() throws Exception {
   }
 
+  @Test
   public void testHiveDropDelims() throws Exception {
   }
 
+  @Test
   public void testHiveDelimsReplacement() throws Exception {
   }
 
+  @Test
   public void testDynamicKeyInMiddle() throws Exception {
   }
 
+  @Test
   public void testCreateTableWithPreExistingTable() throws Exception {
   }
 }
diff --git a/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
index 9ba65a8..79946c5 100644
--- a/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
@@ -38,6 +38,8 @@
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 import com.cloudera.sqoop.testutil.CommonArgs;
 
+import static org.junit.Assert.fail;
+
 /**
  * Test the Netezza implementation.
  *
diff --git a/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
index 6a6ed0c..1adbbdb 100644
--- a/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
@@ -45,6 +45,10 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import com.cloudera.sqoop.util.FileListing;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Test the Netezza implementation.
  *
diff --git a/src/test/org/apache/sqoop/manager/oracle/ExportTest.java b/src/test/org/apache/sqoop/manager/oracle/ExportTest.java
index 3752886..23b4c73 100644
--- a/src/test/org/apache/sqoop/manager/oracle/ExportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/ExportTest.java
@@ -18,12 +18,12 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import junit.framework.Assert;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Test exporting data into Oracle.
  */
@@ -41,14 +41,14 @@
 
     int retCode =
  TEST_CASE.runImport("TST_PRODUCT", TEST_CASE.getSqoopConf(), false);
-    Assert.assertEquals("Return code should be 0", 0, retCode);
+    assertEquals("Return code should be 0", 0, retCode);
   }
 
   @Test
   public void testProductExport() throws Exception {
     int retCode =
         TEST_CASE.runExportFromTemplateTable("TST_PRODUCT", "TST_PRODUCT_EXP", false);
-    Assert.assertEquals("Return code should be 0", 0, retCode);
+    assertEquals("Return code should be 0", 0, retCode);
   }
 
   @Test
@@ -56,7 +56,7 @@
     int retCode =
         TEST_CASE.runExportFromTemplateTable("TST_PRODUCT",
             "\"\"T5+_Pr#duct_Exp\"\"", false);
-    Assert.assertEquals("Return code should be 0", 0, retCode);
+    assertEquals("Return code should be 0", 0, retCode);
   }
 
   @AfterClass
diff --git a/src/test/org/apache/sqoop/manager/oracle/ImportTest.java b/src/test/org/apache/sqoop/manager/oracle/ImportTest.java
index 5db9bc2..0002128 100644
--- a/src/test/org/apache/sqoop/manager/oracle/ImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/ImportTest.java
@@ -18,14 +18,14 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.manager.oracle.OraOopConstants.
            OraOopOracleDataChunkMethod;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Test import data from Oracle.
  */
@@ -38,7 +38,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT", getSqoopConf(), false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -55,7 +55,7 @@
 
     try {
       int retCode = runImport("TST_Pr OdUCT", getSqoopConf(), false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
@@ -71,7 +71,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_PART", getSqoopConf(), false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -90,7 +90,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -111,7 +111,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -135,7 +135,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -150,7 +150,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_SUBPART", getSqoopConf(), false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -169,7 +169,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -192,7 +192,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -213,7 +213,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -234,7 +234,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -249,7 +249,7 @@
 
     try {
       int retCode = runImport("\"\"T5+_Pr#duct\"\"", getSqoopConf(), false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
@@ -268,7 +268,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT_PART_IOT", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
diff --git a/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java b/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
index 59b6e3a..8e31c3f 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
@@ -19,7 +19,6 @@
 package org.apache.sqoop.manager.oracle;
 
 import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.junit.Before;
@@ -38,7 +37,7 @@
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-public class OraOopDataDrivenDBInputFormatConnectionCloseTest extends TestCase {
+public class OraOopDataDrivenDBInputFormatConnectionCloseTest {
 
   private static final OraOopLog LOG = OraOopLogFactory.getLog(
       TestOraOopDataDrivenDBInputFormat.class.getName());
diff --git a/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java b/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
index 3811e38..631e4f9 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
@@ -46,7 +46,7 @@
 import com.cloudera.sqoop.Sqoop;
 import com.cloudera.sqoop.manager.OracleUtils;
 
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Base test case for OraOop to handle common functions.
@@ -269,7 +269,7 @@
         rowsImported = Integer.parseInt(matcher.group(2));
       }
     }
-    Assert.assertEquals("Incorrect number of rows imported", rowsInTable,
+    assertEquals("Incorrect number of rows imported", rowsInTable,
         rowsImported);
     return retCode;
   }
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
index 1d3417a..a473f67 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
@@ -38,6 +38,7 @@
 import com.cloudera.sqoop.manager.OracleUtils;
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.junit.Test;
 
 /**
  * Test free form query import with the MySQL db.
@@ -194,6 +195,7 @@
       statement.close();
     }
   }
+  @Test
   public void testExportUsingProcedure() throws IOException, SQLException {
     String[] lines = {
       "0,textfield0,2002-12-29 08:40:00,3300",
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
index d428040..1e3b799 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
@@ -28,12 +28,16 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
 
+import static org.junit.Assert.assertEquals;
+
 public class OracleColumnEscapeImportTest extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory.getLog(
@@ -61,7 +65,7 @@
     OracleUtils.dropTable(table, getManager());
   }
 
-  @Override
+  @After
   public void tearDown() {
     try {
       OracleUtils.dropTable(getTableName(), getManager());
@@ -92,6 +96,7 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testRegexpReplaceEscapeWithSpecialCharacters() throws IOException {
     String [] types = { "VARCHAR(50)"};
     String [] vals = { "'hello, world:'"};
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
index e596646..2a908b3 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
@@ -29,6 +29,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.After;
+import org.junit.Test;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -42,6 +44,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Test free form query import with the Oracle db.
  */
@@ -75,7 +79,7 @@
   /** the names of the tables we're creating. */
   private List<String> tableNames;
 
-  @Override
+  @After
   public void tearDown() {
     // Clean up the database on our way out.
     for (String tableName : tableNames) {
@@ -128,6 +132,7 @@
    * Create a tables with a date column.  Run incremental import on the table
    * with date column as check-column.
    */
+  @Test
   public void testIncrementalImportWithLastModified() throws IOException {
     tableNames = new ArrayList<String>();
     String [] types = { "INT", "VARCHAR(10)", "DATE", };
@@ -156,6 +161,7 @@
                  expectedVal, output);
   }
 
+  @Test
   public void testIncrementalImportWithLastModifiedTimestamp() throws IOException {
     tableNames = new ArrayList<String>();
     String [] types = { "INT", "VARCHAR(10)", "TIMESTAMP", };
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
index fc5f56b..6878608 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.Test;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -43,6 +44,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Test various custom splitters for Oracle.
  */
@@ -111,6 +114,7 @@
     return args.toArray(new String[0]);
   }
 
+  @Test
   public void testTimestampSplitter() throws IOException {
     tableNames = new ArrayList<String>();
     String [] types = { "INT", "VARCHAR(10)", "TIMESTAMP", };
diff --git a/src/test/org/apache/sqoop/manager/oracle/TestOraOopJdbcUrl.java b/src/test/org/apache/sqoop/manager/oracle/TestOraOopJdbcUrl.java
index e842849..33f51af 100644
--- a/src/test/org/apache/sqoop/manager/oracle/TestOraOopJdbcUrl.java
+++ b/src/test/org/apache/sqoop/manager/oracle/TestOraOopJdbcUrl.java
@@ -17,9 +17,6 @@
  */
 
 package org.apache.sqoop.manager.oracle;
-
-import static org.junit.Assert.*;
-import junit.framework.Assert;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -29,6 +26,10 @@
 import org.apache.sqoop.manager.oracle.OraOopUtilities.
            JdbcOracleThinConnectionParsingError;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Unit tests for OraOopJdbcUrl.
  */
@@ -62,21 +63,21 @@
     // Null JDBC URL...
     try {
       actual = new OraOopJdbcUrl(null).parseJdbcOracleThinConnectionString();
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       /* This is what we want to happen. */
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     }
 
     // Empty JDBC URL...
     try {
       actual = new OraOopJdbcUrl("").parseJdbcOracleThinConnectionString();
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       /* This is what we want to happen. */
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     }
 
     // Incorrect number of fragments in the URL...
@@ -84,7 +85,7 @@
       actual =
           new OraOopJdbcUrl("jdbc:oracle:oci8:@dbname.domain")
               .parseJdbcOracleThinConnectionString();
-      Assert.fail(
+      fail(
           "A JdbcOracleThinConnectionParsingError should be been thrown.");
     } catch (JdbcOracleThinConnectionParsingError ex) {
       // This is what we want to happen.
@@ -102,7 +103,7 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:loremipsum:@hostname.domain.com.au:port1521:dbsid")
               .parseJdbcOracleThinConnectionString();
-      Assert.fail(
+      fail(
           "A JdbcOracleThinConnectionParsingError should be been thrown.");
     } catch (JdbcOracleThinConnectionParsingError ex) {
       // This is what we want to happen.
@@ -125,7 +126,7 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:thin:@hostname.domain.com.au:port1521:dbsid")
               .parseJdbcOracleThinConnectionString();
-      Assert.fail(
+      fail(
           "An JdbcOracleThinConnectionParsingError should be been thrown.");
     } catch (JdbcOracleThinConnectionParsingError ex) {
       assertTrue(
@@ -139,7 +140,7 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:thin:@hostname.domain.com.au:-1521:dbsid")
               .parseJdbcOracleThinConnectionString();
-      Assert.fail(
+      fail(
           "An JdbcOracleThinConnectionParsingError should be been thrown.");
     } catch (JdbcOracleThinConnectionParsingError ex) {
       assertTrue(
@@ -153,11 +154,11 @@
           new OraOopJdbcUrl(
               "JDBC:Oracle:tHiN:@hostname.domain.com.au:1521:dbsid")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname.domain.com.au", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals("dbsid", actual.getSid());
+      assertEquals("hostname.domain.com.au", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals("dbsid", actual.getSid());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid JDBC URL...
@@ -166,11 +167,11 @@
           new OraOopJdbcUrl(
               " JDBC : Oracle : tHiN : @hostname.domain.com.au : 1529 : dbsid")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname.domain.com.au", actual.getHost());
-      Assert.assertEquals(1529, actual.getPort());
-      Assert.assertEquals("dbsid", actual.getSid());
+      assertEquals("hostname.domain.com.au", actual.getHost());
+      assertEquals(1529, actual.getPort());
+      assertEquals("dbsid", actual.getSid());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid (sid-based) JDBC URL with parameters...
@@ -179,12 +180,12 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:thin:@hostname:1521:dbsid?param1=loremipsum")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals("dbsid", actual.getSid());
-      Assert.assertEquals(null, actual.getService());
+      assertEquals("hostname", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals("dbsid", actual.getSid());
+      assertEquals(null, actual.getService());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid (service-based) JDBC URL...
@@ -193,12 +194,12 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:thin:@hostname:1521/dbservice.dbdomain")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals(null, actual.getSid());
-      Assert.assertEquals("dbservice.dbdomain", actual.getService());
+      assertEquals("hostname", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals(null, actual.getSid());
+      assertEquals("dbservice.dbdomain", actual.getService());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid (service-based) JDBC URL with slashes...
@@ -207,12 +208,12 @@
           new OraOopJdbcUrl(
               "jdbc:oracle:thin:@//hostname:1521/dbservice.dbdomain")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals(null, actual.getSid());
-      Assert.assertEquals("dbservice.dbdomain", actual.getService());
+      assertEquals("hostname", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals(null, actual.getSid());
+      assertEquals("dbservice.dbdomain", actual.getService());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid (service-based) JDBC URL with parameters...
@@ -220,12 +221,12 @@
       actual = new OraOopJdbcUrl(
          "jdbc:oracle:thin:@hostname:1521/dbservice.dbdomain?param1=loremipsum")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals(null, actual.getSid());
-      Assert.assertEquals("dbservice.dbdomain", actual.getService());
+      assertEquals("hostname", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals(null, actual.getSid());
+      assertEquals("dbservice.dbdomain", actual.getService());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
 
     // Valid (service-based) JDBC URL with slashes and parameters...
@@ -233,12 +234,12 @@
       actual = new OraOopJdbcUrl(
        "jdbc:oracle:thin:@//hostname:1521/dbservice.dbdomain?param1=loremipsum")
               .parseJdbcOracleThinConnectionString();
-      Assert.assertEquals("hostname", actual.getHost());
-      Assert.assertEquals(1521, actual.getPort());
-      Assert.assertEquals(null, actual.getSid());
-      Assert.assertEquals("dbservice.dbdomain", actual.getService());
+      assertEquals("hostname", actual.getHost());
+      assertEquals(1521, actual.getPort());
+      assertEquals(null, actual.getSid());
+      assertEquals("dbservice.dbdomain", actual.getService());
     } catch (JdbcOracleThinConnectionParsingError ex) {
-      Assert.fail(ex.getMessage());
+      fail(ex.getMessage());
     }
   }
 
@@ -250,7 +251,7 @@
     // Null JDBC URL...
     try {
       actual = new OraOopJdbcUrl(null).getConnectionUrl();
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       /* This is what we want to happen. */
     }
@@ -258,7 +259,7 @@
     // Empty JDBC URL...
     try {
       actual = new OraOopJdbcUrl("").getConnectionUrl();
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       /* This is what we want to happen. */
     }
@@ -267,7 +268,7 @@
     actual =
         new OraOopJdbcUrl("jdbc:oracle:thin:@hostname.domain:1521:dbsid")
             .getConnectionUrl();
-    Assert.assertEquals("jdbc:oracle:thin:@hostname.domain:1521:dbsid", actual);
+    assertEquals("jdbc:oracle:thin:@hostname.domain:1521:dbsid", actual);
 
   }
 
diff --git a/src/test/org/apache/sqoop/manager/oracle/TestOraOopUtilities.java b/src/test/org/apache/sqoop/manager/oracle/TestOraOopUtilities.java
index 93592af..0e8f3fe 100644
--- a/src/test/org/apache/sqoop/manager/oracle/TestOraOopUtilities.java
+++ b/src/test/org/apache/sqoop/manager/oracle/TestOraOopUtilities.java
@@ -23,11 +23,12 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 /**
  * Unit tests for OraOopUtilities.
  */
@@ -48,60 +49,60 @@
 
     // table
     context = OraOopUtilities.decodeOracleTableName("oraoop", "junk", null);
-    Assert.assertEquals(context.getSchema(), "ORAOOP");
-    Assert.assertEquals(context.getName(), "JUNK");
+    assertEquals(context.getSchema(), "ORAOOP");
+    assertEquals(context.getName(), "JUNK");
 
     // "table"
     context = OraOopUtilities.decodeOracleTableName("oraoop", "\"Junk\"", null);
-    Assert.assertEquals(context.getSchema(), "ORAOOP");
-    Assert.assertEquals(context.getName(), "Junk");
+    assertEquals(context.getSchema(), "ORAOOP");
+    assertEquals(context.getName(), "Junk");
 
     // schema.table
     context =
         OraOopUtilities.decodeOracleTableName("oraoop", "targusr.junk", null);
-    Assert.assertEquals(context.getSchema(), "TARGUSR");
-    Assert.assertEquals(context.getName(), "JUNK");
+    assertEquals(context.getSchema(), "TARGUSR");
+    assertEquals(context.getName(), "JUNK");
 
     // schema."table"
     context =
         OraOopUtilities.decodeOracleTableName("oraoop", "targusr.\"Junk\"",
             null);
-    Assert.assertEquals(context.getSchema(), "TARGUSR");
-    Assert.assertEquals(context.getName(), "Junk");
+    assertEquals(context.getSchema(), "TARGUSR");
+    assertEquals(context.getName(), "Junk");
 
     // "schema".table
     context =
         OraOopUtilities.decodeOracleTableName("oraoop", "\"Targusr\".junk",
             null);
-    Assert.assertEquals(context.getSchema(), "Targusr");
-    Assert.assertEquals(context.getName(), "JUNK");
+    assertEquals(context.getSchema(), "Targusr");
+    assertEquals(context.getName(), "JUNK");
 
     // "schema"."table"
     String inputStr = "\"Targusr\".\"Junk\"";
     context = OraOopUtilities.decodeOracleTableName("oraoop", inputStr, null);
-    Assert.assertEquals(context.getSchema(), "Targusr");
-    Assert.assertEquals(context.getName(), "Junk");
+    assertEquals(context.getSchema(), "Targusr");
+    assertEquals(context.getName(), "Junk");
 
     // Test for "." within schema...
     context =
         OraOopUtilities.decodeOracleTableName("oraoop", "\"targ.usr\".junk",
             null);
-    Assert.assertEquals(context.getSchema(), "targ.usr");
-    Assert.assertEquals(context.getName(), "JUNK");
+    assertEquals(context.getSchema(), "targ.usr");
+    assertEquals(context.getName(), "JUNK");
 
     // Test for "." within table...
     context =
         OraOopUtilities.decodeOracleTableName("oraoop",
             "targusr.\"junk.tab.with.dots\"", null);
-    Assert.assertEquals(context.getSchema(), "TARGUSR");
-    Assert.assertEquals(context.getName(), "junk.tab.with.dots");
+    assertEquals(context.getSchema(), "TARGUSR");
+    assertEquals(context.getName(), "junk.tab.with.dots");
 
     // Test for "." within schema and within table...
     context =
         OraOopUtilities.decodeOracleTableName("oraoop",
             "\"targ.usr\".\"junk.tab.with.dots\"", null);
-    Assert.assertEquals(context.getSchema(), "targ.usr");
-    Assert.assertEquals(context.getName(), "junk.tab.with.dots");
+    assertEquals(context.getSchema(), "targ.usr");
+    assertEquals(context.getName(), "junk.tab.with.dots");
   }
 
   @Test
@@ -110,7 +111,7 @@
     String actual = OraOopUtilities.getCurrentMethodName();
     String expected = "testgetCurrentMethodName()";
 
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
   }
 
@@ -122,11 +123,11 @@
 
     expected = "1_1";
     actual = OraOopUtilities.generateDataChunkId(1, 1);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = "1234_99";
     actual = OraOopUtilities.generateDataChunkId(1234, 99);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
   }
 
   @Test
@@ -134,7 +135,7 @@
 
     try {
       OraOopUtilities.getDuplicatedStringArrayValues(null, false);
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       // This is what we want to happen.
     }
@@ -143,49 +144,49 @@
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] {}, false);
-    Assert.assertEquals(0, duplicates.length);
+    assertEquals(0, duplicates.length);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "a", "b",
             "c", }, false);
-    Assert.assertEquals(0, duplicates.length);
+    assertEquals(0, duplicates.length);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "a", "A",
             "b", }, false);
-    Assert.assertEquals(0, duplicates.length);
+    assertEquals(0, duplicates.length);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "a", "A",
             "b", }, true);
-    Assert.assertEquals(1, duplicates.length);
-    Assert.assertEquals("A", duplicates[0]);
+    assertEquals(1, duplicates.length);
+    assertEquals("A", duplicates[0]);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "A", "a",
             "b", }, true);
-    Assert.assertEquals(1, duplicates.length);
-    Assert.assertEquals("a", duplicates[0]);
+    assertEquals(1, duplicates.length);
+    assertEquals("a", duplicates[0]);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "A", "a",
             "b", "A", }, false);
-    Assert.assertEquals(1, duplicates.length);
-    Assert.assertEquals("A", duplicates[0]);
+    assertEquals(1, duplicates.length);
+    assertEquals("A", duplicates[0]);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "A", "a",
             "b", "A", }, true);
-    Assert.assertEquals(2, duplicates.length);
-    Assert.assertEquals("a", duplicates[0]);
-    Assert.assertEquals("A", duplicates[1]);
+    assertEquals(2, duplicates.length);
+    assertEquals("a", duplicates[0]);
+    assertEquals("A", duplicates[1]);
 
     duplicates =
         OraOopUtilities.getDuplicatedStringArrayValues(new String[] { "A", "a",
             "b", "A", "A", }, true);
-    Assert.assertEquals(2, duplicates.length);
-    Assert.assertEquals("a", duplicates[0]);
-    Assert.assertEquals("A", duplicates[1]);
+    assertEquals(2, duplicates.length);
+    assertEquals("a", duplicates[0]);
+    assertEquals("A", duplicates[1]);
   }
 
   @Test
@@ -206,17 +207,14 @@
     } catch (Exception ex) {
       String msg = OraOopUtilities.getFullExceptionMessage(ex);
       if (!msg.contains("IOException") || !msg.contains("lorem ipsum!")) {
-        Assert
-            .fail("Inner exception text has not been included in the message");
+        fail("Inner exception text has not been included in the message");
       }
       if (!msg.contains("SQLException") || !msg.contains("dolor sit amet")) {
-        Assert
-            .fail("Inner exception text has not been included in the message");
+        fail("Inner exception text has not been included in the message");
       }
       if (!msg.contains("RuntimeException")
           || !msg.contains("consectetur adipisicing elit")) {
-        Assert
-            .fail("Outer exception text has not been included in the message");
+        fail("Outer exception text has not been included in the message");
       }
     }
   }
@@ -225,7 +223,7 @@
   public void testGetOraOopOracleDataChunkMethod() {
     try {
       OraOopUtilities.getOraOopOracleDataChunkMethod(null);
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       // This is what we want to happen.
     }
@@ -235,7 +233,7 @@
 
     // Check the default is ROWID
     dataChunkMethod = OraOopUtilities.getOraOopOracleDataChunkMethod(conf);
-    Assert.assertEquals(OraOopConstants.OraOopOracleDataChunkMethod.ROWID,
+    assertEquals(OraOopConstants.OraOopOracleDataChunkMethod.ROWID,
         dataChunkMethod);
 
     // Invalid value specified
@@ -246,18 +244,17 @@
     String logText = OraOopUtilities.LOG.getLogEntries();
     OraOopUtilities.LOG.setCacheLogEntries(false);
     if (!logText.toLowerCase().contains("loremipsum")) {
-      Assert
-          .fail("The LOG should inform the user they've selected an invalid "
+      fail("The LOG should inform the user they've selected an invalid "
               + "data chunk method - and what that was.");
     }
-    Assert.assertEquals("Should have used the default value",
+    assertEquals("Should have used the default value",
         OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD_DEFAULT,
         dataChunkMethod);
 
     // Valid value specified
     conf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD, "partition");
     dataChunkMethod = OraOopUtilities.getOraOopOracleDataChunkMethod(conf);
-    Assert.assertEquals(OraOopConstants.OraOopOracleDataChunkMethod.PARTITION,
+    assertEquals(OraOopConstants.OraOopOracleDataChunkMethod.PARTITION,
         dataChunkMethod);
   }
 
@@ -268,7 +265,7 @@
     try {
       OraOopUtilities.getOraOopOracleBlockToSplitAllocationMethod(null,
           OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.RANDOM);
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       // This is what we want to happen.
     }
@@ -280,7 +277,7 @@
     allocationMethod =
         OraOopUtilities.getOraOopOracleBlockToSplitAllocationMethod(conf,
             OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.RANDOM);
-    Assert.assertEquals(
+    assertEquals(
         OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.RANDOM,
         allocationMethod);
 
@@ -289,7 +286,7 @@
         OraOopUtilities.getOraOopOracleBlockToSplitAllocationMethod(
            conf,
            OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.SEQUENTIAL);
-    Assert.assertEquals(
+    assertEquals(
         OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.SEQUENTIAL,
         allocationMethod);
 
@@ -305,13 +302,12 @@
     String logText = OraOopUtilities.LOG.getLogEntries();
     OraOopUtilities.LOG.setCacheLogEntries(false);
     if (!logText.toLowerCase().contains("loremipsum")) {
-      Assert
-          .fail("The LOG should inform the user they've selected an invalid "
+      fail("The LOG should inform the user they've selected an invalid "
               + "allocation method - and what that was.");
     }
 
     if (!logText.contains("ROUNDROBIN or SEQUENTIAL or RANDOM")) {
-      Assert.fail("The LOG should inform the user what the valid choices are.");
+      fail("The LOG should inform the user what the valid choices are.");
     }
 
     // An valid property value specified...
@@ -321,7 +317,7 @@
         OraOopUtilities.getOraOopOracleBlockToSplitAllocationMethod(
            conf,
            OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.SEQUENTIAL);
-    Assert.assertEquals(
+    assertEquals(
         OraOopConstants.OraOopOracleBlockToSplitAllocationMethod.SEQUENTIAL,
         allocationMethod);
   }
@@ -333,7 +329,7 @@
     try {
       OraOopUtilities.getOraOopTableImportWhereClauseLocation(null,
           OraOopConstants.OraOopTableImportWhereClauseLocation.SPLIT);
-      Assert.fail("An IllegalArgumentException should be been thrown.");
+      fail("An IllegalArgumentException should be been thrown.");
     } catch (IllegalArgumentException ex) {
       // This is what we want to happen.
     }
@@ -345,7 +341,7 @@
     location =
         OraOopUtilities.getOraOopTableImportWhereClauseLocation(conf,
             OraOopConstants.OraOopTableImportWhereClauseLocation.SPLIT);
-    Assert.assertEquals(
+    assertEquals(
         OraOopConstants.OraOopTableImportWhereClauseLocation.SPLIT, location);
 
     // An invalid property value specified...
@@ -359,13 +355,12 @@
     String logText = OraOopUtilities.LOG.getLogEntries();
     OraOopUtilities.LOG.setCacheLogEntries(false);
     if (!logText.toLowerCase().contains("loremipsum")) {
-      Assert
-          .fail("The LOG should inform the user they've selected an invalid "
+      fail("The LOG should inform the user they've selected an invalid "
               + "where-clause-location - and what that was.");
     }
 
     if (!logText.contains("SUBSPLIT or SPLIT")) {
-      Assert.fail("The LOG should inform the user what the valid choices are.");
+      fail("The LOG should inform the user what the valid choices are.");
     }
 
     // An valid property value specified...
@@ -374,7 +369,7 @@
     location =
         OraOopUtilities.getOraOopTableImportWhereClauseLocation(conf,
             OraOopConstants.OraOopTableImportWhereClauseLocation.SUBSPLIT);
-    Assert.assertEquals(
+    assertEquals(
         OraOopConstants.OraOopTableImportWhereClauseLocation.SPLIT, location);
 
   }
@@ -384,11 +379,11 @@
 
     String expected = "   a";
     String actual = OraOopUtilities.padLeft("a", 4);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = "abcd";
     actual = OraOopUtilities.padLeft("abcd", 3);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
   }
 
   @Test
@@ -396,11 +391,11 @@
 
     String expected = "a   ";
     String actual = OraOopUtilities.padRight("a", 4);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
 
     expected = "abcd";
     actual = OraOopUtilities.padRight("abcd", 3);
-    Assert.assertEquals(expected, actual);
+    assertEquals(expected, actual);
   }
 
   @Test
@@ -414,7 +409,7 @@
             "alter session set timezone = '{oracle.sessionTimeZone|GMT}';",
             conf);
     String expected = "alter session set timezone = 'GMT';";
-    Assert.assertEquals("OraOop configuration expression failure.", expected,
+    assertEquals("OraOop configuration expression failure.", expected,
         actual);
 
     // Configuration property value exists...
@@ -424,7 +419,7 @@
             "alter session set timezone = '{oracle.sessionTimeZone|GMT}';",
             conf);
     expected = "alter session set timezone = 'Africa/Algiers';";
-    Assert.assertEquals("OraOop configuration expression failure.", expected,
+    assertEquals("OraOop configuration expression failure.", expected,
         actual);
 
     // Multiple properties in one expression...
@@ -436,14 +431,14 @@
         OraOopUtilities.replaceConfigurationExpression("set {expr1}={expr2};",
             conf);
     expected = "set 1=2;";
-    Assert.assertEquals("OraOop configuration expression failure.", expected,
+    assertEquals("OraOop configuration expression failure.", expected,
         actual);
 
     actual =
         OraOopUtilities.replaceConfigurationExpression(
             "set {expr4|0}={expr5|5};", conf);
     expected = "set 4=5;";
-    Assert.assertEquals("OraOop configuration expression failure.", expected,
+    assertEquals("OraOop configuration expression failure.", expected,
         actual);
   }
 
@@ -451,12 +446,12 @@
   public void testStackContainsClass() {
 
     if (OraOopUtilities.stackContainsClass("lorem.ipsum.dolor")) {
-      Assert.fail("There's no way the stack actually contains this!");
+      fail("There's no way the stack actually contains this!");
     }
 
     String expected = "org.apache.sqoop.manager.oracle.TestOraOopUtilities";
     if (!OraOopUtilities.stackContainsClass(expected)) {
-      Assert.fail("The stack should contain the class:" + expected);
+      fail("The stack should contain the class:" + expected);
     }
   }
 
@@ -465,20 +460,20 @@
     org.apache.hadoop.conf.Configuration conf = new Configuration();
 
     String hint = OraOopUtilities.getImportHint(conf);
-    Assert.assertEquals("Default import hint", "/*+ NO_INDEX(t) */ ", hint);
+    assertEquals("Default import hint", "/*+ NO_INDEX(t) */ ", hint);
 
     conf.set("oraoop.import.hint", "NO_INDEX(t) SCN_ASCENDING");
     hint = OraOopUtilities.getImportHint(conf);
-    Assert.assertEquals("Changed import hint",
+    assertEquals("Changed import hint",
         "/*+ NO_INDEX(t) SCN_ASCENDING */ ", hint);
 
     conf.set("oraoop.import.hint", "       ");
     hint = OraOopUtilities.getImportHint(conf);
-    Assert.assertEquals("Whitespace import hint", "", hint);
+    assertEquals("Whitespace import hint", "", hint);
 
     conf.set("oraoop.import.hint", "");
     hint = OraOopUtilities.getImportHint(conf);
-    Assert.assertEquals("Blank import hint", "", hint);
+    assertEquals("Blank import hint", "", hint);
 
   }
 
@@ -491,43 +486,43 @@
     expected.add("abcde");
     expected.add("ghijklm");
     result = OraOopUtilities.splitStringList("abcde,ghijklm");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("\"abcde\"");
     expected.add("\"ghijklm\"");
     result = OraOopUtilities.splitStringList("\"abcde\",\"ghijklm\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("abcde");
     expected.add("\"ghijklm\"");
     result = OraOopUtilities.splitStringList("abcde,\"ghijklm\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("\"abcde\"");
     expected.add("ghijklm");
     result = OraOopUtilities.splitStringList("\"abcde\",ghijklm");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("\"ab,cde\"");
     expected.add("ghijklm");
     result = OraOopUtilities.splitStringList("\"ab,cde\",ghijklm");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("abcde");
     expected.add("\"ghi,jklm\"");
     result = OraOopUtilities.splitStringList("abcde,\"ghi,jklm\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("\"ab,cde\"");
     expected.add("\"ghi,jklm\"");
     result = OraOopUtilities.splitStringList("\"ab,cde\",\"ghi,jklm\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("\"ab,cde\"");
@@ -541,7 +536,7 @@
         OraOopUtilities
             .splitStringList("\"ab,cde\",\"ghi,jklm\",\",Lorem\",\"ip!~sum\","
                 + "\"do,lo,,r\",\"s#it\",\"am$e$t\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("LOREM");
@@ -550,7 +545,7 @@
     expected.add("SIT");
     expected.add("AMET");
     result = OraOopUtilities.splitStringList("LOREM,IPSUM,DOLOR,SIT,AMET");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
   }
 
   @Test
@@ -566,7 +561,7 @@
     expected.add("AMET");
     result =
         OraOopUtilities.splitOracleStringList("lorem,ipsum,dolor,sit,amet");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("LOREM");
@@ -577,7 +572,7 @@
     result =
         OraOopUtilities
             .splitOracleStringList("lorem,\"ipsum\",\"dolor\",sit,\"amet\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
 
     expected = new ArrayList<String>();
     expected.add("LOREM");
@@ -588,7 +583,7 @@
     result =
         OraOopUtilities
           .splitOracleStringList("lorem,\"ip,sum\",\"dol$or\",sit,\"am!~#et\"");
-    Assert.assertEquals(expected, result);
+    assertEquals(expected, result);
   }
 
   @Test
@@ -602,18 +597,18 @@
     conf.set(confProperty, "");
     OraOopUtilities.appendJavaSecurityEgd(conf);
     actual = conf.get(confProperty);
-    Assert.assertEquals("Append to empty string", expected, actual);
+    assertEquals("Append to empty string", expected, actual);
 
     expected = "-Djava.security.egd=file:/dev/random";
     conf.set(confProperty, expected);
     OraOopUtilities.appendJavaSecurityEgd(conf);
     actual = conf.get(confProperty);
-    Assert.assertEquals("Append to empty string", expected, actual);
+    assertEquals("Append to empty string", expected, actual);
 
     expected = confValue + " -Xmx201m";
     conf.set(confProperty, "-Xmx201m");
     OraOopUtilities.appendJavaSecurityEgd(conf);
     actual = conf.get(confProperty);
-    Assert.assertEquals("Append to empty string", expected, actual);
+    assertEquals("Append to empty string", expected, actual);
   }
 }
diff --git a/src/test/org/apache/sqoop/manager/oracle/TestOracleTable.java b/src/test/org/apache/sqoop/manager/oracle/TestOracleTable.java
index 854d826..b9f8c0f 100644
--- a/src/test/org/apache/sqoop/manager/oracle/TestOracleTable.java
+++ b/src/test/org/apache/sqoop/manager/oracle/TestOracleTable.java
@@ -18,10 +18,10 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import junit.framework.Assert;
-
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Unit tests for OracleTable.
  */
@@ -30,13 +30,13 @@
   @Test
   public void testToString() {
     OracleTable table = new OracleTable("ORAOOP", "TEST_TABLE");
-    Assert.assertEquals("\"ORAOOP\".\"TEST_TABLE\"", table.toString());
+    assertEquals("\"ORAOOP\".\"TEST_TABLE\"", table.toString());
 
     table = new OracleTable("", "TEST_TABLE2");
-    Assert.assertEquals("\"TEST_TABLE2\"", table.toString());
+    assertEquals("\"TEST_TABLE2\"", table.toString());
 
     table = new OracleTable("TEST_TABLE3");
-    Assert.assertEquals("\"TEST_TABLE3\"", table.toString());
+    assertEquals("\"TEST_TABLE3\"", table.toString());
   }
 
 }
diff --git a/src/test/org/apache/sqoop/manager/oracle/TimestampDataTest.java b/src/test/org/apache/sqoop/manager/oracle/TimestampDataTest.java
index 6ceccd1..1babf6c 100644
--- a/src/test/org/apache/sqoop/manager/oracle/TimestampDataTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/TimestampDataTest.java
@@ -18,11 +18,11 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * These tests need to be separate as changing the mapping type for timestamp
  * requires the tests to be run in a different process. Maven needs to be setup
@@ -40,7 +40,7 @@
 
     try {
       int retCode = runImport("TST_PRODUCT", sqoopConf, false);
-      Assert.assertEquals("Return code should be 0", 0, retCode);
+      assertEquals("Return code should be 0", 0, retCode);
 
     } finally {
       cleanupFolders();
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
index 73976a3..8c5176a 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
@@ -39,6 +39,11 @@
 import com.cloudera.sqoop.testutil.ExportJobTestCase;
 import com.cloudera.sqoop.tool.ExportTool;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
 /**
  * Test utilities for export to SQL Server.
 */
@@ -111,12 +116,14 @@
 
   public abstract void createFile(DATATYPES dt, String data) throws Exception;
 
+  @Test
   public void testVarBinary() {
 
     exportTestMethod(DATATYPES.VARBINARY);
 
   }
 
+  @Test
   public void testTime() {
 
     exportTestMethod(DATATYPES.TIME);
@@ -151,11 +158,13 @@
 
   }
 
+  @Test
   public void testDecimal() {
     exportTestMethod(DATATYPES.DECIMAL);
 
   }
 
+  @Test
   public void testNumeric() {
     exportTestMethod(DATATYPES.NUMERIC);
 
@@ -203,6 +212,7 @@
 
   }
 
+  @Test
   public void testMoney() {
     exportTestMethod(DATATYPES.MONEY);
 
@@ -250,11 +260,13 @@
 
   }
 
+  @Test
   public void testImage() {
     exportTestMethod(DATATYPES.IMAGE);
 
   }
 
+  @Test
   public void testBinary() {
     exportTestMethod(DATATYPES.BINARY);
 
@@ -596,17 +608,6 @@
 
   }
 
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @param includeHadoopFlags
-   *            if true, then include -D various.settings=values
-   * @param rowsPerStmt
-   *            number of rows to export in a single INSERT statement.
-   * @param statementsPerTx
-   *            ## of statements to use in a transaction.
-   * @return the argv as an array of strings.
-   */
   protected String[] getArgv(DATATYPES dt) {
     ArrayList<String> args = new ArrayList<String>();
 
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
index 97034a1..21676f0 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileManualTest.java
@@ -38,6 +38,10 @@
 import com.cloudera.sqoop.tool.CodeGenTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 /**
 * Export sequence file to SQL Server test.
 */
@@ -182,17 +186,6 @@
     return codeGenArgv.toArray(new String[0]);
   }
 
-  /**
-  * Create the argv to pass to Sqoop.
-  *
-  * @param includeHadoopFlags
-  *            if true, then include -D various.settings=values
-  * @param rowsPerStmt
-  *            number of rows to export in a single INSERT statement.
-  * @param statementsPerTx
-  *            ## of statements to use in a transaction.
-  * @return the argv as an array of strings.
-  */
   protected String[] getArgv(DATATYPES dt, String... additionalArgv) {
     ArrayList<String> args = new ArrayList<String>();
 
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
index 87bc203..519fb52 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileManualTest.java
@@ -38,6 +38,10 @@
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test import delimited file from SQL Server.
@@ -218,6 +222,7 @@
   }
 
 
+  @Test
   public void testVarBinary() {
     if (!supportsVarBinary()) {
       return;
@@ -225,6 +230,7 @@
     dataTypeTest(DATATYPES.VARBINARY);
   }
 
+  @Test
   public void testTime() {
     if (!supportsTime()) {
       skipped = true;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
index 8b30da0..a0dad8a 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileManualTest.java
@@ -30,13 +30,17 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils.*;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestData.KEY_STRINGS;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Testing import of a sequence file to SQL Server.
  */
@@ -122,6 +126,7 @@
     return opts;
   }
 
+  @Before
   public void setUp() {
     try {
       super.setUp();
@@ -151,6 +156,7 @@
     }
   }
 
+  @After
   public void tearDown() {
     try {
       super.tearDown();
@@ -209,6 +215,7 @@
   verifyType("BIT", getFalseBoolLiteralSqlInput(), getFalseBoolSeqOutput());
   }
 
+  @Test
   public void testBoolean() {
     try {
       super.testBoolean();
@@ -218,6 +225,7 @@
     }
   }
 
+  @Test
   public void testBoolean2() {
     try {
       super.testBoolean2();
@@ -227,6 +235,7 @@
     }
   }
 
+  @Test
   public void testBoolean3() {
     try {
       super.testBoolean3();
@@ -236,6 +245,7 @@
     }
   }
 
+  @Test
   public void testDouble1() {
     try {
       super.testDouble1();
@@ -255,6 +265,7 @@
     }
   }
 
+  @Test
   public void testClob1() {
     try {
       super.testClob1();
@@ -264,6 +275,7 @@
     }
   }
 
+  @Test
   public void testBlob1() {
     try {
     super.testBlob1();
@@ -273,6 +285,7 @@
     }
   }
 
+  @Test
   public void testLongVarChar() {
     try {
       super.testLongVarChar();
@@ -282,6 +295,7 @@
     }
   }
 
+  @Test
   public void testTimestamp1() {
     try {
       super.testTimestamp1();
@@ -291,6 +305,7 @@
     }
   }
 
+  @Test
   public void testTimestamp2() {
     try {
       super.testTimestamp2();
@@ -300,6 +315,7 @@
     }
   }
 
+  @Test
   public void testTimestamp3() {
     try {
       super.testTimestamp3();
@@ -309,6 +325,7 @@
     }
   }
 
+  @Test
   public void testVarBinary() {
     if (!supportsVarBinary()) {
       return;
@@ -316,6 +333,7 @@
     dataTypeTest(DATATYPES.VARBINARY);
   }
 
+  @Test
   public void testTime() {
     if (!supportsTime()) {
       skipped = true;
@@ -360,23 +378,29 @@
     dataTypeTest(DATATYPES.DATETIMEOFFSET);
   }
 
+  @Test
   public void testDecimal() {
     dataTypeTest(DATATYPES.DECIMAL);
   }
 
+  @Test
   public void testNumeric() {
     dataTypeTest(DATATYPES.NUMERIC);
   }
 
+  @Test
   public void testNumeric1() {
   }
 
+  @Test
   public void testNumeric2() {
   }
 
+  @Test
   public void testDecimal1() {
   }
 
+  @Test
   public void testDecimal2() {
   }
 
@@ -418,7 +442,7 @@
   }
 
   @Test
-    public void testTinyInt2() {
+  public void testTinyInt2() {
   }
 
   @Test
@@ -436,6 +460,7 @@
     dataTypeTest(DATATYPES.DATE);
   }
 
+  @Test
   public void testMoney() {
     dataTypeTest(DATATYPES.MONEY);
   }
@@ -475,10 +500,12 @@
     dataTypeTest(DATATYPES.NVARCHAR);
   }
 
+  @Test
   public void testImage() {
     dataTypeTest(DATATYPES.IMAGE);
   }
 
+  @Test
   public void testBinary() {
     dataTypeTest(DATATYPES.BINARY);
   }
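
The hunks above re-annotate every overridden datatype test with @Test because JUnit 4 discovers test methods by annotation rather than by the JUnit 3 convention of extending TestCase and prefixing method names with "test". A minimal sketch of the two styles, with hypothetical class and method names that are not part of the patch:

    // JUnit 3 style (removed by this patch): discovery by inheritance and naming.
    // public class DatatypeSketchTest extends junit.framework.TestCase {
    //   public void testMoney() { /* picked up because the name starts with "test" */ }
    // }

    // JUnit 4 style, as applied throughout the files above.
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class DatatypeSketchTest {        // hypothetical class name
      @Test
      public void testMoney() {              // found via @Test; the "test" prefix is optional
        assertEquals(4, 2 + 2);              // placeholder assertion
      }
    }
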
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportManualTest.java
index 077613f..1999272 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportManualTest.java
@@ -33,15 +33,18 @@
 import com.cloudera.sqoop.hive.TestHiveImport;
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.tool.SqoopTool;
+import org.junit.Before;
+
+import static org.junit.Assert.fail;
 
 /**
  * Test import to Hive from SQL Server.
  */
 public class SQLServerHiveImportManualTest extends TestHiveImport {
 
+  @Before
   public void setUp() {
     super.setUp();
-
   }
 
   protected boolean useHsqldbTestServer() {
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerManualTest.java
index ee576c9..1178e3c 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerManualTest.java
@@ -26,7 +26,6 @@
 import java.sql.SQLException;
 import java.sql.Types;
 import java.util.Map;
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -43,10 +42,15 @@
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.tool.SqoopTool;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
 /**
  * Test methods of the generic SqlManager implementation.
  */
-public class SQLServerManagerManualTest extends TestCase {
+public class SQLServerManagerManualTest {
 
   public static final Log LOG = LogFactory.getLog(
     SQLServerManagerManualTest.class.getName());
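
Dropping "extends TestCase" means assertEquals, assertNotNull, assertNull and fail are no longer inherited instance methods; the added static imports of org.junit.Assert supply them instead. A small sketch of the resulting call sites, with a hypothetical class and test name used only for illustration:

    import org.junit.Test;

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertNotNull;
    import static org.junit.Assert.fail;

    public class StaticAssertSketchTest {      // hypothetical class name
      @Test
      public void assertsResolveStatically() {
        String value = Integer.toString(42);
        assertNotNull(value);                  // was an inherited TestCase method
        assertEquals("42", value);             // now a statically imported Assert method
        if (!"42".equals(value)) {
          fail("unexpected value: " + value);  // likewise statically imported
        }
      }
    }
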
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
index 66b4a51..6a8ab51 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
@@ -26,6 +26,7 @@
 
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.TestMultiCols;
+import org.junit.Test;
 
 /**
  * Test multiple columns SQL Server.
@@ -76,26 +77,32 @@
 
   }
 
+  @Test
   public void testMixed4() {
     // Overridden to bypass test case invalid for MSSQL server
   }
 
+  @Test
   public void testMixed5() {
     // Overridden to bypass test case invalid for MSSQL server
   }
 
+  @Test
   public void testMixed6() {
     // Overridden to bypass test case invalid for MSSQL server
   }
 
+  @Test
   public void testSkipFirstCol() {
     // Overridden to bypass test case invalid for MSSQL server
   }
 
+  @Test
   public void testSkipSecondCol() {
     // Overridden to bypass test case invalid for MSSQL server
   }
 
+  @Test
   public void testSkipThirdCol() {
     // Overridden to bypass test case invalid for MSSQL server
   }
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
index 58ef4b4..c9a5b5e 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
@@ -48,12 +48,19 @@
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.tool.SqoopTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that using multiple mapper splits works.
  */
 public class SQLServerMultiMapsManualTest extends ImportJobTestCase {
 
+  @Before
   public void setUp() {
     super.setUp();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -67,6 +74,7 @@
 
   }
 
+  @After
   public void tearDown() {
     super.tearDown();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -229,6 +237,7 @@
     }
   }
 
+  @Test
   public void testSplitByFirstCol() throws IOException {
     runMultiMapTest("L_ORDERKEY", 10);
   }
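
Without a TestCase parent, setUp() and tearDown() are no longer invoked by the framework automatically, so the patch marks them with @Before and @After while the explicit super.setUp()/super.tearDown() calls keep the ImportJobTestCase fixture working. A self-contained sketch of that lifecycle, with hypothetical names throughout:

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    import static org.junit.Assert.assertEquals;

    public class LifecycleSketchTest {   // hypothetical class name
      private StringBuilder fixture;

      @Before
      public void setUp() {              // runs before each @Test method
        fixture = new StringBuilder("ready");
      }

      @After
      public void tearDown() {           // runs after each @Test method
        fixture = null;
      }

      @Test
      public void fixtureIsInitialized() {
        assertEquals("ready", fixture.toString());
      }
    }
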
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
index 21c950a..cd05aec 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
@@ -48,6 +48,10 @@
 import com.cloudera.sqoop.testutil.ReparseMapper;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 /**
  * Test that the parse() methods generated in user SqoopRecord implementations
@@ -55,6 +59,7 @@
  */
 public class SQLServerParseMethodsManualTest extends ImportJobTestCase {
 
+  @Before
   public void setUp() {
     super.setUp();
     Path p = new Path(getWarehouseDir());
@@ -170,6 +175,7 @@
     }
   }
 
+  @Test
   public void testDefaults() throws IOException {
     String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
     String[] vals = { "64", "'foo'", "128" };
@@ -178,6 +184,7 @@
     runParseTest(",", "\\n", "\\\"", "\\", false);
   }
 
+  @Test
   public void testRequiredEnclose() throws IOException {
     String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
     String[] vals = { "64", "'foo'", "128" };
@@ -186,6 +193,7 @@
     runParseTest(",", "\\n", "\\\"", "\\", true);
   }
 
+  @Test
   public void testStringEscapes() throws IOException {
     String[] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)",
         "VARCHAR(32)", "VARCHAR(32)", };
@@ -196,6 +204,7 @@
     runParseTest(",", "\\n", "\\\'", "\\", false);
   }
 
+  @Test
   public void testNumericTypes() throws IOException {
     String[] types = { "INTEGER", "REAL", "FLOAT", "DATE", "TIME", "BIT", };
     String[] vals = { "42", "36.0", "127.1", "'2009-07-02'", "'11:24:00'",
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
index 613bbce..0057ac9 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
@@ -42,12 +42,19 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --query works in Sqoop.
  */
 public class SQLServerQueryManualTest extends ImportJobTestCase {
 
+  @Before
   public void setUp() {
     super.setUp();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -61,6 +68,7 @@
 
   }
 
+  @After
   public void tearDown() {
     super.tearDown();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -202,6 +210,7 @@
     }
   }
 
+  @Test
   public void testSelectStar() throws IOException {
     runQueryTest("SELECT * FROM " + getTableName()
         + " WHERE L_ORDERKEY > 0 AND $CONDITIONS",
@@ -209,6 +218,7 @@
             + "nocomments\n", 4, 10, getTablePath().toString());
   }
 
+  @Test
   public void testCompoundWhere() throws IOException {
     runQueryTest("SELECT * FROM " + getTableName()
         + " WHERE L_ORDERKEY > 1 AND L_PARTKEY < 4 AND $CONDITIONS",
@@ -216,6 +226,7 @@
             + "nocomments\n", 1, 2, getTablePath().toString());
   }
 
+  @Test
   public void testFailNoConditions() throws IOException {
     String[] argv = getArgv(true, "SELECT * FROM " + getTableName(),
         getTablePath().toString() + "where $CONDITIONS", true);
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByManualTest.java
index 4729aac..f85245a 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByManualTest.java
@@ -42,12 +42,19 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --split-by works.
  */
 public class SQLServerSplitByManualTest extends ImportJobTestCase {
 
+  @Before
   public void setUp() {
     super.setUp();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -61,6 +68,7 @@
 
   }
 
+  @After
   public void tearDown() {
     super.tearDown();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -187,11 +195,13 @@
     }
   }
 
+  @Test
   public void testSplitByFirstCol() throws IOException {
     String splitByCol = "L_ORDERKEY";
     runSplitByTest(splitByCol, 10);
   }
 
+  @Test
   public void testSplitBySecondCol() throws IOException {
     String splitByCol = "L_PARTKEY";
     runSplitByTest(splitByCol, 10);
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereManualTest.java
index 9fad144..10ae03b 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereManualTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereManualTest.java
@@ -43,6 +43,12 @@
 import com.cloudera.sqoop.testutil.SeqFileReader;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that --where works in Sqoop. Methods essentially copied out of the other
@@ -50,6 +56,7 @@
  */
 public class SQLServerWhereManualTest extends ImportJobTestCase {
 
+  @Before

   public void setUp(){
     super.setUp();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -63,6 +70,7 @@
 
   }
 
+  @After
   public void tearDown(){
     super.tearDown();
     MSSQLTestUtils utils = new MSSQLTestUtils();
@@ -205,6 +213,7 @@
   }
  }
 
+  @Test
  public void testSingleClauseWhere() throws IOException {
   String whereClause = "L_ORDERKEY > 0 ";
   runWhereTest(whereClause,
@@ -212,6 +221,7 @@
       + "\n", 4, 10);
  }
 
+  @Test
  public void testMultiClauseWhere() throws IOException {
   String whereClause = "L_ORDERKEY > 1 AND L_PARTKEY < 4";
   runWhereTest(whereClause,
diff --git a/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java b/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
index 19440ff..250ffa6 100644
--- a/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
+++ b/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
@@ -41,12 +42,11 @@
 import com.cloudera.sqoop.manager.ConnManager;
 import com.cloudera.sqoop.manager.ExportJobContext;
 
-import junit.framework.TestCase;
 
 /**
  * Test methods of the JdbcExportJob implementation.
  */
-public class TestJdbcExportJob extends TestCase {
+public class TestJdbcExportJob {
 
   @Test
   public void testAvroWithNoColumnsSpecified() throws Exception {
diff --git a/src/test/org/apache/sqoop/mapreduce/TestJobBase.java b/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
index f228a35..017f984 100644
--- a/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
+++ b/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
@@ -18,6 +18,7 @@
 
 package org.apache.sqoop.mapreduce;
 
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -33,9 +34,7 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.mapreduce.JobBase;
 
-import junit.framework.TestCase;
-
-public class TestJobBase extends TestCase {
+public class TestJobBase {
 
   SqoopOptions options;
   Configuration conf;
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
index 1e557a5..8257435 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
@@ -20,13 +20,15 @@
 
 import java.math.BigDecimal;
 
-import junit.framework.TestCase;
-
 import com.cloudera.sqoop.mapreduce.db.BigDecimalSplitter;
+import org.junit.Test;
 
-public class TestBigDecimalSplitter extends TestCase {
+import static org.junit.Assert.assertEquals;
+
+public class TestBigDecimalSplitter {
 
   /* Test if the decimal split sizes are generated as expected */
+  @Test
   public void testDecimalTryDivide() {
     BigDecimal numerator = new BigDecimal("2");
     BigDecimal denominator = new BigDecimal("4");
@@ -37,6 +39,7 @@
   }
 
   /* Test if the integer split sizes are generated as expected */
+  @Test
   public void testIntegerTryDivide() {
 	BigDecimal numerator = new BigDecimal("99");
 	BigDecimal denominator = new BigDecimal("3");
@@ -47,6 +50,7 @@
   }
 
   /* Test if the recurring decimal split sizes are generated as expected */
+  @Test
   public void testRecurringTryDivide() {
 	BigDecimal numerator = new BigDecimal("1");
 	BigDecimal denominator = new BigDecimal("3");
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestDBConfiguration.java b/src/test/org/apache/sqoop/mapreduce/db/TestDBConfiguration.java
index cad1004..3160db9 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestDBConfiguration.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestDBConfiguration.java
@@ -20,13 +20,16 @@
 
 import java.util.Properties;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * Test aspects of DBConfiguration.
  */
-public class TestDBConfiguration extends TestCase {
+public class TestDBConfiguration {
 
+  @Test
   public void testPropertiesToString() {
     Properties connParams = new Properties();
     connParams.setProperty("a", "value-a");
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
index e93b6ad..efd0b95 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
@@ -20,14 +20,16 @@
 import java.sql.SQLException;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import com.cloudera.sqoop.mapreduce.db.IntegerSplitter;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Test that the IntegerSplitter generates sane splits.
  */
-public class TestIntegerSplitter extends TestCase {
+public class TestIntegerSplitter {
   private long [] toLongArray(List<Long> in) {
     long [] out = new long[in.size()];
     for (int i = 0; i < in.size(); i++) {
@@ -75,24 +77,28 @@
     }
   }
 
+  @Test
   public void testEvenSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10,-1, 0, 100);
     long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testOddSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10,-1, 0, 95);
     long [] expected = { 0, 10, 20, 30, 40, 50, 59, 68, 77, 86, 95, };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testSingletonSplit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(1,-1, 5, 5);
     long [] expected = { 5, 5 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testSingletonSplit2() throws SQLException {
     // Same test, but overly-high numSplits
     List<Long> splits = new IntegerSplitter().split(5,-1, 5, 5);
@@ -100,12 +106,14 @@
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testTooManySplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5,-1, 3, 5);
     long [] expected = { 3, 4, 5, 5};
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testExactSplitsAsInterval() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5,-1, 1, 5);
     long [] expected = { 1, 2, 3, 4, 5, 5};
@@ -118,30 +126,35 @@
    *
    * @throws SQLException
    */
+  @Test
   public void testBigIntSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(4,-1, 14,
         7863696997872966707L);
     assertEquals(splits.size(), 5);
   }
 
+  @Test
   public void testEvenSplitsWithLimit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5, 10, 0, 100);
     long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testOddSplitsWithLimit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5, 10, 0, 95);
     long [] expected = { 0, 10, 20, 30, 40, 50, 59, 68, 77, 86, 95};
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testSplitWithBiggerLimit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10, 15, 0, 100);
     long [] expected = {0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100};
     assertLongArrayEquals(expected, toLongArray(splits));
   }
 
+  @Test
   public void testFractionalSplitWithLimit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5, 1, 1, 10);
     long [] expected = {1,2, 3, 4, 5, 6, 7, 8, 9, 10, 10};
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index c402a54..adb795e 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -22,21 +22,20 @@
 import java.util.List;
 
 import com.cloudera.sqoop.mapreduce.db.TextSplitter;
-
-import junit.framework.JUnit4TestAdapter;
-import junit.framework.TestCase;
 import org.apache.sqoop.validation.ValidationException;
-import org.junit.Rule;
 import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import org.junit.Rule;
+
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
+
 
 /**
  * Test that the TextSplitter implementation creates a sane set of splits.
  */
-@RunWith(JUnit4.class)
-public class TestTextSplitter extends TestCase {
+public class TestTextSplitter {
 
   @Rule
   public ExpectedException thrown = ExpectedException.none();
@@ -172,9 +171,4 @@
     assertEquals(false, splitter2.isUseNCharStrings());
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestTextSplitter.class);
-  }
-
 }
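
TestTextSplitter keeps its ExpectedException @Rule, which is the JUnit 4 replacement for the try/fail/catch pattern when a test expects an exception. A hedged, self-contained sketch of how such a rule is typically used; the exception type, message and test name here are illustrative and not taken from the splitter tests:

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    public class ExpectedExceptionSketchTest {   // hypothetical class name
      @Rule
      public ExpectedException thrown = ExpectedException.none();

      @Test
      public void rejectsNegativeInput() {
        thrown.expect(IllegalArgumentException.class);   // the test passes only if this is thrown
        thrown.expectMessage("negative");                // substring match on the message
        throw new IllegalArgumentException("negative input is not allowed");
      }
    }
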
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
index 32ebf45..6a521bf 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
@@ -20,24 +20,20 @@
 import java.sql.ResultSet;
 import java.util.List;
 
-import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.sqoop.validation.ValidationException;
 
-import com.cloudera.sqoop.Sqoop;
 import com.cloudera.sqoop.testutil.MockResultSet;
 
-import junit.framework.TestCase;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 
-@RunWith(JUnit4.class)
-public class TextSplitterHadoopConfIntegrationTest extends TestCase {
+import static org.junit.Assert.assertFalse;
+
+public class TextSplitterHadoopConfIntegrationTest {
   private static final String TEXT_COL_NAME = "text_col_name";
 
   @Rule
@@ -66,9 +62,5 @@
     assertFalse(splits.isEmpty());
   }
 
-  //workaround: ant kept falling back to JUnit3
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TextSplitterHadoopConfIntegrationTest.class);
-  }
 }
 
diff --git a/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java b/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
index fbbffe9..ddf046e 100644
--- a/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
+++ b/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
@@ -19,19 +19,15 @@
 package org.apache.sqoop.tool;
 
 import com.cloudera.sqoop.SqoopOptions;
-import junit.framework.JUnit4TestAdapter;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
 import org.mockito.Mockito;
 
 import static org.hamcrest.CoreMatchers.sameInstance;
 import static org.mockito.Mockito.mock;
 
-@RunWith(JUnit4.class)
 public class TestBaseSqoopTool {
 
   @Rule
@@ -73,8 +69,4 @@
     testBaseSqoopTool.rethrowIfRequired(testSqoopOptions, expectedCauseException);
   }
 
-  public static junit.framework.Test suite() {
-    return new JUnit4TestAdapter(TestBaseSqoopTool.class);
-  }
-
 }
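
The deleted suite() methods in TestTextSplitter, TextSplitterHadoopConfIntegrationTest and TestBaseSqoopTool were a bridge that let a JUnit 3 runner execute JUnit 4 classes; removing them (together with the explicit @RunWith(JUnit4.class)) suggests the build no longer falls back to the JUnit 3 runner, as the deleted "workaround" comments indicate. For context, the removed pattern looked roughly like this, with a hypothetical class name:

    import junit.framework.JUnit4TestAdapter;
    import org.junit.Test;

    public class AdapterBridgeSketchTest {    // hypothetical; this pattern is what the patch removes
      @Test
      public void trivial() { }

      // Wrapped the JUnit 4 class so tools that only understood
      // junit.framework.Test could still run it.
      public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(AdapterBridgeSketchTest.class);
      }
    }
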
diff --git a/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java b/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
index 3e502d0..d51e33e 100644
--- a/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
+++ b/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
@@ -36,6 +36,12 @@
 import com.cloudera.sqoop.cli.ToolOptions;
 import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
 public class TestMainframeImportTool extends BaseSqoopTestCase {
 
   private static final Log LOG = LogFactory.getLog(TestMainframeImportTool.class
diff --git a/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java b/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
index f38164c..f5808b2 100644
--- a/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
+++ b/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
@@ -19,14 +19,18 @@
 package org.apache.sqoop.validation;
 
 import com.cloudera.sqoop.SqoopOptions;
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * Tests for AbortOnFailureHandler.
  */
-public class AbortOnFailureHandlerTest extends TestCase {
+public class AbortOnFailureHandlerTest {
 
+  @Test
   public void testAbortOnFailureHandlerIsDefaultOption() {
     assertEquals(AbortOnFailureHandler.class,
       new SqoopOptions(new Configuration()).getValidationFailureHandlerClass());
@@ -35,6 +39,7 @@
   /**
    * Positive case.
    */
+  @Test
   public void testAbortOnFailureHandlerAborting() {
     try {
       Validator validator = new RowCountValidator();
@@ -51,6 +56,7 @@
   /**
    * Negative case.
    */
+  @Test
   public void testAbortOnFailureHandlerNotAborting() {
     try {
       Validator validator = new RowCountValidator();
diff --git a/src/test/org/apache/sqoop/validation/AbsoluteValidationThresholdTest.java b/src/test/org/apache/sqoop/validation/AbsoluteValidationThresholdTest.java
index 9ac5074..86a99c4 100644
--- a/src/test/org/apache/sqoop/validation/AbsoluteValidationThresholdTest.java
+++ b/src/test/org/apache/sqoop/validation/AbsoluteValidationThresholdTest.java
@@ -18,17 +18,21 @@
 
 package org.apache.sqoop.validation;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Tests for AbsoluteValidationThreshold.
  */
-public class AbsoluteValidationThresholdTest extends TestCase {
+public class AbsoluteValidationThresholdTest {
 
   /**
    * Test the implementation for AbsoluteValidationThreshold.
    * Both arguments should be same else fail.
    */
+  @Test
   public void testAbsoluteValidationThreshold() {
     ValidationThreshold validationThreshold = new AbsoluteValidationThreshold();
     assertTrue(validationThreshold.compare(100, 100));
diff --git a/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java b/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
index 035d3b1..9ba62d4 100644
--- a/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
+++ b/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
@@ -22,11 +22,16 @@
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.tool.ImportTool;
+import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Tests for RowCountValidator.
  */
@@ -43,6 +48,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testValidateOptionIsEnabledInCLI() throws Exception {
     String[] types = {"INT NOT NULL PRIMARY KEY", "VARCHAR(32)", "VARCHAR(32)"};
     String[] insertVals = {"1", "'Bob'", "'sales'"};
@@ -59,6 +65,7 @@
     }
   }
 
+  @Test
   public void testValidationOptionsParsedCorrectly() throws Exception {
     String[] types = {"INT NOT NULL PRIMARY KEY", "VARCHAR(32)", "VARCHAR(32)"};
     String[] insertVals = {"1", "'Bob'", "'sales'"};
@@ -96,6 +103,7 @@
     }
   }
 
+  @Test
   public void testInvalidValidationOptions() throws Exception {
     String[] types = {"INT NOT NULL PRIMARY KEY", "VARCHAR(32)", "VARCHAR(32)"};
     String[] insertVals = {"1", "'Bob'", "'sales'"};
@@ -140,6 +148,7 @@
   /**
    * Negative case where the row counts do NOT match.
    */
+  @Test
   public void testValidatorWithDifferentRowCounts() {
     try {
       Validator validator = new RowCountValidator();
@@ -156,6 +165,7 @@
   /**
    * Positive case where the row counts match.
    */
+  @Test
   public void testValidatorWithMatchingRowCounts() {
     try {
       Validator validator = new RowCountValidator();
@@ -170,6 +180,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testValidatorForImportTable() throws Exception {
     String[] types = {"INT NOT NULL PRIMARY KEY", "VARCHAR(32)", "VARCHAR(32)"};
     String[] insertVals = {"1", "'Bob'", "'sales'"};