SQOOP-3273: Removing com.cloudera.sqoop packages

(Szabolcs Vasas via Anna Szonyi)
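
Note for users of the removed compatibility shims: the com.cloudera.sqoop classes deleted below only delegated to their org.apache.sqoop counterparts, so migration is a matter of switching package names. A minimal sketch follows; the ImportDriver class is a hypothetical caller added for illustration, while Sqoop.runTool(String[], Configuration) is the signature exposed by the deleted delegating shim and its org.apache.sqoop implementation.

    // Before (removed by this change):
    // import com.cloudera.sqoop.Sqoop;

    // After:
    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.Sqoop;

    /** Hypothetical driver showing the package move only. */
    public class ImportDriver {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Runs the Sqoop tool named in args[0] (e.g. "import") with the
        // remaining arguments, returning the tool's exit status.
        int ret = Sqoop.runTool(args, conf);
        System.exit(ret);
      }
    }

Configuration values referencing the old namespace (e.g. sqoop.connection.factories) likewise need to point at org.apache.sqoop classes, as shown in the sqoop-site-template.xml change below.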
diff --git a/build.xml b/build.xml
index 2094bae..a9dbdd5 100644
--- a/build.xml
+++ b/build.xml
@@ -132,8 +132,6 @@
       value="${findbugs.out.dir}/report.xml" />
   <property name="findbugs.output.html.file"
       value="${findbugs.out.dir}/report.html" />
-  <property name="findbugs.excludes"
-      location="${test.dir}/findbugsExcludeFile.xml" />
 
   <!-- maven -->
   <property name="mvn.build.dir" value="${build.dir}/m2" />
@@ -449,7 +447,7 @@
     <if>
       <equals arg1="${windows}" arg2="true" />
       <then>
-        <java classname="com.cloudera.sqoop.Sqoop"
+        <java classname="org.apache.sqoop.Sqoop"
             fork="true"
             failonerror="true"
             output="${build.dir}/tools-list"
@@ -466,7 +464,7 @@
         </exec>
       </then>
       <else>
-        <java classname="com.cloudera.sqoop.Sqoop"
+        <java classname="org.apache.sqoop.Sqoop"
             fork="true"
             failonerror="true"
             output="${build.dir}/tools-list"
@@ -1097,7 +1095,6 @@
       doctitle="${Name} ${version} API"
       bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
       <packageset dir="${src.dir}">
-        <include name="com/cloudera/sqoop/lib/**" />
         <include name="org/apache/sqoop/lib/**" />
       </packageset>
       <classpath>
@@ -1134,7 +1131,7 @@
     <mkdir dir="${findbugs.out.dir}"/>
     <findbugs home="${findbugs.home}" output="xml:withMessages"
         outputFile="${findbugs.output.xml.file}" effort="max"
-        excludeFilter="${findbugs.excludes}" jvmargs="-Xms512m -Xmx512m">
+        jvmargs="-Xms512m -Xmx512m">
       <auxClasspath>
         <path refid="test.classpath"/>
       </auxClasspath>
diff --git a/conf/sqoop-site-template.xml b/conf/sqoop-site-template.xml
index 2182da3..a906cb4 100644
--- a/conf/sqoop-site-template.xml
+++ b/conf/sqoop-site-template.xml
@@ -38,7 +38,7 @@
   <!--
   <property>
     <name>sqoop.connection.factories</name>
-    <value>com.cloudera.sqoop.manager.DefaultManagerFactory</value>
+    <value>org.apache.sqoop.manager.DefaultManagerFactory</value>
     <description>A comma-delimited list of ManagerFactory implementations
       which are consulted, in order, to instantiate ConnManager instances
       used to drive connections to databases.
diff --git a/ivy.xml b/ivy.xml
index d153038..6be4fa2 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -20,7 +20,7 @@
 <ivy-module version="2.0" xmlns:m="http://ant.apache.org/ivy/maven"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:noNamespaceSchemaLocation="http://ant.apache.org/ivy/schemas/ivy.xsd">
-  <info organisation="com.cloudera.sqoop" module="${name}">
+  <info organisation="org.apache.sqoop" module="${name}">
     <license name="Apache 2.0"/>
     <ivyauthor name="Sqoop Team" url="http://github.com/cloudera/sqoop" />
     <description>
diff --git a/src/docs/dev/api-reference.txt b/src/docs/dev/api-reference.txt
index 80453ee..46a57ee 100644
--- a/src/docs/dev/api-reference.txt
+++ b/src/docs/dev/api-reference.txt
@@ -49,10 +49,10 @@
 
 The full set of methods guaranteed to exist in an auto-generated class
 is specified in the abstract class
-+com.cloudera.sqoop.lib.SqoopRecord+.
++org.apache.sqoop.lib.SqoopRecord+.
 
 Instances of +SqoopRecord+ may depend on Sqoop's public API. This is all classes
-in the +com.cloudera.sqoop.lib+ package. These are briefly described below.
+in the +org.apache.sqoop.lib+ package. These are briefly described below.
 Clients of Sqoop should not need to directly interact with any of these classes,
 although classes generated by Sqoop will depend on them. Therefore, these APIs
 are considered public and care will be taken when forward-evolving them.
@@ -82,11 +82,11 @@
 read from databases, differences in the SQL supported by different vendors as
 well as JDBC metadata necessitates vendor-specific codepaths for most databases.
 Sqoop's solution to this problem is by introducing the +ConnManager+ API
-(+com.cloudera.sqoop.manager.ConnMananger+).
+(+org.apache.sqoop.manager.ConnManager+).
 
 +ConnManager+ is an abstract class defining all methods that interact with the
 database itself. Most implementations of +ConnManager+ will extend the
-+com.cloudera.sqoop.manager.SqlManager+ abstract class, which uses standard
++org.apache.sqoop.manager.SqlManager+ abstract class, which uses standard
 SQL to perform most actions. Subclasses are required to implement the
 +getConnection()+ method which returns the actual JDBC connection to the
 database. Subclasses are free to override all other methods as well. The
@@ -118,7 +118,7 @@
 class +ManagerFactory+ (See
 http://issues.apache.org/jira/browse/MAPREDUCE-750[]). One
 +ManagerFactory+ implementation currently serves all of Sqoop:
-+com.cloudera.sqoop.manager.DefaultManagerFactory+.  Extensions
++org.apache.sqoop.manager.DefaultManagerFactory+.  Extensions
 should not modify +DefaultManagerFactory+. Instead, an
 extension-specific +ManagerFactory+ implementation should be provided
 with the new +ConnManager+.  +ManagerFactory+ has a single method of
@@ -133,7 +133,7 @@
 +ManagerFactory+.  The +DefaultManagerFactory+ principly discriminates between
 databases by parsing the connect string stored in +SqoopOptions+.
 
-Extension authors may make use of classes in the +com.cloudera.sqoop.io+,
+Extension authors may make use of classes in the +org.apache.sqoop.io+,
 +mapreduce+, and +util+ packages to facilitate their implementations.
 These packages and classes are described in more detail in the following
 section.
@@ -149,7 +149,7 @@
 * Values must be serialized to byte arrays to put into cells.
 
 All of this is done via +Put+ statements in the HBase client API.
-Sqoop's interaction with HBase is performed in the +com.cloudera.sqoop.hbase+
+Sqoop's interaction with HBase is performed in the +org.apache.sqoop.hbase+
 package. Records are deserialzed from the database and emitted from the mapper.
 The OutputFormat is responsible for inserting the results into HBase. This is
 done through an interface called +PutTransformer+. The +PutTransformer+
@@ -179,7 +179,7 @@
 
 This section describes the internal architecture of Sqoop.
 
-The Sqoop program is driven by the +com.cloudera.sqoop.Sqoop+ main class.
+The Sqoop program is driven by the +org.apache.sqoop.Sqoop+ main class.
 A limited number of additional classes are in the same package; +SqoopOptions+
 (described earlier) and +ConnFactory+ (which manipulates +ManagerFactory+
 instances).
@@ -189,7 +189,7 @@
 
 The general program flow is as follows:
 
-+com.cloudera.sqoop.Sqoop+ is the main class and implements _Tool_. A new
++org.apache.sqoop.Sqoop+ is the main class and implements _Tool_. A new
 instance is launched with +ToolRunner+. The first argument to Sqoop is
 a string identifying the name of a +SqoopTool+ to run. The +SqoopTool+
 itself drives the execution of the user's requested operation (e.g.,
@@ -214,8 +214,8 @@
 to run the import. Each main action is actually controlled by the
 +ConnMananger+, except for the generating of code, which is done by
 the +CompilationManager+ and +ClassWriter+. (Both in the
-+com.cloudera.sqoop.orm+ package.) Importing into Hive is also
-taken care of via the +com.cloudera.sqoop.hive.HiveImport+ class
++org.apache.sqoop.orm+ package.) Importing into Hive is also
+taken care of via the +org.apache.sqoop.hive.HiveImport+ class
 after the +importTable()+ has completed. This is done without concern
 for the +ConnManager+ implementation used.
 
@@ -231,7 +231,7 @@
 Subpackages
 ^^^^^^^^^^^
 
-The following subpackages under +com.cloudera.sqoop+ exist:
+The following subpackages under +org.apache.sqoop+ exist:
 
 * +hive+ - Facilitates importing data to Hive.
 * +io+ - Implementations of +java.io.*+ interfaces (namely, _OutputStream_ and
@@ -288,7 +288,7 @@
 
 In Sqoop parlance, an "async sink" is a thread that takes an +InputStream+ and
 reads it to completion. These are realized by +AsyncSink+ implementations. The
-+com.cloudera.sqoop.util.AsyncSink+ abstract class defines the operations
++org.apache.sqoop.util.AsyncSink+ abstract class defines the operations
 this factory must perform. +processStream()+ will spawn another thread to
 immediately begin handling the data read from the +InputStream+ argument; it
 must read this stream to completion. The +join()+ method allows external threads
@@ -311,7 +311,7 @@
 Configuration and execution of MapReduce jobs follows a few common
 steps (configuring the +InputFormat+; configuring the +OutputFormat+;
 setting the +Mapper+ implementation; etc...). These steps are
-formalized in the +com.cloudera.sqoop.mapreduce.JobBase+ class.
+formalized in the +org.apache.sqoop.mapreduce.JobBase+ class.
 The +JobBase+ allows a user to specify the +InputFormat+,
 +OutputFormat+, and +Mapper+ to use. 
 
diff --git a/src/docs/sip/sip-4.txt b/src/docs/sip/sip-4.txt
index 9664125..fb4dba5 100644
--- a/src/docs/sip/sip-4.txt
+++ b/src/docs/sip/sip-4.txt
@@ -83,7 +83,7 @@
 
 h3. Base package
 
-The base package in Sqoop is currently @org.apache.hadoop.sqoop@. To reflect Sqoop's migration from an Apache Hadoop subproject to its own project, the class hierarchy should be moved to @com.cloudera.sqoop@.
+The base package in Sqoop is currently @org.apache.hadoop.sqoop@.
 
 h2. Compatibility Issues
 
diff --git a/src/java/com/cloudera/sqoop/ConnFactory.java b/src/java/com/cloudera/sqoop/ConnFactory.java
deleted file mode 100644
index f75cb4e..0000000
--- a/src/java/com/cloudera/sqoop/ConnFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ConnFactory
-    extends org.apache.sqoop.ConnFactory {
-
-  public static final String FACTORY_CLASS_NAMES_KEY =
-      org.apache.sqoop.ConnFactory.FACTORY_CLASS_NAMES_KEY;
-
-  public static final String DEFAULT_FACTORY_CLASS_NAMES =
-    org.apache.sqoop.ConnFactory.DEFAULT_FACTORY_CLASS_NAMES;
-
-  public ConnFactory(Configuration conf) {
-    super(conf);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/Sqoop.java b/src/java/com/cloudera/sqoop/Sqoop.java
deleted file mode 100644
index 8ec9f8f..0000000
--- a/src/java/com/cloudera/sqoop/Sqoop.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.tool.SqoopTool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class Sqoop
-    extends org.apache.sqoop.Sqoop {
-
-  public static final Log SQOOP_LOG =
-      org.apache.sqoop.Sqoop.SQOOP_LOG;
-
-  public static final String SQOOP_OPTIONS_FILE_SPECIFIER =
-      org.apache.sqoop.Sqoop.SQOOP_OPTIONS_FILE_SPECIFIER;
-
-  static {
-    Configuration.addDefaultResource("sqoop-site.xml");
-  }
-
-  public static int runSqoop(Sqoop sqoop, String [] args) {
-    return org.apache.sqoop.Sqoop.runSqoop(sqoop, args);
-  }
-
-  public static int runTool(String [] args, Configuration conf) {
-    return org.apache.sqoop.Sqoop.runTool(args, conf);
-  }
-
-  public static int runTool(String [] args) {
-    return org.apache.sqoop.Sqoop.runTool(args);
-  }
-
-  public static void main(String [] args) {
-    org.apache.sqoop.Sqoop.main(args);
-  }
-
-  public Sqoop(SqoopTool tool) {
-    super(tool);
-  }
-
-  public Sqoop(SqoopTool tool, Configuration conf) {
-    super(tool, conf);
-  }
-
-  public Sqoop(SqoopTool tool, Configuration conf, SqoopOptions opts) {
-    super(tool, conf, opts);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/SqoopOptions.java b/src/java/com/cloudera/sqoop/SqoopOptions.java
deleted file mode 100644
index 0863ef6..0000000
--- a/src/java/com/cloudera/sqoop/SqoopOptions.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated
- */
-public class SqoopOptions
-  extends org.apache.sqoop.SqoopOptions implements Cloneable {
-
-  public static final String METASTORE_PASSWORD_KEY =
-    org.apache.sqoop.SqoopOptions.METASTORE_PASSWORD_KEY;
-
-  public static final boolean METASTORE_PASSWORD_DEFAULT =
-    org.apache.sqoop.SqoopOptions.METASTORE_PASSWORD_DEFAULT;
-
-  public static final int DEFAULT_NUM_MAPPERS =
-    org.apache.sqoop.SqoopOptions.DEFAULT_NUM_MAPPERS;
-
-  /** Selects in-HDFS destination file format. */
-  public enum FileLayout {
-    TextFile,
-    SequenceFile,
-    AvroDataFile,
-    ParquetFile
-  }
-
-  /**
-   * Incremental imports support two modes:
-   * <ul>
-   * <li>new rows being appended to the end of a table with an
-   * incrementing id</li>
-   * <li>new data results in a date-last-modified column being
-   * updated to NOW(); Sqoop will pull all dirty rows in the next
-   * incremental import.</li>
-   * </ul>
-   */
-  public enum IncrementalMode {
-    None,
-    AppendRows,
-    DateLastModified,
-  }
-
-  /**
-   * Update mode option specifies how updates are performed when
-   * new rows are found with non-matching keys in database.
-   * It supports two modes:
-   * <ul>
-   * <li>UpdateOnly: This is the default. New rows are silently ignored.</li>
-   * <li>AllowInsert: New rows are inserted into the database.</li>
-   * </ul>
-   */
-  public enum UpdateMode {
-    UpdateOnly,
-    AllowInsert
-  }
-
-  public SqoopOptions() {
-    super();
-  }
-
-  public SqoopOptions(Configuration conf) {
-    super(conf);
-  }
-
-  public SqoopOptions(final String connect, final String table) {
-    super(connect, table);
-  }
-
-  public static void clearNonceDir() {
-    org.apache.sqoop.SqoopOptions.clearNonceDir();
-  }
-
-  public static String getHiveHomeDefault() {
-    return org.apache.sqoop.SqoopOptions.getHiveHomeDefault();
-  }
-
-  public static boolean isSqoopRethrowSystemPropertySet() {
-    return org.apache.sqoop.SqoopOptions.isSqoopRethrowSystemPropertySet();
-  }
-
-  /**
-   * {@inheritDoc}.
-   * @deprecated
-   */
-  public static class InvalidOptionsException
-    extends org.apache.sqoop.SqoopOptions.InvalidOptionsException {
-
-
-    public InvalidOptionsException(final String msg) {
-      super(msg);
-    }
-  }
-
-  public static char toChar(String charish) throws InvalidOptionsException {
-    try {
-      return org.apache.sqoop.SqoopOptions.toChar(charish);
-    } catch(org.apache.sqoop.SqoopOptions.InvalidOptionsException ex) {
-      throw new InvalidOptionsException(ex.getMessage());
-    }
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/cli/RelatedOptions.java b/src/java/com/cloudera/sqoop/cli/RelatedOptions.java
deleted file mode 100644
index c6453b7..0000000
--- a/src/java/com/cloudera/sqoop/cli/RelatedOptions.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.cli;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class RelatedOptions
-    extends org.apache.sqoop.cli.RelatedOptions {
-
-  public RelatedOptions() {
-    super();
-  }
-
-  public RelatedOptions(final String title) {
-    super(title);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/cli/SqoopParser.java b/src/java/com/cloudera/sqoop/cli/SqoopParser.java
deleted file mode 100644
index 0c6d84e..0000000
--- a/src/java/com/cloudera/sqoop/cli/SqoopParser.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.cli;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SqoopParser
-    extends org.apache.sqoop.cli.SqoopParser {
-}
-
diff --git a/src/java/com/cloudera/sqoop/cli/ToolOptions.java b/src/java/com/cloudera/sqoop/cli/ToolOptions.java
deleted file mode 100644
index 0b5da6c..0000000
--- a/src/java/com/cloudera/sqoop/cli/ToolOptions.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.cli;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ToolOptions
-  extends org.apache.sqoop.cli.ToolOptions {
-}
-
diff --git a/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java b/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java
deleted file mode 100644
index 72e7df1..0000000
--- a/src/java/com/cloudera/sqoop/config/ConfigurationConstants.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.config;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class ConfigurationConstants {
-
-  public static final String PROP_MAPRED_TASK_ID =
-    org.apache.sqoop.config.ConfigurationConstants.PROP_MAPRED_TASK_ID;
-  public static final String PROP_JOB_LOCAL_DIRECTORY =
-    org.apache.sqoop.config.ConfigurationConstants.PROP_JOB_LOCAL_DIRECTORY;
-  public static final String PROP_MAPRED_MAP_TASKS =
-    org.apache.sqoop.config.ConfigurationConstants.PROP_MAPRED_MAP_TASKS;
-  public static final String PROP_MAPRED_MAP_TASKS_SPECULATIVE_EXEC =
-    org.apache.sqoop.config.
-        ConfigurationConstants.PROP_MAPRED_MAP_TASKS_SPECULATIVE_EXEC;
-  public static final String PROP_MAPRED_REDUCE_TASKS_SPECULATIVE_EXEC =
-    org.apache.sqoop.config.
-        ConfigurationConstants.PROP_MAPRED_REDUCE_TASKS_SPECULATIVE_EXEC;
-  public static final String PROP_MAPRED_JOB_TRACKER_ADDRESS =
-    org.apache.sqoop.config.
-        ConfigurationConstants.PROP_MAPRED_JOB_TRACKER_ADDRESS;
-  public static final String COUNTER_GROUP_MAPRED_TASK_COUNTERS =
-    org.apache.sqoop.config.
-        ConfigurationConstants.COUNTER_GROUP_MAPRED_TASK_COUNTERS;
-  public static final String COUNTER_MAP_OUTPUT_RECORDS =
-    org.apache.sqoop.config.ConfigurationConstants.COUNTER_MAP_OUTPUT_RECORDS;
-  public static final String COUNTER_MAP_INPUT_RECORDS =
-    org.apache.sqoop.config.ConfigurationConstants.COUNTER_MAP_INPUT_RECORDS;
-
-  private ConfigurationConstants() {
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java b/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java
deleted file mode 100644
index 0870497..0000000
--- a/src/java/com/cloudera/sqoop/config/ConfigurationHelper.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.config;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class ConfigurationHelper {
-
-  public static void setJobNumMaps(Job job, int numMapTasks) {
-    org.apache.sqoop.config.ConfigurationHelper.setJobNumMaps(job, numMapTasks);
-  }
-
-  public static int getJobNumMaps(JobContext job) {
-    return org.apache.sqoop.config.ConfigurationHelper.getJobNumMaps(job);
-  }
-
-  public static long getNumMapOutputRecords(Job job)
-      throws IOException, InterruptedException {
-    return org.apache.sqoop.config.
-           ConfigurationHelper.getNumMapOutputRecords(job);
-  }
-
-  public static long getNumMapInputRecords(Job job)
-      throws IOException, InterruptedException {
-    return org.apache.sqoop.config.
-            ConfigurationHelper.getNumMapInputRecords(job);
-  }
-
-  public static int getConfNumMaps(Configuration conf) {
-    return org.apache.sqoop.config.ConfigurationHelper.getConfNumMaps(conf);
-  }
-
-  public static void setJobMapSpeculativeExecution(Job job, boolean isEnabled) {
-    org.apache.sqoop.config.
-        ConfigurationHelper.setJobMapSpeculativeExecution(job, isEnabled);
-  }
-
-  public static void setJobReduceSpeculativeExecution(
-      Job job, boolean isEnabled) {
-    org.apache.sqoop.config.
-        ConfigurationHelper.setJobReduceSpeculativeExecution(job, isEnabled);
-  }
-
-  public static void setJobtrackerAddr(Configuration conf, String addr) {
-    org.apache.sqoop.config.
-        ConfigurationHelper.setJobtrackerAddr(conf, addr);
-  }
-
-  public static String getDbInputClassProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbInputClassProperty();
-  }
-
-  public static String getDbUsernameProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbUsernameProperty();
-  }
-
-  public static String getDbPasswordProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbPasswordProperty();
-  }
-
-  public static String getDbUrlProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbUrlProperty();
-  }
-
-  public static String getDbInputTableNameProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbInputTableNameProperty();
-  }
-
-
-  public static String getDbInputConditionsProperty() {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.getDbInputConditionsProperty();
-  }
-
-  public static String [] parseGenericOptions(
-      Configuration conf, String [] args) throws IOException {
-    return org.apache.sqoop.config.
-               ConfigurationHelper.parseGenericOptions(conf, args);
-  }
-
-  private ConfigurationHelper() {
-    // Disable explicit object creation
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java b/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java
deleted file mode 100644
index 425b0f4..0000000
--- a/src/java/com/cloudera/sqoop/hbase/HBasePutProcessor.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HBasePutProcessor
-    extends org.apache.sqoop.hbase.HBasePutProcessor {
-
-  public static final String TABLE_NAME_KEY =
-      org.apache.sqoop.hbase.HBasePutProcessor.TABLE_NAME_KEY;
-  public static final String COL_FAMILY_KEY =
-      org.apache.sqoop.hbase.HBasePutProcessor.COL_FAMILY_KEY;
-  public static final String ROW_KEY_COLUMN_KEY =
-      org.apache.sqoop.hbase.HBasePutProcessor.ROW_KEY_COLUMN_KEY;
-  public static final String TRANSFORMER_CLASS_KEY =
-      org.apache.sqoop.hbase.HBasePutProcessor.TRANSFORMER_CLASS_KEY;
-
-}
diff --git a/src/java/com/cloudera/sqoop/hbase/HBaseUtil.java b/src/java/com/cloudera/sqoop/hbase/HBaseUtil.java
deleted file mode 100644
index 6f1fba7..0000000
--- a/src/java/com/cloudera/sqoop/hbase/HBaseUtil.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class HBaseUtil {
-
-  private HBaseUtil() { }
-
-  /**
-   * This is a way to make this always return false for testing.
-   */
-  public static void setAlwaysNoHBaseJarMode(boolean mode) {
-    org.apache.sqoop.hbase.HBaseUtil.setAlwaysNoHBaseJarMode(mode);
-  }
-
-  public static boolean isHBaseJarPresent() {
-    return org.apache.sqoop.hbase.HBaseUtil.isHBaseJarPresent();
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/hbase/PutTransformer.java b/src/java/com/cloudera/sqoop/hbase/PutTransformer.java
deleted file mode 100644
index b8ceab7..0000000
--- a/src/java/com/cloudera/sqoop/hbase/PutTransformer.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class PutTransformer
-    extends org.apache.sqoop.hbase.PutTransformer {
-}
diff --git a/src/java/com/cloudera/sqoop/hbase/ToStringPutTransformer.java b/src/java/com/cloudera/sqoop/hbase/ToStringPutTransformer.java
deleted file mode 100644
index 0c55b21..0000000
--- a/src/java/com/cloudera/sqoop/hbase/ToStringPutTransformer.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hbase;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ToStringPutTransformer
-    extends org.apache.sqoop.hbase.ToStringPutTransformer {
-}
diff --git a/src/java/com/cloudera/sqoop/hive/HiveImport.java b/src/java/com/cloudera/sqoop/hive/HiveImport.java
deleted file mode 100644
index db1c1b3..0000000
--- a/src/java/com/cloudera/sqoop/hive/HiveImport.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HiveImport
-    extends org.apache.sqoop.hive.HiveImport {
-
-  public HiveImport(final SqoopOptions opts, final ConnManager connMgr,
-      final Configuration conf, final boolean generateOnly) {
-    super(opts, connMgr, conf, generateOnly);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/hive/HiveTypes.java b/src/java/com/cloudera/sqoop/hive/HiveTypes.java
deleted file mode 100644
index e67f6b8..0000000
--- a/src/java/com/cloudera/sqoop/hive/HiveTypes.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class HiveTypes {
-
-  private HiveTypes() { }
-
-  public static String toHiveType(int sqlType) {
-    return org.apache.sqoop.hive.HiveTypes.toHiveType(sqlType);
-  }
-
-  public static boolean isHiveTypeImprovised(int sqlType) {
-    return org.apache.sqoop.hive.HiveTypes.isHiveTypeImprovised(sqlType);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/hive/TableDefWriter.java b/src/java/com/cloudera/sqoop/hive/TableDefWriter.java
deleted file mode 100644
index d19e018..0000000
--- a/src/java/com/cloudera/sqoop/hive/TableDefWriter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-import org.apache.hadoop.conf.Configuration;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class TableDefWriter
-    extends org.apache.sqoop.hive.TableDefWriter {
-
-  public TableDefWriter(final SqoopOptions opts, final ConnManager connMgr,
-      final String inputTable, final String outputTable,
-      final Configuration config, final boolean withComments) {
-    super(opts, connMgr, inputTable, outputTable, config, withComments);
-  }
-
-  public static String getHiveOctalCharCode(int charNum) {
-    return org.apache.sqoop.hive.TableDefWriter.getHiveOctalCharCode(charNum);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/io/CodecMap.java b/src/java/com/cloudera/sqoop/io/CodecMap.java
deleted file mode 100644
index 647cedc..0000000
--- a/src/java/com/cloudera/sqoop/io/CodecMap.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.compress.CompressionCodec;
-
-/**
- * Provides a mapping from codec names to concrete implementation class names.
- *
- * @deprecated use org.apache.sqoop.io.CodecMap instead.
- * @see org.apache.sqoop.io.CodecMap
- */
-public final class CodecMap {
-
-  // Supported codec map values
-  // Note: do not add more values here, since codecs are discovered using the
-  // standard Hadoop mechanism (io.compression.codecs). See
-  // CompressionCodecFactory.
-  public static final String NONE = org.apache.sqoop.io.CodecMap.NONE;
-  public static final String DEFLATE = org.apache.sqoop.io.CodecMap.DEFLATE;
-  public static final String LZO = org.apache.sqoop.io.CodecMap.LZO;
-  public static final String LZOP = org.apache.sqoop.io.CodecMap.LZOP;
-
-  private CodecMap() {
-  }
-
-  /**
-   * Given a codec name, return the name of the concrete class
-   * that implements it (or 'null' in the case of the "none" codec).
-   * @throws UnsupportedCodecException if a codec cannot be found
-   * with the supplied name.
-   */
-  public static String getCodecClassName(String codecName)
-      throws UnsupportedCodecException {
-    return org.apache.sqoop.io.CodecMap.getCodecClassName(codecName);
-  }
-
-  /**
-   * Given a codec name, instantiate the concrete implementation
-   * class that implements it.
-   * @throws UnsupportedCodecException if a codec cannot be found
-   * with the supplied name.
-   */
-  public static CompressionCodec getCodec(String codecName,
-      Configuration conf) throws UnsupportedCodecException {
-    return org.apache.sqoop.io.CodecMap.getCodec(codecName, conf);
-  }
-
-  /**
-   * Return the set of available codec names.
-   */
-  public static Set<String> getCodecNames() {
-    return org.apache.sqoop.io.CodecMap.getCodecNames();
-  }
-
-  /**
-   * Return the short name of the codec.
-   * See {@link org.apache.sqoop.io.CodecMap#getCodecShortNameByName(String,
-   * Configuration)}.
-   */
-  public static String getCodecShortNameByName(String codecName,
-    Configuration conf) throws UnsupportedCodecException {
-    return org.apache.sqoop.io.CodecMap
-      .getCodecShortNameByName(codecName, conf);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/io/FixedLengthInputStream.java b/src/java/com/cloudera/sqoop/io/FixedLengthInputStream.java
deleted file mode 100644
index 806af22..0000000
--- a/src/java/com/cloudera/sqoop/io/FixedLengthInputStream.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.io.InputStream;
-
-/**
- * Provides an InputStream that can consume a fixed maximum number of bytes
- * from an underlying stream. Closing the FixedLengthInputStream does not
- * close the underlying stream. After reading the maximum number of available
- * bytes this acts as though EOF has been reached.
- *
- * @deprecated use org.apache.sqoop.io.FixedLengthInputStream instead.
- * @see org.apache.sqoop.io.FixedLengthInputStream
- */
-public class FixedLengthInputStream
-  extends org.apache.sqoop.io.FixedLengthInputStream {
-
-   public FixedLengthInputStream(InputStream stream, long maxLen) {
-     super(stream, maxLen);
-   }
-}
-
diff --git a/src/java/com/cloudera/sqoop/io/LobFile.java b/src/java/com/cloudera/sqoop/io/LobFile.java
deleted file mode 100644
index 905c68f..0000000
--- a/src/java/com/cloudera/sqoop/io/LobFile.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-/**
- * File format which stores large object records.
- * The format allows large objects to be read through individual InputStreams
- * to allow reading without full materialization of a single record.
- * Each record is assigned an id and can be accessed by id efficiently by
- * consulting an index at the end of the file.
- *
- * @deprecated use org.apache.sqoop.io.LobFile instead.
- * @see org.apache.sqoop.io.LobFile
- */
-public final class LobFile {
-
-  private LobFile() {
-  }
-
-  public static final Log LOG = org.apache.sqoop.io.LobFile.LOG;
-
-  public static final int LATEST_LOB_VERSION =
-      org.apache.sqoop.io.LobFile.LATEST_LOB_VERSION;
-
-  // Must be in sync with org.apache.sqoop.io.LobFile.HEADER_ID_STR
-  static final char [] HEADER_ID_STR =
-      org.apache.sqoop.io.LobFile.HEADER_ID_STR;
-
-  // Value for entryId to write to the beginning of an IndexSegment.
-  static final long SEGMENT_HEADER_ID =
-      org.apache.sqoop.io.LobFile.SEGMENT_HEADER_ID;
-
-  // Value for entryId to write before the finale.
-  static final long SEGMENT_OFFSET_ID =
-      org.apache.sqoop.io.LobFile.SEGMENT_OFFSET_ID;
-
-  // Value for entryID to write before the IndexTable
-  static final long INDEX_TABLE_ID = org.apache.sqoop.io.LobFile.INDEX_TABLE_ID;
-
-  /**
-   * @deprecated use org.apache.sqoop.io.LobFile.Writer
-   * @see org.apache.sqoop.io.LobFile.Writer
-   */
-  public abstract static class Writer
-    extends org.apache.sqoop.io.LobFile.Writer {
-  }
-
-  /**
-   * @deprecated use org.apache.sqoop.io.LobFile.Reader instead.
-   * @see org.apache.sqoop.io.LobFile.Reader
-   */
-  public abstract static class Reader
-    extends org.apache.sqoop.io.LobFile.Reader {
-  }
-
-  /**
-   * Creates a LobFile Reader configured to read from the specified file.
-   */
-  public static Reader open(Path p, Configuration conf) throws IOException {
-    return org.apache.sqoop.io.LobFile.open(p, conf);
-  }
-
-  /**
-   * Creates a LobFile Writer configured for uncompressed binary data.
-   * @param p the path to create.
-   * @param conf the configuration to use to interact with the filesystem.
-   */
-  public static Writer create(Path p, Configuration conf) throws IOException {
-    return org.apache.sqoop.io.LobFile.create(p, conf, false);
-  }
-
-  /**
-   * Creates a LobFile Writer configured for uncompressed data.
-   * @param p the path to create.
-   * @param conf the configuration to use to interact with the filesystem.
-   * @param isCharData true if this is for CLOBs, false for BLOBs.
-   */
-  public static Writer create(Path p, Configuration conf, boolean isCharData)
-      throws IOException {
-    return org.apache.sqoop.io.LobFile.create(p, conf, isCharData, null);
-  }
-
-  /**
-   * Creates a LobFile Writer.
-   * @param p the path to create.
-   * @param conf the configuration to use to interact with the filesystem.
-   * @param isCharData true if this is for CLOBs, false for BLOBs.
-   * @param codec the compression codec to use (or null for none).
-   */
-  public static Writer create(Path p, Configuration conf, boolean isCharData,
-      String codec) throws IOException {
-    return org.apache.sqoop.io.LobFile.create(p, conf, isCharData, codec);
-  }
-
-  /**
-   * Creates a LobFile Writer.
-   * @param p the path to create.
-   * @param conf the configuration to use to interact with the filesystem.
-   * @param isCharData true if this is for CLOBs, false for BLOBs.
-   * @param codec the compression codec to use (or null for none).
-   * @param entriesPerSegment number of entries per index segment.
-   */
-  public static Writer create(Path p, Configuration conf, boolean isCharData,
-      String codec, int entriesPerSegment)
-      throws IOException {
-    return org.apache.sqoop.io.LobFile.create(
-        p, conf, isCharData, codec, entriesPerSegment);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/io/LobReaderCache.java b/src/java/com/cloudera/sqoop/io/LobReaderCache.java
deleted file mode 100644
index 89d31d3..0000000
--- a/src/java/com/cloudera/sqoop/io/LobReaderCache.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-/**
- * A cache of open LobFile.Reader objects.
- * This maps from filenames to the open Reader, if any.  This uses the
- * Singleton pattern. While nothing prevents multiple LobReaderCache
- * instances, it is most useful to have a single global cache. This cache is
- * internally synchronized; only one thread can insert or retrieve a reader
- * from the cache at a time.
- *
- * @deprecated use org.apache.sqoop.io.LobReaderCache instead.
- * @see org.apache.sqoop.io.LobReaderCache
- */
-public final class LobReaderCache extends org.apache.sqoop.io.LobReaderCache {
-
-  public static final Log LOG = org.apache.sqoop.io.LobReaderCache.LOG;
-
-  private static final LobReaderCache CACHE;
-  static {
-    CACHE = new LobReaderCache();
-  }
-
-  /**
-   * @return the singleton LobReaderCache instance.
-   */
-  public static LobReaderCache getCache() {
-    return CACHE;
-  }
-
-  /**
-   * Created a fully-qualified path object.
-   * @param path the path to fully-qualify with its fs URI.
-   * @param conf the current Hadoop FS configuration.
-   * @return a new path representing the same location as the input 'path',
-   * but with a fully-qualified URI.
-   */
-  public static Path qualify(Path path, Configuration conf)
-      throws IOException {
-    return org.apache.sqoop.util.FileSystemUtil.makeQualified(path, conf);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/io/NamedFifo.java b/src/java/com/cloudera/sqoop/io/NamedFifo.java
deleted file mode 100644
index e27d2c4..0000000
--- a/src/java/com/cloudera/sqoop/io/NamedFifo.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.io.File;
-
-/**
- * A named FIFO channel.
- *
- * @deprecated use org.apache.sqoop.io.NamedFifo instead.
- * @see org.apache.sqoop.io.NamedFifo
- */
-public class NamedFifo extends org.apache.sqoop.io.NamedFifo {
-
-  public NamedFifo(String pathname) {
-    super(pathname);
-  }
-
-  public NamedFifo(File fifo) {
-    super(fifo);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/io/SplittableBufferedWriter.java b/src/java/com/cloudera/sqoop/io/SplittableBufferedWriter.java
deleted file mode 100644
index ef9285a..0000000
--- a/src/java/com/cloudera/sqoop/io/SplittableBufferedWriter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.io;
-
-/**
- * A BufferedWriter implementation that wraps around a SplittingOutputStream
- * and allows splitting of the underlying stream.
- * Splits occur at allowSplit() calls, or newLine() calls.
- *
- * @deprecated use org.apache.sqoop.io.SplittableBufferedWriter instead.
- * @see org.apache.sqoop.io.SplittableBufferedWriter
- */
-public class SplittableBufferedWriter
-  extends org.apache.sqoop.io.SplittableBufferedWriter {
-
-  public SplittableBufferedWriter(
-      final SplittingOutputStream splitOutputStream) {
-    super(splitOutputStream);
-  }
-
-  SplittableBufferedWriter(final SplittingOutputStream splitOutputStream,
-      final boolean alwaysFlush) {
-    super(splitOutputStream, alwaysFlush);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/io/SplittingOutputStream.java b/src/java/com/cloudera/sqoop/io/SplittingOutputStream.java
deleted file mode 100644
index ab81042..0000000
--- a/src/java/com/cloudera/sqoop/io/SplittingOutputStream.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.CompressionCodec;
-
-/**
- * An output stream that writes to an underlying filesystem, opening
- * a new file after a specified number of bytes have been written to the
- * current one.
- *
- * @deprecated use org.apache.sqoop.io.SplittingOutputStream instead.
- * @see org.apache.sqoop.io.SplittingOutputStream
- */
-public class SplittingOutputStream
-    extends org.apache.sqoop.io.SplittingOutputStream {
-
-  public static final Log LOG = org.apache.sqoop.io.SplittingOutputStream.LOG;
-
-  public SplittingOutputStream(final Configuration conf, final Path destDir,
-      final String filePrefix, final long cutoff, final CompressionCodec codec)
-      throws IOException {
-    super(conf, destDir, filePrefix, cutoff, codec);
-  }
-}
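
Together, the two deleted io shims above map onto org.apache.sqoop.io.SplittingOutputStream and SplittableBufferedWriter, whose constructors are shown in the removed delegators. Below is a hedged sketch of writing split text output against the org.apache classes; the destination path, the 1 MB cutoff, and passing null to disable compression are assumptions made only for the example.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.sqoop.io.SplittableBufferedWriter;
    import org.apache.sqoop.io.SplittingOutputStream;

    public class SplitWriterExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path destDir = new Path("/tmp/split-output");   // assumed destination

        // Roll over to a new "part-" file once roughly 1 MB has been written;
        // a null codec is assumed here to mean "no compression".
        SplittingOutputStream out = new SplittingOutputStream(
            conf, destDir, "part-", 1024L * 1024L, null);

        try (SplittableBufferedWriter writer = new SplittableBufferedWriter(out)) {
          for (int i = 0; i < 100000; i++) {
            writer.write("record " + i);
            // Per the Javadoc above, newLine() both terminates the record and
            // marks a point where the underlying stream may split the file.
            writer.newLine();
          }
        }
      }
    }
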
diff --git a/src/java/com/cloudera/sqoop/io/UnsupportedCodecException.java b/src/java/com/cloudera/sqoop/io/UnsupportedCodecException.java
deleted file mode 100644
index 4d8225a..0000000
--- a/src/java/com/cloudera/sqoop/io/UnsupportedCodecException.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.io;
-
-
-/**
- * Thrown when a compression codec cannot be recognized.
- *
- * @deprecated use org.apache.sqoop.io.UnsupportedCodecException instead.
- * @see org.apache.sqoop.io.UnsupportedCodecException
- */
-public class UnsupportedCodecException
-    extends org.apache.sqoop.io.UnsupportedCodecException {
-
-  public UnsupportedCodecException() {
-    super("UnsupportedCodecException");
-  }
-
-  public UnsupportedCodecException(String msg) {
-    super(msg);
-  }
-
-  public UnsupportedCodecException(Throwable cause) {
-    super(cause);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/lib/BigDecimalSerializer.java b/src/java/com/cloudera/sqoop/lib/BigDecimalSerializer.java
deleted file mode 100644
index 2ae89c2..0000000
--- a/src/java/com/cloudera/sqoop/lib/BigDecimalSerializer.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-/**
- * Serialize BigDecimal classes to/from DataInput and DataOutput objects.
- *
- * BigDecimal is comprised of a BigInteger with an integer 'scale' field.
- * The BigDecimal/BigInteger can also return itself as a 'long' value.
- *
- * We serialize in one of two formats:
- *
- *  First, check whether the BigInt can fit in a long:
- *  boolean b = BigIntegerPart &gt; LONG_MAX || BigIntegerPart &lt; LONG_MIN
- *
- *  [int: scale][boolean: b == false][long: BigInt-part]
- *  [int: scale][boolean: b == true][string: BigInt-part.toString()]
- *
- * TODO(aaron): Get this to work with Hadoop's Serializations framework.
- *
- * @deprecated use org.apache.sqoop.lib.BigDecimalSerializer instead.
- * @see org.apache.sqoop.lib.BigDecimalSerializer
- */
-public final class BigDecimalSerializer {
-
-  private BigDecimalSerializer() { }
-
-  static final BigInteger LONG_MAX_AS_BIGINT =
-      org.apache.sqoop.lib.BigDecimalSerializer.LONG_MAX_AS_BIGINT;
-  static final BigInteger LONG_MIN_AS_BIGINT =
-      org.apache.sqoop.lib.BigDecimalSerializer.LONG_MIN_AS_BIGINT;
-
-  public static void write(BigDecimal d, DataOutput out) throws IOException {
-    org.apache.sqoop.lib.BigDecimalSerializer.write(d, out);
-  }
-
-  public static BigDecimal readFields(DataInput in) throws IOException {
-    return org.apache.sqoop.lib.BigDecimalSerializer.readFields(in);
-  }
-}
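
The removed shim above only delegated to org.apache.sqoop.lib.BigDecimalSerializer, so its write/readFields signatures are the ones to use going forward. A minimal round-trip sketch against the org.apache API (the wrapper class name is illustrative only):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.math.BigDecimal;

    import org.apache.sqoop.lib.BigDecimalSerializer;

    public class BigDecimalRoundTrip {
      public static void main(String[] args) throws Exception {
        BigDecimal original = new BigDecimal("12345.6789");

        // Serialize using the scale + (long | string) format described above.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
          BigDecimalSerializer.write(original, out);
        }

        // Deserialize and verify the value survives the round trip.
        try (DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
          BigDecimal restored = BigDecimalSerializer.readFields(in);
          System.out.println(original.equals(restored));  // expected: true
        }
      }
    }
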
diff --git a/src/java/com/cloudera/sqoop/lib/BlobRef.java b/src/java/com/cloudera/sqoop/lib/BlobRef.java
deleted file mode 100644
index b3d5341..0000000
--- a/src/java/com/cloudera/sqoop/lib/BlobRef.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import org.apache.commons.logging.Log;
-
-/**
- * BlobRef is a wrapper that holds a BLOB either directly, or a
- * reference to a file that holds the BLOB data.
- *
- * @deprecated use org.apache.sqoop.lib.BlobRef instead.
- * @see org.apache.sqoop.lib.BlobRef
- */
-public class BlobRef extends org.apache.sqoop.lib.BlobRef {
-
-  public static final Log LOG = org.apache.sqoop.lib.BlobRef.LOG;
-
-  public BlobRef() {
-    super();
-  }
-
-  public BlobRef(byte [] bytes) {
-    super(bytes);
-  }
-
-  /**
-   * Initialize a BlobRef to an external BLOB.
-   * @param file the filename to the BLOB. May be relative to the job dir.
-   * @param offset the offset (in bytes) into the LobFile for this record.
-   * @param length the length of the record in bytes.
-   */
-  public BlobRef(String file, long offset, long length) {
-    super(file, offset, length);
-  }
-
-
-  /**
-   * Create a BlobRef based on parsed data from a line of text.
-   * This only operates correctly on external blobs; inline blobs are simply
-   * returned as null. You should store BLOB data in SequenceFile format
-   * if reparsing is necessary.
-   * @param inputString the text-based input data to parse.
-   * @return a new BlobRef containing a reference to an external BLOB, or
-   * an empty BlobRef if the data to be parsed is actually inline.
-   */
-  public static BlobRef parse(String inputString) {
-    return org.apache.sqoop.lib.BlobRef.parse(inputString);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/lib/BooleanParser.java b/src/java/com/cloudera/sqoop/lib/BooleanParser.java
deleted file mode 100644
index ab97cf0..0000000
--- a/src/java/com/cloudera/sqoop/lib/BooleanParser.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.lib;
-
-/**
- * Parse string representations of boolean values into boolean
- * scalar types.
- * @deprecated use org.apache.sqoop.lib.BooleanParser instead.
- * @see org.apache.sqoop.lib.BooleanParser
- */
-public final class BooleanParser {
-  private BooleanParser() {
-  }
-
-  /**
-   * Return a boolean based on the value contained in the string.
-   *
-   * <p>The following values are considered true:
-   * "true", "t", "yes", "on", "1".</p>
-   * <p>All other values, including 'null', are false.</p>
-   * <p>All comparisons are case-insensitive.</p>
-   */
-  public static boolean valueOf(final String s) {
-    return org.apache.sqoop.lib.BooleanParser.valueOf(s);
-  }
-}
-
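
Callers of the removed shim can switch to org.apache.sqoop.lib.BooleanParser directly; the deleted Javadoc above defines which strings count as true. A small illustrative sketch (the wrapper class name is made up for the example):

    import org.apache.sqoop.lib.BooleanParser;

    public class BooleanParserExample {
      public static void main(String[] args) {
        // "true", "t", "yes", "on" and "1" are true, case-insensitively.
        System.out.println(BooleanParser.valueOf("YES"));   // true
        System.out.println(BooleanParser.valueOf("1"));     // true
        // Everything else, including null, is false.
        System.out.println(BooleanParser.valueOf("0"));     // false
        System.out.println(BooleanParser.valueOf(null));    // false
      }
    }
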
diff --git a/src/java/com/cloudera/sqoop/lib/ClobRef.java b/src/java/com/cloudera/sqoop/lib/ClobRef.java
deleted file mode 100644
index a328f23..0000000
--- a/src/java/com/cloudera/sqoop/lib/ClobRef.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.lib;
-
-/**
- * ClobRef is a wrapper that holds a CLOB either directly, or a
- * reference to a file that holds the CLOB data.
- *
- * @deprecated use org.apache.sqoop.lib.ClobRef instead.
- * @see org.apache.sqoop.lib.ClobRef
- */
-public class ClobRef extends org.apache.sqoop.lib.ClobRef {
-
-  public ClobRef() {
-    super();
-  }
-
-  public ClobRef(String chars) {
-    super(chars);
-  }
-
-  /**
-   * Initialize a clobref to an external CLOB.
-   * @param file the filename to the CLOB. May be relative to the job dir.
-   * @param offset the offset (in bytes) into the LobFile for this record.
-   * @param length the length of the record in characters.
-   */
-  public ClobRef(String file, long offset, long length) {
-    super(file, offset, length);
-  }
-
-  /**
-   * Create a ClobRef based on parsed data from a line of text.
-   * @param inputString the text-based input data to parse.
-   * @return a ClobRef to the given data.
-   */
-  public static ClobRef parse(String inputString) {
-    return org.apache.sqoop.lib.ClobRef.parse(inputString);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/lib/DelimiterSet.java b/src/java/com/cloudera/sqoop/lib/DelimiterSet.java
deleted file mode 100644
index 6de90ad..0000000
--- a/src/java/com/cloudera/sqoop/lib/DelimiterSet.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-/**
- * Encapsulates a set of delimiters used to encode a record.
- * @deprecated use org.apache.sqoop.lib.DelimiterSet instead.
- * @see org.apache.sqoop.lib.DelimiterSet
- */
-public class DelimiterSet extends org.apache.sqoop.lib.DelimiterSet {
-
-  public static final char NULL_CHAR =
-      org.apache.sqoop.lib.DelimiterSet.NULL_CHAR;
-
-  /**
-   * Create a delimiter set with the default delimiters
-   * (comma for fields, newline for records).
-   */
-  public DelimiterSet() {
-    super();
-  }
-
-  /**
-   * Create a delimiter set with the specified delimiters.
-   * @param field the fields-terminated-by delimiter
-   * @param record the lines-terminated-by delimiter
-   * @param enclose the enclosed-by character
-   * @param escape the escaped-by character
-   * @param isEncloseRequired If true, enclosed-by is applied to all
-   * fields. If false, only applied to fields that embed delimiters.
-   */
-  public DelimiterSet(char field, char record, char enclose, char escape,
-      boolean isEncloseRequired) {
-    super(field, record, enclose, escape, isEncloseRequired);
-  }
-
-  /**
-   * Identical to clone() but does not throw spurious exceptions.
-   * @return a new copy of this same set of delimiters.
-   */
-  public DelimiterSet copy() {
-    try {
-      return (DelimiterSet) clone();
-    } catch (CloneNotSupportedException cnse) {
-      // Should never happen for DelimiterSet.
-      return null;
-    }
-  }
-
-  // Static delimiter sets for the commonly-used delimiter arrangements.
-
-  public static final DelimiterSet DEFAULT_DELIMITERS;
-  public static final DelimiterSet HIVE_DELIMITERS;
-  public static final DelimiterSet MYSQL_DELIMITERS;
-
-  static {
-    DEFAULT_DELIMITERS = new DelimiterSet(',', '\n', NULL_CHAR, NULL_CHAR,
-        false);
-    MYSQL_DELIMITERS = new DelimiterSet(',', '\n', '\'', '\\', false);
-    HIVE_DELIMITERS = new DelimiterSet('\001', '\n',
-        NULL_CHAR, NULL_CHAR, false);
-  }
-}
-
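
The deleted static initializer above documents the common delimiter arrangements; they can be rebuilt directly against org.apache.sqoop.lib.DelimiterSet with the same five-argument constructor and NULL_CHAR constant. A brief sketch (the wrapper class exists only for illustration):

    import org.apache.sqoop.lib.DelimiterSet;

    public class DelimiterSetExample {
      public static void main(String[] args) {
        // MySQL-style arrangement from the deleted static initializer:
        // ',' between fields, '\n' between records, optional single-quote
        // enclosing, backslash escaping.
        DelimiterSet mysqlStyle = new DelimiterSet(',', '\n', '\'', '\\', false);

        // Hive-style arrangement: ^A between fields, newline between records,
        // no enclosing or escaping (NULL_CHAR disables those behaviours).
        DelimiterSet hiveStyle = new DelimiterSet('\001', '\n',
            DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);

        System.out.println(mysqlStyle);
        System.out.println(hiveStyle);
      }
    }
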
diff --git a/src/java/com/cloudera/sqoop/lib/FieldFormatter.java b/src/java/com/cloudera/sqoop/lib/FieldFormatter.java
deleted file mode 100644
index 45fb81f..0000000
--- a/src/java/com/cloudera/sqoop/lib/FieldFormatter.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-/**
- * Static helper class that will help format data with quotes and escape chars.
- *
- * @deprecated use org.apache.sqoop.lib.FieldFormatter instead.
- * @see org.apache.sqoop.lib.FieldFormatter
- */
-public final class FieldFormatter {
-
-  private FieldFormatter() { }
-
-  /**
-   * only pass fields that are strings when --hive-drop-delims option is on.
-   * @param str
-   * @param delimiters
-   * @return
-   */
-  public static String hiveStringDropDelims(String str,
-          DelimiterSet delimiters) {
-    return org.apache.sqoop.lib.FieldFormatter.hiveStringDropDelims(
-        str, delimiters);
-  }
-
-  /**
-   * replace hive delimiters with a user-defined string passed to the
-   * --hive-delims-replacement option.
-   * @param str
-   * @param delimiters
-   * @return
-   */
-  public static String hiveStringReplaceDelims(String str, String replacement,
-      DelimiterSet delimiters) {
-    return org.apache.sqoop.lib.FieldFormatter.hiveStringReplaceDelims(
-        str, replacement, delimiters);
-  }
-
-  /**
-   * Takes an input string representing the value of a field, encloses it in
-   * enclosing chars, and escapes any occurrences of such characters in the
-   * middle.  The escape character itself is also escaped if it appears in the
-   * text of the field.  If there is no enclosing character, then any
-   * delimiters present in the field body are escaped instead.
-   *
-   * The field is enclosed only if:
-   *   enclose != '\000', and:
-   *     encloseRequired is true, or
-   *     one of the fields-terminated-by or lines-terminated-by characters is
-   *     present in the string.
-   *
-   * Escaping is not performed if the escape char is '\000'.
-   *
-   * @param str - The user's string to escape and enclose
-   * @param delimiters - The DelimiterSet to use identifying the escape and
-   * enclose semantics. If the specified escape or enclose characters are
-   * '\000', those operations are not performed.
-   * @return the escaped, enclosed version of 'str'.
-   */
-  public static String escapeAndEnclose(String str, DelimiterSet delimiters) {
-    return org.apache.sqoop.lib.FieldFormatter.escapeAndEnclose(
-        str, delimiters);
-  }
-}
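
A short sketch of escapeAndEnclose against the org.apache.sqoop classes, using the MySQL-style delimiters shown earlier. The comments restate the enclosing and escaping rules from the deleted Javadoc; the exact output depends on the Sqoop version, so treat the example as illustrative.

    import org.apache.sqoop.lib.DelimiterSet;
    import org.apache.sqoop.lib.FieldFormatter;

    public class FieldFormatterExample {
      public static void main(String[] args) {
        // Optional single-quote enclosing, backslash escaping.
        DelimiterSet delims = new DelimiterSet(',', '\n', '\'', '\\', false);

        // The field embeds the fields-terminated-by character (','), so it is
        // enclosed, and any enclose/escape characters inside it are escaped.
        String formatted =
            FieldFormatter.escapeAndEnclose("O'Brien, Inc.", delims);
        System.out.println(formatted);
      }
    }
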
diff --git a/src/java/com/cloudera/sqoop/lib/FieldMapProcessor.java b/src/java/com/cloudera/sqoop/lib/FieldMapProcessor.java
deleted file mode 100644
index 3f21540..0000000
--- a/src/java/com/cloudera/sqoop/lib/FieldMapProcessor.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-/**
- * Interface implemented by classes that process FieldMappable objects.
- *
- * @deprecated use org.apache.sqoop.lib.FieldMapProcessor instead.
- * @see org.apache.sqoop.lib.FieldMapProcessor
- */
-public interface FieldMapProcessor
-  extends org.apache.sqoop.lib.FieldMapProcessor {
-}
-
diff --git a/src/java/com/cloudera/sqoop/lib/FieldMappable.java b/src/java/com/cloudera/sqoop/lib/FieldMappable.java
deleted file mode 100644
index 2067ecc..0000000
--- a/src/java/com/cloudera/sqoop/lib/FieldMappable.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-/**
- * Interface describing a class capable of returning a map of the fields
- * of the object to their values.
- *
- * @deprecated use org.apache.sqoop.lib.FieldMappable instead.
- * @see org.apache.sqoop.lib.FieldMappable
- */
-public interface FieldMappable extends org.apache.sqoop.lib.FieldMappable {
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/lib/JdbcWritableBridge.java b/src/java/com/cloudera/sqoop/lib/JdbcWritableBridge.java
deleted file mode 100644
index 316547f..0000000
--- a/src/java/com/cloudera/sqoop/lib/JdbcWritableBridge.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-
-import org.apache.hadoop.io.BytesWritable;
-
-/**
- * Contains a set of methods which can read db columns from a ResultSet into
- * Java types, and do serialization of these types to/from DataInput/DataOutput
- * for use with Hadoop's Writable implementation. This supports null values
- * for all types.
- *
- * @deprecated use org.apache.sqoop.lib.JdbcWritableBridge instead.
- * @see org.apache.sqoop.lib.JdbcWritableBridge
- */
-public final class JdbcWritableBridge {
-
-  // Currently, cap BLOB/CLOB objects at 16 MB until we can use external
-  // storage.
-  public static final long MAX_BLOB_LENGTH =
-      org.apache.sqoop.lib.JdbcWritableBridge.MAX_BLOB_LENGTH;
-  public static final long MAX_CLOB_LENGTH =
-      org.apache.sqoop.lib.JdbcWritableBridge.MAX_CLOB_LENGTH;
-
-  private JdbcWritableBridge() {
-  }
-
-  public static Integer readInteger(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readInteger(colNum, r);
-  }
-
-  public static Long readLong(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readLong(colNum, r);
-  }
-
-  public static String readString(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readString(colNum, r);
-  }
-
-  public static Float readFloat(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readFloat(colNum, r);
-  }
-
-  public static Double readDouble(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readDouble(colNum, r);
-  }
-
-  public static Boolean readBoolean(int colNum, ResultSet r)
-     throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readBoolean(colNum, r);
-  }
-
-  public static Time readTime(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readTime(colNum, r);
-  }
-
-  public static Timestamp readTimestamp(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readTimestamp(colNum, r);
-  }
-
-  public static Date readDate(int colNum, ResultSet r) throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readDate(colNum, r);
-  }
-
-  public static BytesWritable readBytesWritable(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readBytesWritable(colNum, r);
-  }
-
-  public static BigDecimal readBigDecimal(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readBigDecimal(colNum, r);
-  }
-
-  public static BlobRef readBlobRef(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readBlobRef(colNum, r);
-  }
-
-  public static ClobRef readClobRef(int colNum, ResultSet r)
-      throws SQLException {
-    return org.apache.sqoop.lib.JdbcWritableBridge.readClobRef(colNum, r);
-  }
-
-  public static void writeInteger(Integer val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeInteger(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeLong(Long val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeLong(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeDouble(Double val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeDouble(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeBoolean(Boolean val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeBoolean(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeFloat(Float val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeFloat(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeString(String val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeString(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeTimestamp(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeTime(Time val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeTime(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeDate(Date val, int paramIdx, int sqlType,
-      PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeDate(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeBytesWritable(BytesWritable val, int paramIdx,
-      int sqlType, PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeBytesWritable(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeBigDecimal(BigDecimal val, int paramIdx,
-      int sqlType, PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeBigDecimal(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeBlobRef(BlobRef val, int paramIdx,
-      int sqlType, PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeBlobRef(
-        val, paramIdx, sqlType, s);
-  }
-
-  public static void writeClobRef(ClobRef val, int paramIdx,
-      int sqlType, PreparedStatement s) throws SQLException {
-    org.apache.sqoop.lib.JdbcWritableBridge.writeClobRef(
-        val, paramIdx, sqlType, s);
-  }
-}
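
The deleted bridge simply forwarded to org.apache.sqoop.lib.JdbcWritableBridge, so the read/write signatures shown above are the ones to call directly. A hedged sketch of copying one VARCHAR column from a query result into an INSERT statement; the table names and the Connection are placeholders for the example:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Types;

    import org.apache.sqoop.lib.JdbcWritableBridge;

    public class JdbcBridgeExample {

      /** Copies the first (VARCHAR) column of each row into the target table. */
      public static void copyNames(Connection conn) throws SQLException {
        try (PreparedStatement select =
                 conn.prepareStatement("SELECT name FROM source_table");
             PreparedStatement insert =
                 conn.prepareStatement("INSERT INTO target_table (name) VALUES (?)");
             ResultSet rs = select.executeQuery()) {
          while (rs.next()) {
            // readString/writeString handle SQL NULLs, per the Javadoc above.
            String name = JdbcWritableBridge.readString(1, rs);
            JdbcWritableBridge.writeString(name, 1, Types.VARCHAR, insert);
            insert.executeUpdate();
          }
        }
      }
    }
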
diff --git a/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java b/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java
deleted file mode 100644
index b51cf0c..0000000
--- a/src/java/com/cloudera/sqoop/lib/LargeObjectLoader.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-/**
- * Contains a set of methods which can read db columns from a ResultSet into
- * Java types, and do serialization of these types to/from DataInput/DataOutput
- * for use with Hadoop's Writable implementation. This supports null values
- * for all types.
- *
- * This is a singleton instance class; only one may exist at a time.
- * However, its lifetime is limited to the current TaskInputOutputContext's
- * life.
- *
- * @deprecated use org.apache.sqoop.lib.LargeObjectLoader instead.
- * @see org.apache.sqoop.lib.LargeObjectLoader
- */
-public class LargeObjectLoader extends org.apache.sqoop.lib.LargeObjectLoader {
-
-  // Spill to external storage for BLOB/CLOB objects > 16 MB.
-  public static final long DEFAULT_MAX_LOB_LENGTH =
-      org.apache.sqoop.lib.LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;
-
-  public static final String MAX_INLINE_LOB_LEN_KEY =
-      org.apache.sqoop.lib.LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY;
-
-  /**
-   * Create a new LargeObjectLoader.
-   * @param conf the Configuration to use
-   */
-  public LargeObjectLoader(Configuration conf, Path workPath)
-      throws IOException {
-    super(conf, workPath);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/lib/LobRef.java b/src/java/com/cloudera/sqoop/lib/LobRef.java
deleted file mode 100644
index 518b622..0000000
--- a/src/java/com/cloudera/sqoop/lib/LobRef.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import java.util.regex.Matcher;
-
-import org.apache.commons.logging.Log;
-
-/**
- * Abstract base class that holds a reference to a Blob or a Clob.
- * DATATYPE is the type being held (e.g., a byte array).
- * CONTAINERTYPE is the type used to hold this data (e.g., BytesWritable).
- * ACCESSORTYPE is the type used to access this data in a streaming fashion
- *   (either an InputStream or a Reader).
- *
- * @deprecated use org.apache.sqoop.lib.LobRef instead.
- * @see org.apache.sqoop.lib.LobRef
- */
-public abstract class LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE>
-    extends org.apache.sqoop.lib.LobRef<DATATYPE, CONTAINERTYPE, ACCESSORTYPE> {
-
-  public static final Log LOG = org.apache.sqoop.lib.LobRef.LOG;
-
-  protected LobRef() {
-    super();
-  }
-
-  protected LobRef(CONTAINERTYPE container) {
-    super(container);
-  }
-
-  protected LobRef(String file, long offset, long length) {
-    super(file, offset, length);
-  }
-
-  protected static final ThreadLocal<Matcher> EXTERNAL_MATCHER =
-    org.apache.sqoop.lib.LobRef.EXTERNAL_MATCHER;
-}
-
diff --git a/src/java/com/cloudera/sqoop/lib/LobSerializer.java b/src/java/com/cloudera/sqoop/lib/LobSerializer.java
deleted file mode 100644
index b8324fe..0000000
--- a/src/java/com/cloudera/sqoop/lib/LobSerializer.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-/**
- * Serialize LOB classes to/from DataInput and DataOutput objects.
- *
- * @deprecated use org.apache.sqoop.lib.LobSerializer instead.
- * @see org.apache.sqoop.lib.LobSerializer
- */
-public final class LobSerializer {
-
-  private LobSerializer() { }
-
-  public static void writeClob(ClobRef clob, DataOutput out)
-      throws IOException {
-    org.apache.sqoop.lib.LobSerializer.writeClob(clob, out);
-  }
-
-  public static void writeBlob(BlobRef blob, DataOutput out)
-      throws IOException {
-    org.apache.sqoop.lib.LobSerializer.writeBlob(blob, out);
-  }
-
-  public static ClobRef readClobFields(DataInput in) throws IOException {
-    return org.apache.sqoop.lib.LobSerializer.readClobFields(in);
-  }
-
-  public static BlobRef readBlobFields(DataInput in) throws IOException {
-    return org.apache.sqoop.lib.LobSerializer.readBlobFields(in);
-  }
-}
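
LobSerializer's four static methods keep the same shapes on org.apache.sqoop.lib.LobSerializer, operating on the org.apache.sqoop.lib ClobRef/BlobRef types. A minimal round trip for an inline CLOB (the wrapper class name is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;

    import org.apache.sqoop.lib.ClobRef;
    import org.apache.sqoop.lib.LobSerializer;

    public class ClobRoundTrip {
      public static void main(String[] args) throws Exception {
        // An inline CLOB holding the character data directly.
        ClobRef clob = new ClobRef("some character data");

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
          LobSerializer.writeClob(clob, out);
        }

        try (DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
          ClobRef restored = LobSerializer.readClobFields(in);
          System.out.println(restored);
        }
      }
    }
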
diff --git a/src/java/com/cloudera/sqoop/lib/ProcessingException.java b/src/java/com/cloudera/sqoop/lib/ProcessingException.java
deleted file mode 100644
index c4216b1..0000000
--- a/src/java/com/cloudera/sqoop/lib/ProcessingException.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-/**
- * General error during processing of a SqoopRecord.
- *
- * @deprecated use org.apache.sqoop.lib.ProcessingException instead.
- * @see org.apache.sqoop.lib.ProcessingException
- */
-@SuppressWarnings("serial")
-public class ProcessingException
-  extends org.apache.sqoop.lib.ProcessingException {
-
-  public ProcessingException() {
-    super("ProcessingException");
-  }
-
-  public ProcessingException(final String message) {
-    super(message);
-  }
-
-  public ProcessingException(final Throwable cause) {
-    super(cause);
-  }
-
-  public ProcessingException(final String message, final Throwable cause) {
-    super(message, cause);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/lib/RecordParser.java b/src/java/com/cloudera/sqoop/lib/RecordParser.java
deleted file mode 100644
index a3238e8..0000000
--- a/src/java/com/cloudera/sqoop/lib/RecordParser.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-import org.apache.commons.logging.Log;
-
-/**
- * Parses a record containing one or more fields. Fields are separated
- * by some FIELD_DELIMITER character, e.g. a comma or a ^A character.
- * Records are terminated by a RECORD_DELIMITER character, e.g., a newline.
- *
- * Fields may be (optionally or mandatorily) enclosed by a quoting char
- * e.g., '\"'
- *
- * Fields may contain escaped characters. An escape character may be, e.g.,
- * the '\\' character. Any character following an escape character
- * is treated literally. e.g., '\n' is recorded as an 'n' character, not a
- * newline.
- *
- * Unexpected results may occur if the enclosing character escapes itself.
- * e.g., this cannot parse SQL SELECT statements where the single character
- * ['] escapes to [''].
- *
- * This class is not synchronized. Multiple threads must use separate
- * instances of RecordParser.
- *
- * The fields parsed by RecordParser are backed by an internal buffer
- * which is cleared when the next call to parseRecord() is made. If
- * the buffer is required to be preserved, you must copy it yourself.
- *
- * @deprecated use org.apache.sqoop.lib.RecordParser instead.
- * @see org.apache.sqoop.lib.RecordParser
- */
-public final class RecordParser extends org.apache.sqoop.lib.RecordParser {
-
-  public static final Log LOG = org.apache.sqoop.lib.RecordParser.LOG;
-
-  /**
-   * An error thrown when parsing fails.
-   *
-   * @deprecated use org.apache.sqoop.lib.RecordParser.ParseError instead.
-   * @see org.apache.sqoop.lib.RecordParser.ParseError
-   */
-  public static class ParseError
-    extends org.apache.sqoop.lib.RecordParser.ParseError {
-
-    public ParseError() {
-      super();
-    }
-
-    public ParseError(final String msg) {
-      super(msg);
-    }
-
-    public ParseError(final String msg, final Throwable cause) {
-      super(msg, cause);
-    }
-
-    public ParseError(final Throwable cause) {
-      super(cause);
-    }
-  }
-
-  public RecordParser(final DelimiterSet delimitersIn) {
-    super(delimitersIn);
-  }
-}
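
A sketch of parsing one delimited record with the org.apache.sqoop.lib classes. The constructor and the nested ParseError come from the deleted shim above; parseRecord(CharSequence) returning a List<String> is assumed from the Apache Sqoop sources and should be checked against the version in use.

    import java.util.List;

    import org.apache.sqoop.lib.DelimiterSet;
    import org.apache.sqoop.lib.RecordParser;

    public class RecordParserExample {
      public static void main(String[] args) throws RecordParser.ParseError {
        // Comma-separated fields, newline-terminated records, optional
        // single-quote enclosing, backslash escaping.
        DelimiterSet delims = new DelimiterSet(',', '\n', '\'', '\\', false);
        RecordParser parser = new RecordParser(delims);

        // Assumed API: parseRecord(CharSequence) -> List<String>.
        // The returned fields are backed by an internal buffer that is reused
        // on the next parseRecord() call, so copy them if they must outlive it.
        List<String> fields = parser.parseRecord("1,'O\\'Brien',42\n");
        System.out.println(fields);
      }
    }
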
diff --git a/src/java/com/cloudera/sqoop/lib/SqoopRecord.java b/src/java/com/cloudera/sqoop/lib/SqoopRecord.java
deleted file mode 100644
index 7cfcbb3..0000000
--- a/src/java/com/cloudera/sqoop/lib/SqoopRecord.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.lib;
-
-
-/**
- * Interface implemented by the classes generated by sqoop's orm.ClassWriter.
- *
- * @deprecated use org.apache.sqoop.lib.SqoopRecord instead.
- * @see org.apache.sqoop.lib.SqoopRecord
- */
-public abstract class SqoopRecord extends org.apache.sqoop.lib.SqoopRecord {
-
-  public SqoopRecord() {
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/CatalogQueryManager.java b/src/java/com/cloudera/sqoop/manager/CatalogQueryManager.java
deleted file mode 100644
index 5e6a725..0000000
--- a/src/java/com/cloudera/sqoop/manager/CatalogQueryManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class CatalogQueryManager
-    extends org.apache.sqoop.manager.CatalogQueryManager {
-
-  public CatalogQueryManager(final String driverClass,
-    final SqoopOptions opts) {
-    super(driverClass, opts);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/ConnManager.java b/src/java/com/cloudera/sqoop/manager/ConnManager.java
deleted file mode 100644
index a4b4457..0000000
--- a/src/java/com/cloudera/sqoop/manager/ConnManager.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class ConnManager
-    extends org.apache.sqoop.manager.ConnManager {
-}
diff --git a/src/java/com/cloudera/sqoop/manager/Db2Manager.java b/src/java/com/cloudera/sqoop/manager/Db2Manager.java
deleted file mode 100644
index dd4c743..0000000
--- a/src/java/com/cloudera/sqoop/manager/Db2Manager.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class Db2Manager
-    extends org.apache.sqoop.manager.Db2Manager {
-
-  public Db2Manager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/manager/DefaultManagerFactory.java b/src/java/com/cloudera/sqoop/manager/DefaultManagerFactory.java
deleted file mode 100644
index c1ca763..0000000
--- a/src/java/com/cloudera/sqoop/manager/DefaultManagerFactory.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class DefaultManagerFactory
-  extends org.apache.sqoop.manager.DefaultManagerFactory {
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/DirectMySQLManager.java b/src/java/com/cloudera/sqoop/manager/DirectMySQLManager.java
deleted file mode 100644
index e13823d..0000000
--- a/src/java/com/cloudera/sqoop/manager/DirectMySQLManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class DirectMySQLManager
-    extends org.apache.sqoop.manager.DirectMySQLManager {
-
-  public DirectMySQLManager(final SqoopOptions options) {
-    super(options);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/DirectPostgresqlManager.java b/src/java/com/cloudera/sqoop/manager/DirectPostgresqlManager.java
deleted file mode 100644
index 18cbe74..0000000
--- a/src/java/com/cloudera/sqoop/manager/DirectPostgresqlManager.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class DirectPostgresqlManager
-    extends org.apache.sqoop.manager.DirectPostgresqlManager {
-
-  public DirectPostgresqlManager(final SqoopOptions opts) {
-    super(opts);
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/manager/ExportJobContext.java b/src/java/com/cloudera/sqoop/manager/ExportJobContext.java
deleted file mode 100644
index cad63a1..0000000
--- a/src/java/com/cloudera/sqoop/manager/ExportJobContext.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportJobContext
-    extends org.apache.sqoop.manager.ExportJobContext {
-
-  public ExportJobContext(final String table, final String jar,
-      final SqoopOptions opts) {
-    super(table, jar, opts);
-  }
-
-  public void setConnManager(ConnManager mgr) {
-    super.setConnManager(mgr);
-  }
-
-  public ConnManager getConnManager() {
-    return (ConnManager)super.getConnManager();
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/GenericJdbcManager.java b/src/java/com/cloudera/sqoop/manager/GenericJdbcManager.java
deleted file mode 100644
index 14d4554..0000000
--- a/src/java/com/cloudera/sqoop/manager/GenericJdbcManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class GenericJdbcManager
-    extends org.apache.sqoop.manager.GenericJdbcManager {
-
-  public GenericJdbcManager(final String driverClass, final SqoopOptions opts) {
-    super(driverClass, opts);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/HsqldbManager.java b/src/java/com/cloudera/sqoop/manager/HsqldbManager.java
deleted file mode 100644
index e0a2cf9..0000000
--- a/src/java/com/cloudera/sqoop/manager/HsqldbManager.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HsqldbManager
-    extends org.apache.sqoop.manager.HsqldbManager {
-
-  public HsqldbManager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/manager/ImportJobContext.java b/src/java/com/cloudera/sqoop/manager/ImportJobContext.java
deleted file mode 100644
index 8eec2eb..0000000
--- a/src/java/com/cloudera/sqoop/manager/ImportJobContext.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import org.apache.hadoop.fs.Path;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ImportJobContext
-    extends org.apache.sqoop.manager.ImportJobContext {
-
-  public ImportJobContext(final String table, final String jar,
-      final SqoopOptions opts, final Path destination) {
-    super(table, jar, opts, destination);
-  }
-
-  public void setConnManager(ConnManager mgr) {
-    super.setConnManager(mgr);
-  }
-
-  public ConnManager getConnManager() {
-    return (ConnManager)super.getConnManager();
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/InformationSchemaManager.java b/src/java/com/cloudera/sqoop/manager/InformationSchemaManager.java
deleted file mode 100644
index 056ddea..0000000
--- a/src/java/com/cloudera/sqoop/manager/InformationSchemaManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class InformationSchemaManager
-  extends org.apache.sqoop.manager.InformationSchemaManager {
-
-  public InformationSchemaManager(final String driverClass,
-    final SqoopOptions opts) {
-    super(driverClass, opts);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/ManagerFactory.java b/src/java/com/cloudera/sqoop/manager/ManagerFactory.java
deleted file mode 100644
index 3ffa722..0000000
--- a/src/java/com/cloudera/sqoop/manager/ManagerFactory.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class ManagerFactory
-    extends org.apache.sqoop.manager.ManagerFactory {
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/MySQLManager.java b/src/java/com/cloudera/sqoop/manager/MySQLManager.java
deleted file mode 100644
index 4c29a4c..0000000
--- a/src/java/com/cloudera/sqoop/manager/MySQLManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLManager
-    extends org.apache.sqoop.manager.MySQLManager {
-
-  public MySQLManager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/MySQLUtils.java b/src/java/com/cloudera/sqoop/manager/MySQLUtils.java
deleted file mode 100644
index 3410324..0000000
--- a/src/java/com/cloudera/sqoop/manager/MySQLUtils.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import java.io.IOException;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class MySQLUtils {
-
-  private MySQLUtils() {
-  }
-
-  public static final String MYSQL_DUMP_CMD =
-      org.apache.sqoop.manager.MySQLUtils.MYSQL_DUMP_CMD;
-  public static final String MYSQL_IMPORT_CMD =
-      org.apache.sqoop.manager.MySQLUtils.MYSQL_IMPORT_CMD;
-  public static final String OUTPUT_FIELD_DELIM_KEY =
-      org.apache.sqoop.manager.MySQLUtils.OUTPUT_FIELD_DELIM_KEY;
-  public static final String OUTPUT_RECORD_DELIM_KEY =
-      org.apache.sqoop.manager.MySQLUtils.OUTPUT_RECORD_DELIM_KEY;
-  public static final String OUTPUT_ENCLOSED_BY_KEY =
-      org.apache.sqoop.manager.MySQLUtils.OUTPUT_ENCLOSED_BY_KEY;
-  public static final String OUTPUT_ESCAPED_BY_KEY =
-      org.apache.sqoop.manager.MySQLUtils.OUTPUT_ESCAPED_BY_KEY;
-  public static final String OUTPUT_ENCLOSE_REQUIRED_KEY =
-      org.apache.sqoop.manager.MySQLUtils.OUTPUT_ENCLOSE_REQUIRED_KEY;
-  public static final String TABLE_NAME_KEY =
-      org.apache.sqoop.manager.MySQLUtils.TABLE_NAME_KEY;
-  public static final String CONNECT_STRING_KEY =
-      org.apache.sqoop.manager.MySQLUtils.CONNECT_STRING_KEY;
-  public static final String USERNAME_KEY =
-      org.apache.sqoop.manager.MySQLUtils.USERNAME_KEY;
-  public static final String WHERE_CLAUSE_KEY =
-      org.apache.sqoop.manager.MySQLUtils.WHERE_CLAUSE_KEY;
-  public static final String EXTRA_ARGS_KEY =
-      org.apache.sqoop.manager.MySQLUtils.EXTRA_ARGS_KEY;
-  public static final String MYSQL_DEFAULT_CHARSET =
-      org.apache.sqoop.manager.MySQLUtils.MYSQL_DEFAULT_CHARSET;
-
-  public static boolean outputDelimsAreMySQL(Configuration conf) {
-    return org.apache.sqoop.manager.MySQLUtils.outputDelimsAreMySQL(conf);
-  }
-
-  public static String writePasswordFile(Configuration conf)
-      throws IOException {
-    return org.apache.sqoop.manager.MySQLUtils.writePasswordFile(conf);
-  }
-
-}
-
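Migration note: the configuration keys and helpers this shim forwarded are read directly from org.apache.sqoop.manager.MySQLUtils from now on; the key names themselves do not change. A small sketch (the table name is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.manager.MySQLUtils;

    public final class MySQLUtilsExample {
      // Illustrative only: sets one of the forwarded keys and calls a forwarded helper.
      public static boolean usesMySQLDelims() {
        Configuration conf = new Configuration();
        conf.set(MySQLUtils.TABLE_NAME_KEY, "employees");
        return MySQLUtils.outputDelimsAreMySQL(conf);
      }
    }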
diff --git a/src/java/com/cloudera/sqoop/manager/OracleManager.java b/src/java/com/cloudera/sqoop/manager/OracleManager.java
deleted file mode 100644
index 7beb521..0000000
--- a/src/java/com/cloudera/sqoop/manager/OracleManager.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class OracleManager
-    extends org.apache.sqoop.manager.OracleManager {
-
-  public static final int ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST =
-    org.apache.sqoop.manager.OracleManager.ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST;
-  public static final String QUERY_LIST_DATABASES =
-    org.apache.sqoop.manager.OracleManager.QUERY_LIST_DATABASES;
-  public static final String QUERY_LIST_TABLES =
-    org.apache.sqoop.manager.OracleManager.QUERY_LIST_TABLES;
-  public static final String QUERY_COLUMNS_FOR_TABLE =
-    org.apache.sqoop.manager.OracleManager.QUERY_COLUMNS_FOR_TABLE;
-  public static final String QUERY_PRIMARY_KEY_FOR_TABLE =
-    org.apache.sqoop.manager.OracleManager.QUERY_PRIMARY_KEY_FOR_TABLE;
-  public static final String ORACLE_TIMEZONE_KEY =
-    org.apache.sqoop.manager.OracleManager.ORACLE_TIMEZONE_KEY;
-
-  public OracleManager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/PostgresqlManager.java b/src/java/com/cloudera/sqoop/manager/PostgresqlManager.java
deleted file mode 100644
index 354d260..0000000
--- a/src/java/com/cloudera/sqoop/manager/PostgresqlManager.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * Manages connections to Postgresql databases.
- */
-public class PostgresqlManager
-    extends org.apache.sqoop.manager.PostgresqlManager {
-
-  public PostgresqlManager(final SqoopOptions opts) {
-    super(opts);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/SQLServerManager.java b/src/java/com/cloudera/sqoop/manager/SQLServerManager.java
deleted file mode 100644
index 2cc0458..0000000
--- a/src/java/com/cloudera/sqoop/manager/SQLServerManager.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SQLServerManager
-    extends org.apache.sqoop.manager.SQLServerManager {
-
-  public SQLServerManager(final SqoopOptions opts) {
-    super(opts);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/manager/SqlManager.java b/src/java/com/cloudera/sqoop/manager/SqlManager.java
deleted file mode 100644
index 398e01b..0000000
--- a/src/java/com/cloudera/sqoop/manager/SqlManager.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class SqlManager
-    extends org.apache.sqoop.manager.SqlManager {
-
-  public static final String SUBSTITUTE_TOKEN =
-          org.apache.sqoop.manager.SqlManager.SUBSTITUTE_TOKEN;
-
-  public SqlManager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
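Migration note: SUBSTITUTE_TOKEN, the only constant this shim re-exported, remains available on org.apache.sqoop.manager.SqlManager. A sketch of embedding it in a free-form query (the SELECT itself is a placeholder):

    import org.apache.sqoop.manager.SqlManager;

    public final class FreeFormQueryExample {
      // Illustrative only: includes the substitution token so Sqoop can expand it at run time.
      public static String query() {
        return "SELECT id, name FROM employees WHERE " + SqlManager.SUBSTITUTE_TOKEN;
      }
    }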
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java
deleted file mode 100644
index ccd2be5..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlOutputFormat.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class AsyncSqlOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.AsyncSqlOutputFormat<K, V> {
-
-  public static final String RECORDS_PER_STATEMENT_KEY =
-      org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      RECORDS_PER_STATEMENT_KEY;
-
-  public static final String STATEMENTS_PER_TRANSACTION_KEY =
-      org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      STATEMENTS_PER_TRANSACTION_KEY;
-
-  public static final int DEFAULT_RECORDS_PER_STATEMENT =
-      org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      DEFAULT_RECORDS_PER_STATEMENT;
-
-  public static final int DEFAULT_STATEMENTS_PER_TRANSACTION =
-      org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      DEFAULT_STATEMENTS_PER_TRANSACTION;
-
-  public static final int UNLIMITED_STATEMENTS_PER_TRANSACTION =
-      org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      UNLIMITED_STATEMENTS_PER_TRANSACTION;
-
-  /**
-   * @deprecated Moving to use org.apache.sqoop namespace.
-   */
-  public static class AsyncDBOperation
-      extends org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      AsyncDBOperation {
-
-    public AsyncDBOperation(PreparedStatement s, boolean commitAndClose,
-        boolean batch) {
-        super(s, commitAndClose, batch);
-    }
-
-    public AsyncDBOperation(PreparedStatement s, boolean batch,
-        boolean commit, boolean stopThread) {
-      super(s, batch, commit, stopThread);
-    }
-
-  }
-
-  /**
-   * @deprecated Moving to use org.apache.sqoop namespace.
-   */
-  public static class AsyncSqlExecThread
-      extends org.apache.sqoop.mapreduce.AsyncSqlOutputFormat.
-      AsyncSqlExecThread{
-
-    public AsyncSqlExecThread(Connection conn, int stmtsPerTx) {
-      super(conn, stmtsPerTx);
-    }
-
-  }
-}
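Migration note: the batching keys re-exported above keep their names on org.apache.sqoop.mapreduce.AsyncSqlOutputFormat; a sketch of tuning them through a Configuration (the value 100 is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;

    public final class ExportBatchingExample {
      // Illustrative only: the keys are plain Configuration properties.
      public static Configuration tune() {
        Configuration conf = new Configuration();
        conf.setInt(AsyncSqlOutputFormat.RECORDS_PER_STATEMENT_KEY, 100);
        conf.setInt(AsyncSqlOutputFormat.STATEMENTS_PER_TRANSACTION_KEY,
            AsyncSqlOutputFormat.DEFAULT_STATEMENTS_PER_TRANSACTION);
        return conf;
      }
    }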
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java b/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java
deleted file mode 100644
index 265dc27..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AsyncSqlRecordWriter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.sql.SQLException;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class AsyncSqlRecordWriter<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.AsyncSqlRecordWriter<K, V> {
-
-  public AsyncSqlRecordWriter(TaskAttemptContext context)
-      throws ClassNotFoundException, SQLException {
-    super(context);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AutoProgressMapper.java b/src/java/com/cloudera/sqoop/mapreduce/AutoProgressMapper.java
deleted file mode 100644
index ee8ab6d..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AutoProgressMapper.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AutoProgressMapper<KEYIN, VALIN, KEYOUT, VALOUT>
-    extends org.apache.sqoop.mapreduce.AutoProgressMapper
-    <KEYIN, VALIN, KEYOUT, VALOUT> {
-
-  public static final String MAX_PROGRESS_PERIOD_KEY =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.MAX_PROGRESS_PERIOD_KEY;
-  public static final String SLEEP_INTERVAL_KEY =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.SLEEP_INTERVAL_KEY;
-  public static final String REPORT_INTERVAL_KEY =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.REPORT_INTERVAL_KEY;
-
-  public static final int DEFAULT_SLEEP_INTERVAL =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.DEFAULT_SLEEP_INTERVAL;
-  public static final int DEFAULT_REPORT_INTERVAL =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.DEFAULT_REPORT_INTERVAL;
-  public static final int DEFAULT_MAX_PROGRESS =
-      org.apache.sqoop.mapreduce.AutoProgressMapper.DEFAULT_MAX_PROGRESS;
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/AvroExportMapper.java
deleted file mode 100644
index 725b9d0..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroExportMapper.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroExportMapper
-    extends org.apache.sqoop.mapreduce.AvroExportMapper {
-
-  public static final String AVRO_COLUMN_TYPES_MAP =
-      org.apache.sqoop.mapreduce.AvroExportMapper.AVRO_COLUMN_TYPES_MAP;
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroImportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/AvroImportMapper.java
deleted file mode 100644
index 08b5788..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroImportMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroImportMapper
-    extends org.apache.sqoop.mapreduce.AvroImportMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/AvroInputFormat.java
deleted file mode 100644
index 2491012..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroInputFormat.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroInputFormat<T>
-    extends org.apache.sqoop.mapreduce.AvroInputFormat<T> {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroJob.java b/src/java/com/cloudera/sqoop/mapreduce/AvroJob.java
deleted file mode 100644
index 1ba22a5..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroJob.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.avro.Schema;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class AvroJob {
-
-  public static final String MAP_OUTPUT_SCHEMA =
-      org.apache.sqoop.mapreduce.AvroJob.MAP_OUTPUT_SCHEMA;
-
-  private AvroJob() { }
-
-  public static void setMapOutputSchema(Configuration job, Schema s) {
-    org.apache.sqoop.mapreduce.AvroJob.setMapOutputSchema(job, s);
-  }
-
-  public static Schema getMapOutputSchema(Configuration job) {
-    return org.apache.sqoop.mapreduce.AvroJob.getMapOutputSchema(job);
-  }
-
-}
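Migration note: the two static schema helpers forwarded by this class live on org.apache.sqoop.mapreduce.AvroJob with the same signatures. A minimal sketch:

    import org.apache.avro.Schema;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.mapreduce.AvroJob;

    public final class AvroJobExample {
      // Illustrative only: round-trips a trivial schema through the job configuration.
      public static Schema roundTrip() {
        Configuration conf = new Configuration();
        AvroJob.setMapOutputSchema(conf, Schema.create(Schema.Type.STRING));
        return AvroJob.getMapOutputSchema(conf);
      }
    }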
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/AvroOutputFormat.java
deleted file mode 100644
index 40b4368..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroOutputFormat.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroOutputFormat<T>
-    extends org.apache.sqoop.mapreduce.AvroOutputFormat<T> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/AvroRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/AvroRecordReader.java
deleted file mode 100644
index 8f34128..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/AvroRecordReader.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroRecordReader<T>
-    extends org.apache.sqoop.mapreduce.AvroRecordReader<T> {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java
deleted file mode 100644
index 7fb7c22..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/CombineShimRecordReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.IOException;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.sqoop.mapreduce.CombineFileSplit;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class CombineShimRecordReader
-    extends org.apache.sqoop.mapreduce.CombineShimRecordReader {
-
-  public CombineShimRecordReader(CombineFileSplit split,
-      TaskAttemptContext context, Integer index)
-      throws IOException, InterruptedException {
-    super(split, context, index);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java
deleted file mode 100644
index 2a648a0..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/DataDrivenImportJob.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import org.apache.hadoop.mapreduce.InputFormat;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class DataDrivenImportJob
-    extends org.apache.sqoop.mapreduce.DataDrivenImportJob {
-
-  public DataDrivenImportJob(final SqoopOptions opts) {
-    super(opts);
-  }
-
-  public DataDrivenImportJob(final SqoopOptions opts,
-      final Class<? extends InputFormat> inputFormatClass,
-      ImportJobContext context) {
-    super(opts, inputFormatClass, context);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java
deleted file mode 100644
index 2994d21..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.FieldMappable;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class DelegatingOutputFormat<K extends FieldMappable, V>
-    extends org.apache.sqoop.mapreduce.DelegatingOutputFormat<K, V> {
-
-  public static final String DELEGATE_CLASS_KEY =
-      org.apache.sqoop.mapreduce.DelegatingOutputFormat.DELEGATE_CLASS_KEY;
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java
deleted file mode 100644
index 030bcbe..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportBatchOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.ExportBatchOutputFormat
-    <SqoopRecord, V> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/ExportInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/ExportInputFormat.java
deleted file mode 100644
index e0ad15b..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/ExportInputFormat.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.hadoop.mapreduce.JobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportInputFormat
-   extends org.apache.sqoop.mapreduce.ExportInputFormat {
-
-  public static final int DEFAULT_NUM_MAP_TASKS =
-      org.apache.sqoop.mapreduce.ExportInputFormat.DEFAULT_NUM_MAP_TASKS;
-
-  public static void setNumMapTasks(JobContext job, int numTasks) {
-    org.apache.sqoop.mapreduce.ExportInputFormat.setNumMapTasks(job, numTasks);
-  }
-
-  public static int getNumMapTasks(JobContext job) {
-    return org.apache.sqoop.mapreduce.ExportInputFormat.getNumMapTasks(job);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java b/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java
deleted file mode 100644
index 026b5e4..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/ExportJobBase.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.IOException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ExportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportJobBase
-    extends org.apache.sqoop.mapreduce.ExportJobBase {
-
-  public static final String SQOOP_EXPORT_TABLE_CLASS_KEY =
-      org.apache.sqoop.mapreduce.ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY;
-
-  public static final String SQOOP_EXPORT_UPDATE_COL_KEY =
-      org.apache.sqoop.mapreduce.ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY;
-
-  public static final String EXPORT_MAP_TASKS_KEY =
-      org.apache.sqoop.mapreduce.ExportJobBase.EXPORT_MAP_TASKS_KEY;
-
-  public ExportJobBase(final ExportJobContext ctxt) {
-    super(ctxt);
-  }
-
-  public ExportJobBase(final ExportJobContext ctxt,
-      final Class<? extends Mapper> mapperClass,
-      final Class<? extends InputFormat> inputFormatClass,
-      final Class<? extends OutputFormat> outputFormatClass) {
-    super(ctxt, mapperClass, inputFormatClass, outputFormatClass);
-  }
-
-  public static boolean isSequenceFiles(Configuration conf, Path p)
-      throws IOException {
-    return org.apache.sqoop.mapreduce.ExportJobBase.isSequenceFiles(conf, p);
-  }
-
-  public static FileType getFileType(Configuration conf, Path p)
-      throws IOException {
-    return org.apache.sqoop.mapreduce.ExportJobBase.getFileType(conf, p);
-  }
-
-}
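Migration note: the static file-type checks forwarded here keep their signatures on org.apache.sqoop.mapreduce.ExportJobBase; a sketch (the path is a placeholder):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.sqoop.mapreduce.ExportJobBase;

    public final class ExportFileTypeExample {
      // Illustrative only: asks whether the export source directory holds SequenceFiles.
      public static boolean exportsSequenceFiles(Configuration conf) throws IOException {
        return ExportJobBase.isSequenceFiles(conf, new Path("/tmp/sqoop-export/employees"));
      }
    }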
diff --git a/src/java/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java
deleted file mode 100644
index 2b99552..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/ExportOutputFormat.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.sql.SQLException;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.ExportOutputFormat<K, V> {
-
-  /** {@inheritDoc}. **/
-  public class ExportRecordWriter<K extends SqoopRecord, V> extends
-    org.apache.sqoop.mapreduce.ExportOutputFormat<K, V>.ExportRecordWriter {
-
-    public ExportRecordWriter(TaskAttemptContext context)
-      throws ClassNotFoundException, SQLException {
-      super(context);
-    }
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java
deleted file mode 100644
index b3d8ec6..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportJob.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HBaseImportJob
-    extends org.apache.sqoop.mapreduce.HBaseImportJob {
-
-  public static final Log LOG = LogFactory.getLog(
-      HBaseImportJob.class.getName());
-
-  public HBaseImportJob(final SqoopOptions opts,
-      final ImportJobContext importContext) {
-    super(opts, importContext);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/HBaseImportMapper.java
deleted file mode 100644
index 1fbc333..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/HBaseImportMapper.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HBaseImportMapper
-    extends org.apache.sqoop.mapreduce.HBaseImportMapper {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java b/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java
deleted file mode 100644
index c9fe8e5..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/ImportJobBase.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ImportJobBase
-    extends org.apache.sqoop.mapreduce.ImportJobBase {
-
-  public ImportJobBase() {
-    super();
-  }
-
-  public ImportJobBase(final SqoopOptions opts) {
-    super(opts);
-  }
-
-  public ImportJobBase(final SqoopOptions opts,
-      final Class<? extends Mapper> mapperClass,
-      final Class<? extends InputFormat> inputFormatClass,
-      final Class<? extends OutputFormat> outputFormatClass,
-      final ImportJobContext context) {
-    super(opts, mapperClass, inputFormatClass, outputFormatClass, context);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/JdbcExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/JdbcExportJob.java
deleted file mode 100644
index d0baeaf..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/JdbcExportJob.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ExportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JdbcExportJob
-    extends org.apache.sqoop.mapreduce.JdbcExportJob {
-
-  public JdbcExportJob(final ExportJobContext context) {
-    super(context);
-  }
-
-  public JdbcExportJob(final ExportJobContext ctxt,
-      final Class<? extends Mapper> mapperClass,
-      final Class<? extends InputFormat> inputFormatClass,
-      final Class<? extends OutputFormat> outputFormatClass) {
-    super(ctxt, mapperClass, inputFormatClass, outputFormatClass);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java
deleted file mode 100644
index 5d282ba..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpdateExportJob.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.IOException;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ExportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JdbcUpdateExportJob
-    extends org.apache.sqoop.mapreduce.JdbcUpdateExportJob {
-
-  public JdbcUpdateExportJob(final ExportJobContext context)
-      throws IOException {
-    super(context);
-  }
-
-  public JdbcUpdateExportJob(final ExportJobContext ctxt,
-      final Class<? extends Mapper> mapperClass,
-      final Class<? extends InputFormat> inputFormatClass,
-      final Class<? extends OutputFormat> outputFormatClass) {
-    super(ctxt, mapperClass, inputFormatClass, outputFormatClass);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpsertExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/JdbcUpsertExportJob.java
deleted file mode 100644
index c23f234..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/JdbcUpsertExportJob.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.IOException;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ExportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JdbcUpsertExportJob
-    extends org.apache.sqoop.mapreduce.JdbcUpsertExportJob {
-
-  public JdbcUpsertExportJob(final ExportJobContext context,
-      final Class<? extends OutputFormat> outputFormatClass)
-      throws IOException {
-    super(context, outputFormatClass);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/JobBase.java b/src/java/com/cloudera/sqoop/mapreduce/JobBase.java
deleted file mode 100644
index 974422a..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/JobBase.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.IOException;
-import java.sql.SQLException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.sqoop.config.ConfigurationHelper;
-import org.apache.sqoop.manager.ConnManager;
-import org.apache.sqoop.validation.ValidationContext;
-import org.apache.sqoop.validation.ValidationException;
-import org.apache.sqoop.validation.ValidationFailureHandler;
-import org.apache.sqoop.validation.ValidationThreshold;
-import org.apache.sqoop.validation.Validator;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-@Deprecated
-public class JobBase
-    extends org.apache.sqoop.mapreduce.JobBase {
-
-  public JobBase() {
-    super();
-  }
-
-  public JobBase(final SqoopOptions opts) {
-    super(opts);
-  }
-
-  public JobBase(final SqoopOptions opts,
-      final Class<? extends Mapper> mapperClass,
-      final Class<? extends InputFormat> inputFormatClass,
-      final Class<? extends OutputFormat> outputFormatClass) {
-    super(opts, mapperClass, inputFormatClass, outputFormatClass);
-  }
-
-  protected long getRowCountFromDB(ConnManager connManager, String tableName)
-    throws SQLException {
-    return connManager.getTableRowCount(tableName);
-  }
-
-  protected long getRowCountFromHadoop(Job job)
-    throws IOException, InterruptedException {
-    return ConfigurationHelper.getNumMapOutputRecords(job);
-  }
-
-  protected void doValidate(SqoopOptions options, Configuration conf,
-                            ValidationContext validationContext)
-    throws ValidationException {
-    Validator validator = (Validator) ReflectionUtils.newInstance(
-        options.getValidatorClass(), conf);
-    ValidationThreshold threshold = (ValidationThreshold)
-        ReflectionUtils.newInstance(options.getValidationThresholdClass(),
-          conf);
-    ValidationFailureHandler failureHandler = (ValidationFailureHandler)
-        ReflectionUtils.newInstance(options.getValidationFailureHandlerClass(),
-          conf);
-
-    StringBuilder sb = new StringBuilder();
-    sb.append("Validating the integrity of the import using the "
-      + "following configuration\n");
-    sb.append("\tValidator : ").append(validator.getClass().getName())
-      .append('\n');
-    sb.append("\tThreshold Specifier : ")
-      .append(threshold.getClass().getName()).append('\n');
-    sb.append("\tFailure Handler : ")
-      .append(failureHandler.getClass().getName()).append('\n');
-    LOG.info(sb.toString());
-    validator.validate(validationContext, threshold, failureHandler);
-  }
-}
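Migration note: unlike most of the wrappers removed in this patch, this class carried its own row-count and validation helpers; subclasses that relied on them are expected to extend org.apache.sqoop.mapreduce.JobBase directly, which exposes the same constructors (whether the helpers themselves now live there is not shown in this hunk). A minimal sketch with a hypothetical subclass name:

    import org.apache.sqoop.SqoopOptions;
    import org.apache.sqoop.mapreduce.JobBase;

    // CustomJob is a hypothetical example; it only shows the package switch.
    public class CustomJob extends JobBase {
      public CustomJob(final SqoopOptions opts) {
        super(opts);
      }
    }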
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java b/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java
deleted file mode 100644
index 5b21343..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeJob.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeJob
-    extends org.apache.sqoop.mapreduce.MergeJob {
-
-  public static final String MERGE_OLD_PATH_KEY =
-      org.apache.sqoop.mapreduce.MergeJob.MERGE_OLD_PATH_KEY;
-  public static final String MERGE_NEW_PATH_KEY =
-      org.apache.sqoop.mapreduce.MergeJob.MERGE_NEW_PATH_KEY;
-  public static final String MERGE_KEY_COL_KEY =
-      org.apache.sqoop.mapreduce.MergeJob.MERGE_KEY_COL_KEY;
-  public static final String MERGE_SQOOP_RECORD_KEY =
-      org.apache.sqoop.mapreduce.MergeJob.MERGE_SQOOP_RECORD_KEY;
-
-  public MergeJob(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
-
-
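Migration note: the merge configuration keys re-exported above remain on org.apache.sqoop.mapreduce.MergeJob under the same names; a sketch of setting the merge key column (the column name is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.mapreduce.MergeJob;

    public final class MergeKeyExample {
      // Illustrative only: the merge key column is carried as a plain Configuration property.
      public static Configuration withMergeKey() {
        Configuration conf = new Configuration();
        conf.set(MergeJob.MERGE_KEY_COL_KEY, "id");
        return conf;
      }
    }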
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java b/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java
deleted file mode 100644
index 083c8dc..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeMapperBase.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeMapperBase<INKEY, INVAL>
-    extends org.apache.sqoop.mapreduce.MergeMapperBase<INKEY, INVAL> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java b/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java
deleted file mode 100644
index 0494c05..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeRecord.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeRecord
-    extends org.apache.sqoop.mapreduce.MergeRecord {
-
-  public MergeRecord() {
-    super();
-  }
-
-  public MergeRecord(SqoopRecord sr, boolean recordIsNew) {
-    super(sr, recordIsNew);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeRecordMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MergeRecordMapper.java
deleted file mode 100644
index a9acf9e..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeRecordMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeRecordMapper
-    extends org.apache.sqoop.mapreduce.MergeRecordMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeReducer.java b/src/java/com/cloudera/sqoop/mapreduce/MergeReducer.java
deleted file mode 100644
index e3ecde3..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeReducer.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeReducer
-    extends org.apache.sqoop.mapreduce.MergeReducer {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java
deleted file mode 100644
index d4d7d02..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MergeTextMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeTextMapper
-    extends org.apache.sqoop.mapreduce.MergeTextMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java
deleted file mode 100644
index a6d0de6..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpImportJob.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLDumpImportJob
-    extends org.apache.sqoop.mapreduce.MySQLDumpImportJob {
-
-  public MySQLDumpImportJob(final SqoopOptions opts, ImportJobContext context)
-      throws ClassNotFoundException {
-    super(opts, context);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java
deleted file mode 100644
index 066e4e0..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpInputFormat.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLDumpInputFormat
-    extends org.apache.sqoop.mapreduce.MySQLDumpInputFormat {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpMapper.java
deleted file mode 100644
index b6fe39f..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLDumpMapper.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.util.PerfCounters;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLDumpMapper
-    extends org.apache.sqoop.mapreduce.MySQLDumpMapper {
-
-  /**
-   * @deprecated Moving to use org.apache.sqoop namespace.
-   */
-  public static class CopyingAsyncSink
-      extends org.apache.sqoop.mapreduce.MySQLDumpMapper.CopyingAsyncSink {
-
-    protected CopyingAsyncSink(final MySQLDumpMapper.Context context,
-        final PerfCounters ctrs) {
-      super(context, ctrs);
-    }
-
-  }
-
-  /**
-   * @deprecated Moving to use org.apache.sqoop namespace.
-   */
-  public static class ReparsingAsyncSink
-      extends org.apache.sqoop.mapreduce.MySQLDumpMapper.ReparsingAsyncSink {
-
-    protected ReparsingAsyncSink(final MySQLDumpMapper.Context c,
-        final Configuration conf, final PerfCounters ctrs) {
-      super(c, conf, ctrs);
-    }
-
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java
deleted file mode 100644
index 2eb8442..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportJob.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.manager.ExportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLExportJob
-    extends org.apache.sqoop.mapreduce.MySQLExportJob {
-
-  public MySQLExportJob(final ExportJobContext context) {
-    super(context);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java
deleted file mode 100644
index 30046dd..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLExportMapper.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLExportMapper<KEYIN, VALIN>
-    extends org.apache.sqoop.mapreduce.MySQLExportMapper<KEYIN, VALIN> {
-
-  public static final String MYSQL_CHECKPOINT_BYTES_KEY =
-      org.apache.sqoop.mapreduce.MySQLExportMapper.MYSQL_CHECKPOINT_BYTES_KEY;
-
-  public static final long DEFAULT_CHECKPOINT_BYTES =
-      org.apache.sqoop.mapreduce.MySQLExportMapper.DEFAULT_CHECKPOINT_BYTES;
-
-}
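
Illustrative sketch (not part of the patch): a hedged example of tuning the MySQL direct-export checkpoint size through the constants that now live only in org.apache.sqoop.mapreduce.MySQLExportMapper; doubling the default is purely illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.mapreduce.MySQLExportMapper;

public class CheckpointSizeExample {
  public static void configureCheckpoint(Configuration conf) {
    // was: com.cloudera.sqoop.mapreduce.MySQLExportMapper.MYSQL_CHECKPOINT_BYTES_KEY
    conf.setLong(MySQLExportMapper.MYSQL_CHECKPOINT_BYTES_KEY,
        2L * MySQLExportMapper.DEFAULT_CHECKPOINT_BYTES); // twice the shipped default
  }
}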
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLRecordExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLRecordExportMapper.java
deleted file mode 100644
index 08751d2..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLRecordExportMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLRecordExportMapper
-    extends org.apache.sqoop.mapreduce.MySQLRecordExportMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java
deleted file mode 100644
index 95517e2..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/MySQLTextExportMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MySQLTextExportMapper
-    extends org.apache.sqoop.mapreduce.MySQLTextExportMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/NullOutputCommitter.java b/src/java/com/cloudera/sqoop/mapreduce/NullOutputCommitter.java
deleted file mode 100644
index c3c9b39..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/NullOutputCommitter.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class NullOutputCommitter
-    extends org.apache.sqoop.mapreduce.NullOutputCommitter {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java
deleted file mode 100644
index 8ae10d7..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/OracleExportOutputFormat.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class OracleExportOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.OracleExportOutputFormat<K, V> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/OracleUpsertOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/OracleUpsertOutputFormat.java
deleted file mode 100644
index aa20e7f..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/OracleUpsertOutputFormat.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class OracleUpsertOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.OracleUpsertOutputFormat<K, V> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java
deleted file mode 100644
index 2e9518a..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/RawKeyTextOutputFormat.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import java.io.DataOutputStream;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class RawKeyTextOutputFormat<K, V>
-    extends org.apache.sqoop.mapreduce.RawKeyTextOutputFormat<K, V> {
-
-  /**
-   * @deprecated Moving to use org.apache.sqoop namespace.
-   */
-  public static class RawKeyRecordWriter<K, V>
-      extends org.apache.sqoop.mapreduce.RawKeyTextOutputFormat.
-      RawKeyRecordWriter<K, V> {
-
-    public RawKeyRecordWriter(DataOutputStream out) {
-      super(out);
-    }
-
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/SQLServerExportOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/SQLServerExportOutputFormat.java
deleted file mode 100644
index f7841ca..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/SQLServerExportOutputFormat.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SQLServerExportOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.SQLServerExportOutputFormat<K, V> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/SequenceFileExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/SequenceFileExportMapper.java
deleted file mode 100644
index 3b5b8b7..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/SequenceFileExportMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SequenceFileExportMapper
-    extends org.apache.sqoop.mapreduce.SequenceFileExportMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/SequenceFileImportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/SequenceFileImportMapper.java
deleted file mode 100644
index 152a942..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/SequenceFileImportMapper.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SequenceFileImportMapper
-    extends org.apache.sqoop.mapreduce.SequenceFileImportMapper {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/TextExportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/TextExportMapper.java
deleted file mode 100644
index 742feb6..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/TextExportMapper.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class TextExportMapper
-    extends org.apache.sqoop.mapreduce.TextExportMapper {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/TextImportMapper.java b/src/java/com/cloudera/sqoop/mapreduce/TextImportMapper.java
deleted file mode 100644
index 0479297..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/TextImportMapper.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class TextImportMapper
-    extends org.apache.sqoop.mapreduce.TextImportMapper {
-}
-
diff --git a/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java
deleted file mode 100644
index fb47f9d..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/UpdateOutputFormat.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.mapreduce;
-
-import com.cloudera.sqoop.lib.SqoopRecord;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class UpdateOutputFormat<K extends SqoopRecord, V>
-    extends org.apache.sqoop.mapreduce.UpdateOutputFormat<K, V> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java
deleted file mode 100644
index 8d3505a..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/BigDecimalSplitter.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * Implement DBSplitter over BigDecimal values.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.BigDecimalSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.BigDecimalSplitter
- */
-public class BigDecimalSplitter
-  extends org.apache.sqoop.mapreduce.db.BigDecimalSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/BooleanSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/BooleanSplitter.java
deleted file mode 100644
index da5ef85..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/BooleanSplitter.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * Implement DBSplitter over boolean values.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.BooleanSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.BooleanSplitter
- */
-public class BooleanSplitter
-  extends org.apache.sqoop.mapreduce.db.BooleanSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBConfiguration.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBConfiguration.java
deleted file mode 100644
index 89f2b4f..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DBConfiguration.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * A container for configuration property names for jobs with DB input/output.
- *
- * The job can be configured using the static methods in this class,
- * {@link DBInputFormat}, and {@link DBOutputFormat}.
- * Alternatively, the properties can be set in the configuration with proper
- * values.
- *
- * @see DBConfiguration#configureDB(Configuration, String, String, String,
- * String)
- * @see DBInputFormat#setInput(Job, Class, String, String)
- * @see DBInputFormat#setInput(Job, Class, String, String, String, String...)
- * @see DBOutputFormat#setOutput(Job, String, String...)
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DBConfiguration instead.
- * @see org.apache.sqoop.mapreduce.db.DBConfiguration
- */
-public class DBConfiguration
-  extends org.apache.sqoop.mapreduce.db.DBConfiguration {
-
-  /** The JDBC Driver class name. */
-  public static final String DRIVER_CLASS_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.DRIVER_CLASS_PROPERTY;
-
-  /** JDBC Database access URL. */
-  public static final String URL_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.URL_PROPERTY;
-
-  /** User name to access the database. */
-  public static final String USERNAME_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.USERNAME_PROPERTY;
-
-  /** Password to access the database. */
-  public static final String PASSWORD_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.PASSWORD_PROPERTY;
-
-  /** Fetch size. */
-  public static final String FETCH_SIZE =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.FETCH_SIZE;
-
-  /** Input table name. */
-  public static final String INPUT_TABLE_NAME_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_TABLE_NAME_PROPERTY;
-
-  /** Field names in the Input table. */
-  public static final String INPUT_FIELD_NAMES_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_FIELD_NAMES_PROPERTY;
-
-  /** WHERE clause in the input SELECT statement. */
-  public static final String INPUT_CONDITIONS_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_CONDITIONS_PROPERTY;
-
-  /** ORDER BY clause in the input SELECT statement. */
-  public static final String INPUT_ORDER_BY_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_ORDER_BY_PROPERTY;
-
-  /** Whole input query, excluding LIMIT...OFFSET. */
-  public static final String INPUT_QUERY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_QUERY;
-
-  /** Input query to get the count of records. */
-  public static final String INPUT_COUNT_QUERY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_COUNT_QUERY;
-
-  /** Input query to get the max and min values of the jdbc.input.query. */
-  public static final String INPUT_BOUNDING_QUERY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_BOUNDING_QUERY;
-
-  /** Class name implementing DBWritable which will hold input tuples. */
-  public static final String INPUT_CLASS_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.INPUT_CLASS_PROPERTY;
-
-  /** Output table name. */
-  public static final String OUTPUT_TABLE_NAME_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY;
-
-  /** Field names in the Output table. */
-  public static final String OUTPUT_FIELD_NAMES_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY;
-
-  /** Number of fields in the Output table. */
-  public static final String OUTPUT_FIELD_COUNT_PROPERTY =
-      org.apache.sqoop.mapreduce.db.DBConfiguration.OUTPUT_FIELD_COUNT_PROPERTY;
-
-  /**
-   * Sets the DB access related fields in the {@link Configuration}.
-   * @param conf the configuration
-   * @param driverClass JDBC Driver class name
-   * @param dbUrl JDBC DB access URL
-   * @param userName DB access username
-   * @param passwd DB access passwd
-   * @param fetchSize DB fetch size
-   */
-  public static void configureDB(Configuration conf, String driverClass,
-      String dbUrl, String userName, String passwd, Integer fetchSize) {
-
-    org.apache.sqoop.mapreduce.db.DBConfiguration.configureDB(
-        conf, driverClass, dbUrl, userName, passwd, fetchSize);
-  }
-
-  /**
-   * Sets the DB access related fields in the JobConf.
-   * @param job the job
-   * @param driverClass JDBC Driver class name
-   * @param dbUrl JDBC DB access URL
-   * @param fetchSize DB fetch size
-   */
-  public static void configureDB(Configuration job, String driverClass,
-      String dbUrl, Integer fetchSize) {
-    org.apache.sqoop.mapreduce.db.DBConfiguration.configureDB(job, driverClass,
-        dbUrl, fetchSize);
-  }
-
-  /**
-   * Sets the DB access related fields in the {@link Configuration}.
-   * @param conf the configuration
-   * @param driverClass JDBC Driver class name
-   * @param dbUrl JDBC DB access URL
-   * @param userName DB access username
-   * @param passwd DB access passwd
-   */
-  public static void configureDB(Configuration conf, String driverClass,
-      String dbUrl, String userName, String passwd) {
-    org.apache.sqoop.mapreduce.db.DBConfiguration.configureDB(conf, driverClass,
-        dbUrl, userName, passwd);
-  }
-
-  /**
-   * Sets the DB access related fields in the JobConf.
-   * @param job the job
-   * @param driverClass JDBC Driver class name
-   * @param dbUrl JDBC DB access URL.
-   */
-  public static void configureDB(Configuration job, String driverClass,
-      String dbUrl) {
-    org.apache.sqoop.mapreduce.db.DBConfiguration.configureDB(job, driverClass,
-        dbUrl);
-  }
-
-  public DBConfiguration(Configuration job) {
-    super(job);
-  }
-}
-
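
Illustrative sketch (not part of the patch): the deleted DBConfiguration shim only re-exported property names and forwarded the static configureDB overloads, so callers switch to the org.apache.sqoop class directly. A minimal example using the overload with a fetch size, as forwarded above; the JDBC driver, URL and credentials are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.mapreduce.db.DBConfiguration;

public class ConfigureDBExample {
  public static DBConfiguration configure() {
    Configuration conf = new Configuration();
    // was: com.cloudera.sqoop.mapreduce.db.DBConfiguration.configureDB(...)
    DBConfiguration.configureDB(conf,
        "com.mysql.jdbc.Driver",        // driverClass (placeholder)
        "jdbc:mysql://localhost/sales", // dbUrl (placeholder)
        "sqoop_user", "secret",         // userName, passwd (placeholders)
        1000);                          // fetchSize
    return new DBConfiguration(conf);
  }
}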
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java
deleted file mode 100644
index 90c1b24..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DBInputFormat.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * An InputFormat that reads input data from an SQL table.
- * <p>
- * DBInputFormat emits LongWritables containing the record number as
- * key and DBWritables as value.
- *
- * The SQL query and input class can be set using one of the two
- * setInput methods.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DBInputFormat instead.
- * @see org.apache.sqoop.mapreduce.db.DBInputFormat
- */
-public class DBInputFormat<T extends DBWritable>
-    extends org.apache.sqoop.mapreduce.db.DBInputFormat<T> {
-
-  /**
-   * A Class that does nothing, implementing DBWritable.
-   * @deprecated use org.apache.sqoop.mapreduce.db.DBInputFormat.NullDBWritable
-   *   instead.
-   * @see org.apache.sqoop.mapreduce.db.DBInputFormat.NullDBWritable
-   */
-  public static class NullDBWritable
-    extends org.apache.sqoop.mapreduce.db.DBInputFormat.NullDBWritable {
-  }
-
-  /**
-   * An InputSplit that spans a set of rows.
-   *
-   * @deprecated use org.apache.sqoop.mapreduce.db.DBInputFormat.DBInputSplit
-   *   instead.
-   * @see org.apache.sqoop.mapreduce.db.DBInputFormat.DBInputSplit
-   */
-  public static class DBInputSplit extends
-    org.apache.sqoop.mapreduce.db.DBInputFormat.DBInputSplit {
-
-    /**
-     * Default Constructor.
-     */
-    public DBInputSplit() {
-      super();
-    }
-
-    /**
-     * Convenience Constructor.
-     * @param start the index of the first row to select
-     * @param end the index of the last row to select
-     */
-    public DBInputSplit(long start, long end) {
-      super(start, end);
-    }
-  }
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java
deleted file mode 100644
index e68adb9..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * An OutputFormat that sends the reduce output to a SQL table.
- * <p>
- * {@link DBOutputFormat} accepts &lt;key,value&gt; pairs, where
- * key has a type extending DBWritable. Returned {@link RecordWriter}
- * writes <b>only the key</b> to the database with a batch SQL query.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DBOutputFormat instead.
- * @see org.apache.sqoop.mapreduce.db.DBOutputFormat
- */
-public class DBOutputFormat<K extends DBWritable, V>
-    extends org.apache.sqoop.mapreduce.db.DBOutputFormat<K, V> {
-
-  /**
-   * A RecordWriter that writes the reduce output to a SQL table.
-   *
-   * @deprecated use
-   *   org.apache.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter instead.
-   * @see org.apache.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter
-   */
-  public static class DBRecordWriter<K extends DBWritable, V> extends
-    org.apache.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter<K, V> {
-
-    public DBRecordWriter() throws SQLException {
-      super();
-    }
-
-    public DBRecordWriter(Connection connection,
-        PreparedStatement statement) throws SQLException {
-      super(connection, statement);
-    }
-  }
-}
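
Illustrative sketch (not part of the patch): binding a job's reduce output to a table via the retained org.apache.sqoop.mapreduce.db.DBOutputFormat. The setOutput(Job, String, String...) signature is taken from the @see reference in the DBConfiguration javadoc above and is assumed to match Hadoop's (throwing IOException); the table and column names are hypothetical:

import java.io.IOException;
import org.apache.hadoop.mapreduce.Job;
import org.apache.sqoop.mapreduce.db.DBOutputFormat;

public class OutputBindingExample {
  public static void bindOutput(Job job) throws IOException {
    job.setOutputFormatClass(DBOutputFormat.class);
    // was: com.cloudera.sqoop.mapreduce.db.DBOutputFormat.setOutput(...)
    DBOutputFormat.setOutput(job, "employees", "id", "name", "salary");
  }
}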
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBRecordReader.java
deleted file mode 100644
index 2f6a897..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DBRecordReader.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * A RecordReader that reads records from a SQL table.
- * Emits LongWritables containing the record number as
- * key and DBWritables as value.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DBRecordReader instead.
- * @see org.apache.sqoop.mapreduce.db.DBRecordReader
- */
-public class DBRecordReader<T extends DBWritable> extends
-    org.apache.sqoop.mapreduce.db.DBRecordReader<T> {
-
-  /**
-   * @param split The InputSplit to read data for
-   * @throws SQLException
-   */
-  // CHECKSTYLE:OFF
-  // TODO (aaron): Refactor constructor to take fewer arguments
-  public DBRecordReader(DBInputFormat.DBInputSplit split,
-      Class<T> inputClass, Configuration conf, Connection conn,
-      DBConfiguration dbConfig, String cond, String [] fields, String table)
-      throws SQLException {
-    super(split, inputClass, conf, conn, dbConfig, cond, fields, table);
-  }
-  // CHECKSTYLE:ON
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DBSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/DBSplitter.java
deleted file mode 100644
index 6abeeba..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DBSplitter.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * DBSplitter will generate DBInputSplits to use with DataDrivenDBInputFormat.
- * DataDrivenDBInputFormat needs to interpolate between two values that
- * represent the lowest and highest valued records to import. Depending
- * on the data-type of the column, this requires different behavior.
- * DBSplitter implementations should perform this for a data type or family
- * of data types.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DBSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.DBSplitter
- */
-public interface DBSplitter extends org.apache.sqoop.mapreduce.db.DBSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
deleted file mode 100644
index 4cdb218..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * An InputFormat that reads input data from an SQL table.
- * Operates like DBInputFormat, but instead of using LIMIT and OFFSET to
- * demarcate splits, it tries to generate WHERE clauses which separate the
- * data into roughly equivalent shards.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat instead
- * @see org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat
- */
-public class DataDrivenDBInputFormat<T extends DBWritable>
-    extends org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat<T> {
-
-  /**
-   * If users are providing their own query, the following string is expected
-   * to appear in the WHERE clause, which will be substituted with a pair of
-   * conditions on the input to allow input splits to parallelise the import.
-   */
-  public static final String SUBSTITUTE_TOKEN =
-      org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.SUBSTITUTE_TOKEN;
-
-  /**
-   * An InputSplit that spans a set of rows.
-   *
-   * @deprecated use org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.
-   *   DataDrivenDBInputSplit instead.
-   * @see org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.
-   *      DataDrivenDBInputSplit
-   */
-  public static class DataDrivenDBInputSplit extends
-  org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit {
-
-    /**
-     * Default Constructor.
-     */
-    public DataDrivenDBInputSplit() {
-      super();
-    }
-
-    /**
-     * Convenience Constructor.
-     * @param lower the string to be put in the WHERE clause to guard
-     * on the 'lower' end.
-     * @param upper the string to be put in the WHERE clause to guard
-     * on the 'upper' end.
-     */
-    public DataDrivenDBInputSplit(final String lower, final String upper) {
-      super(lower, upper);
-    }
-  }
-
-
-  /** Set the user-defined bounding query to use with a user-defined query.
-      This *must* include the substring "$CONDITIONS"
-      (DataDrivenDBInputFormat.SUBSTITUTE_TOKEN) inside the WHERE clause,
-      so that DataDrivenDBInputFormat knows where to insert split clauses.
-      e.g., "SELECT foo FROM mytable WHERE $CONDITIONS"
-      This will be expanded to something like:
-        SELECT foo FROM mytable WHERE (id &gt; 100) AND (id &lt; 250)
-      inside each split.
-    */
-  public static void setBoundingQuery(Configuration conf, String query) {
-    org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.setBoundingQuery(
-        conf, query);
-  }
-
-  // Configuration methods override superclass to ensure that the proper
-  // DataDrivenDBInputFormat gets used.
-
-  /** Note that the "orderBy" column is called the "splitBy" in this version.
-    * We reuse the same field, but it's not strictly ordering it
-    * -- just partitioning the results.
-    */
-  public static void setInput(Job job,
-      Class<? extends DBWritable> inputClass,
-      String tableName, String conditions,
-      String splitBy, String... fieldNames) {
-    org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.setInput(
-        job, inputClass, tableName, conditions, splitBy, fieldNames);
-  }
-
-  /** setInput() takes a custom query and a separate "bounding query" to use
-      instead of the custom "count query" used by DBInputFormat.
-    */
-  public static void setInput(Job job,
-      Class<? extends DBWritable> inputClass,
-      String inputQuery, String inputBoundingQuery) {
-    org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.setInput(
-        job, inputClass, inputQuery, inputBoundingQuery);
-  }
-}
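
Illustrative sketch (not part of the patch): the removed wrapper documents the $CONDITIONS substitution mechanism, so a short example of a free-form query import configured through the retained class may help. It uses the two-argument setInput forwarded above; the record class, query and bounding query are hypothetical:

import org.apache.hadoop.mapreduce.Job;
import org.apache.sqoop.mapreduce.DBWritable;
import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;

public class FreeFormQueryExample {
  public static void configure(Job job, Class<? extends DBWritable> recordClass) {
    // $CONDITIONS (SUBSTITUTE_TOKEN) is rewritten per split into a bounding predicate,
    // e.g. (id > 100) AND (id < 250), as the removed javadoc describes.
    DataDrivenDBInputFormat.setInput(job, recordClass,
        "SELECT id, name FROM mytable WHERE $CONDITIONS",  // inputQuery (hypothetical)
        "SELECT MIN(id), MAX(id) FROM mytable");           // inputBoundingQuery (hypothetical)
    job.setInputFormatClass(DataDrivenDBInputFormat.class);
  }
}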
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java
deleted file mode 100644
index 9563444..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBRecordReader.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * A RecordReader that reads records from a SQL table,
- * using data-driven WHERE clause splits.
- * Emits LongWritables containing the record number as
- * key and DBWritables as value.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader
- *   instead.
- * @see org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader
- */
-public class DataDrivenDBRecordReader<T extends DBWritable>
-    extends org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader<T> {
-
-  // CHECKSTYLE:OFF
-  // TODO(aaron): Refactor constructor to use fewer arguments.
-  /**
-   * @param split The InputSplit to read data for
-   * @throws SQLException
-   */
-  public DataDrivenDBRecordReader(DBInputFormat.DBInputSplit split,
-      Class<T> inputClass, Configuration conf, Connection conn,
-      DBConfiguration dbConfig, String cond, String [] fields, String table,
-      String dbProduct) throws SQLException {
-    super(split, inputClass, conf, conn, dbConfig,
-        cond, fields, table, dbProduct);
-  }
-  // CHECKSTYLE:ON
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java
deleted file mode 100644
index 2e4a2ba..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/DateSplitter.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * Implement DBSplitter over date/time values.
- * Make use of logic from IntegerSplitter, since date/time are just longs
- * in Java.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.DateSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.DateSplitter
- */
-public class DateSplitter extends org.apache.sqoop.mapreduce.db.DateSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java
deleted file mode 100644
index 0c7bc8c..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/FloatSplitter.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * Implement DBSplitter over floating-point values.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.FloatSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.FloatSplitter
- */
-public class FloatSplitter
-  extends org.apache.sqoop.mapreduce.db.FloatSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java
deleted file mode 100644
index f042833..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/IntegerSplitter.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import org.apache.commons.logging.Log;
-
-/**
- * Implement DBSplitter over integer values.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.IntegerSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.IntegerSplitter
- */
-public class IntegerSplitter
-  extends org.apache.sqoop.mapreduce.db.IntegerSplitter {
-  public static final Log LOG =
-      org.apache.sqoop.mapreduce.db.IntegerSplitter.LOG;
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java
deleted file mode 100644
index 53f30d5..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDBRecordReader.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * A RecordReader that reads records from an Oracle SQL table.
- * @deprecated use org.apache.sqoop.mapreduce.db.OracleDBRecordReader instead.
- * @see org.apache.sqoop.mapreduce.db.OracleDBRecordReader
- */
-public class OracleDBRecordReader<T extends DBWritable> extends
-  org.apache.sqoop.mapreduce.db.OracleDBRecordReader<T> {
-
-  /** Configuration key to set to a timezone string. */
-  public static final String SESSION_TIMEZONE_KEY =
-      org.apache.sqoop.mapreduce.db.OracleDBRecordReader.SESSION_TIMEZONE_KEY;
-
-  // CHECKSTYLE:OFF
-  public OracleDBRecordReader(DBInputFormat.DBInputSplit split,
-      Class<T> inputClass, Configuration conf, Connection conn,
-      DBConfiguration dbConfig, String cond, String [] fields,
-      String table) throws SQLException {
-    super(split, inputClass, conf, conn, dbConfig, cond, fields, table);
-  }
-  // CHECKSTYLE:ON
-
-  /**
-   * Set session time zone.
-   * @param conf The current configuration.
-   * We read the 'oracle.sessionTimeZone' property from here.
-   * @param conn The connection to alter the timezone properties of.
-   */
-  public static void setSessionTimeZone(Configuration conf,
-      Connection conn) throws SQLException {
-    org.apache.sqoop.mapreduce.db.OracleDBRecordReader.setSessionTimeZone(
-        conf, conn);
-  }
-}
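
As with the other shims, this class only forwarded to the org.apache.sqoop reader, so the session time zone hook remains available there. A sketch assuming an open Oracle JDBC connection; the connect string is a placeholder:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.mapreduce.db.OracleDBRecordReader;

    public final class OracleSessionTimeZoneExample {
      public static void main(String[] args) throws SQLException {
        Configuration conf = new Configuration();
        // SESSION_TIMEZONE_KEY is the 'oracle.sessionTimeZone' property the reader consults.
        conf.set(OracleDBRecordReader.SESSION_TIMEZONE_KEY, "GMT");
        // Placeholder connect string; any established Oracle connection can be altered.
        Connection conn =
            DriverManager.getConnection("jdbc:oracle:thin:@//dbhost:1521/orcl");
        OracleDBRecordReader.setSessionTimeZone(conf, conn);
        conn.close();
      }
    }
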
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java
deleted file mode 100644
index b8cb8f7..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * A InputFormat that reads input data from an SQL table in an Oracle db.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat
- *   instead.
- * @see org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat
- */
-public class OracleDataDrivenDBInputFormat<T extends DBWritable>
-    extends org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat<T> {
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java
deleted file mode 100644
index 228d627..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.mapreduce.DBWritable;
-
-/**
- * A RecordReader that reads records from a Oracle table
- * via DataDrivenDBRecordReader.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.OracleDataDrivenDBRecordReader
- *   instead.
- * @see org.apache.sqoop.mapreduce.db.OracleDataDrivenDBRecordReader
- */
-public class OracleDataDrivenDBRecordReader<T extends DBWritable>
-    extends org.apache.sqoop.mapreduce.db.OracleDataDrivenDBRecordReader<T> {
-
-  // CHECKSTYLE:OFF
-  // TODO(aaron): Enable checkstyle after refactoring DBRecordReader c'tor.
-  public OracleDataDrivenDBRecordReader(DBInputFormat.DBInputSplit split,
-      Class<T> inputClass, Configuration conf, Connection conn,
-      DBConfiguration dbConfig, String cond, String [] fields,
-      String table) throws SQLException {
-
-    super(split, inputClass, conf, conn, dbConfig, cond, fields, table);
-  }
-  // CHECKSTYLE:ON
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDateSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/OracleDateSplitter.java
deleted file mode 100644
index 3e1af33..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/OracleDateSplitter.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-/**
- * Implement DBSplitter over date/time values returned by an Oracle db.
- * Make use of logic from DateSplitter, since this just needs to use
- * some Oracle-specific functions on the formatting end when generating
- * InputSplits.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.OracleDateSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.OracleDateSplitter
- */
-public class OracleDateSplitter
-    extends org.apache.sqoop.mapreduce.db.OracleDateSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java b/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java
deleted file mode 100644
index f1bd9d1..0000000
--- a/src/java/com/cloudera/sqoop/mapreduce/db/TextSplitter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-/**
- * Implement DBSplitter over text strings.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.TextSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.TextSplitter
- */
-public class TextSplitter extends org.apache.sqoop.mapreduce.db.TextSplitter {
-
-}
diff --git a/src/java/com/cloudera/sqoop/metastore/GenericJobStorage.java b/src/java/com/cloudera/sqoop/metastore/GenericJobStorage.java
deleted file mode 100644
index d42e5a3..0000000
--- a/src/java/com/cloudera/sqoop/metastore/GenericJobStorage.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.metastore;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class GenericJobStorage
-    extends org.apache.sqoop.metastore.GenericJobStorage {
-
-  public static final String META_CONNECT_KEY =
-      org.apache.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
-  public static final String META_USERNAME_KEY =
-      org.apache.sqoop.metastore.GenericJobStorage.META_USERNAME_KEY;
-  public static final String META_PASSWORD_KEY =
-      org.apache.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
-  public static final String ROOT_TABLE_NAME_KEY =
-      org.apache.sqoop.metastore.GenericJobStorage.ROOT_TABLE_NAME_KEY;
-
-}
-
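
The metastore key constants re-exported by this shim remain on org.apache.sqoop.metastore.GenericJobStorage. A sketch of wiring a metastore connection through a Hadoop Configuration; the connect string and credentials are illustrative placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.metastore.GenericJobStorage;

    public final class MetastoreConfigExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Placeholder HSQLDB metastore endpoint and credentials.
        conf.set(GenericJobStorage.META_CONNECT_KEY,
            "jdbc:hsqldb:hsql://metastore-host:16000/sqoop");
        conf.set(GenericJobStorage.META_USERNAME_KEY, "SA");
        conf.set(GenericJobStorage.META_PASSWORD_KEY, "");
      }
    }
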
diff --git a/src/java/com/cloudera/sqoop/metastore/JobData.java b/src/java/com/cloudera/sqoop/metastore/JobData.java
deleted file mode 100644
index 94194d7..0000000
--- a/src/java/com/cloudera/sqoop/metastore/JobData.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.metastore;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.SqoopTool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JobData
-    extends org.apache.sqoop.metastore.JobData {
-
-  public JobData() {
-    super();
-  }
-
-  public JobData(SqoopOptions options, SqoopTool sqoopTool) {
-    super(options, sqoopTool);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/metastore/JobStorage.java b/src/java/com/cloudera/sqoop/metastore/JobStorage.java
deleted file mode 100644
index bbc6aea..0000000
--- a/src/java/com/cloudera/sqoop/metastore/JobStorage.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.metastore;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class JobStorage
-    extends org.apache.sqoop.metastore.JobStorage {
-}
-
diff --git a/src/java/com/cloudera/sqoop/metastore/JobStorageFactory.java b/src/java/com/cloudera/sqoop/metastore/JobStorageFactory.java
deleted file mode 100644
index 3809e6b..0000000
--- a/src/java/com/cloudera/sqoop/metastore/JobStorageFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.metastore;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JobStorageFactory
-    extends org.apache.sqoop.metastore.JobStorageFactory {
-
-  public static final String AVAILABLE_STORAGES_KEY =
-          org.apache.sqoop.metastore.JobStorageFactory.AVAILABLE_STORAGES_KEY;
-
-  public JobStorageFactory(Configuration config) {
-    super(config);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaStore.java b/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaStore.java
deleted file mode 100644
index 945c11a..0000000
--- a/src/java/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaStore.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package com.cloudera.sqoop.metastore.hsqldb;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HsqldbMetaStore
-    extends org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore {
-
-  public static final String META_STORAGE_LOCATION_KEY =
-    org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore.META_STORAGE_LOCATION_KEY;
-  public static final String META_SERVER_PORT_KEY =
-    org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore.META_SERVER_PORT_KEY;
-  public static final int DEFAULT_PORT =
-    org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore.DEFAULT_PORT;
-
-  public HsqldbMetaStore(Configuration config) {
-    super(config);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java b/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java
deleted file mode 100644
index 8ecee80..0000000
--- a/src/java/com/cloudera/sqoop/orm/AvroSchemaGenerator.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.orm;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AvroSchemaGenerator
-    extends org.apache.sqoop.orm.AvroSchemaGenerator {
-
-  public AvroSchemaGenerator(final SqoopOptions opts, final ConnManager connMgr,
-      final String table) {
-    super(opts, connMgr, table);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/orm/ClassWriter.java b/src/java/com/cloudera/sqoop/orm/ClassWriter.java
deleted file mode 100644
index 80141bf..0000000
--- a/src/java/com/cloudera/sqoop/orm/ClassWriter.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.orm;
-
-import java.util.HashSet;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ClassWriter
-    extends org.apache.sqoop.orm.ClassWriter {
-
-  public static final HashSet<String> JAVA_RESERVED_WORDS;
-
-  static {
-    JAVA_RESERVED_WORDS = new HashSet<String>();
-
-    JAVA_RESERVED_WORDS.add("abstract");
-    JAVA_RESERVED_WORDS.add("assert");
-    JAVA_RESERVED_WORDS.add("boolean");
-    JAVA_RESERVED_WORDS.add("break");
-    JAVA_RESERVED_WORDS.add("byte");
-    JAVA_RESERVED_WORDS.add("case");
-    JAVA_RESERVED_WORDS.add("catch");
-    JAVA_RESERVED_WORDS.add("char");
-    JAVA_RESERVED_WORDS.add("class");
-    JAVA_RESERVED_WORDS.add("const");
-    JAVA_RESERVED_WORDS.add("continue");
-    JAVA_RESERVED_WORDS.add("default");
-    JAVA_RESERVED_WORDS.add("do");
-    JAVA_RESERVED_WORDS.add("double");
-    JAVA_RESERVED_WORDS.add("else");
-    JAVA_RESERVED_WORDS.add("enum");
-    JAVA_RESERVED_WORDS.add("extends");
-    JAVA_RESERVED_WORDS.add("false");
-    JAVA_RESERVED_WORDS.add("final");
-    JAVA_RESERVED_WORDS.add("finally");
-    JAVA_RESERVED_WORDS.add("float");
-    JAVA_RESERVED_WORDS.add("for");
-    JAVA_RESERVED_WORDS.add("goto");
-    JAVA_RESERVED_WORDS.add("if");
-    JAVA_RESERVED_WORDS.add("implements");
-    JAVA_RESERVED_WORDS.add("import");
-    JAVA_RESERVED_WORDS.add("instanceof");
-    JAVA_RESERVED_WORDS.add("int");
-    JAVA_RESERVED_WORDS.add("interface");
-    JAVA_RESERVED_WORDS.add("long");
-    JAVA_RESERVED_WORDS.add("native");
-    JAVA_RESERVED_WORDS.add("new");
-    JAVA_RESERVED_WORDS.add("null");
-    JAVA_RESERVED_WORDS.add("package");
-    JAVA_RESERVED_WORDS.add("private");
-    JAVA_RESERVED_WORDS.add("protected");
-    JAVA_RESERVED_WORDS.add("public");
-    JAVA_RESERVED_WORDS.add("return");
-    JAVA_RESERVED_WORDS.add("short");
-    JAVA_RESERVED_WORDS.add("static");
-    JAVA_RESERVED_WORDS.add("strictfp");
-    JAVA_RESERVED_WORDS.add("super");
-    JAVA_RESERVED_WORDS.add("switch");
-    JAVA_RESERVED_WORDS.add("synchronized");
-    JAVA_RESERVED_WORDS.add("this");
-    JAVA_RESERVED_WORDS.add("throw");
-    JAVA_RESERVED_WORDS.add("throws");
-    JAVA_RESERVED_WORDS.add("transient");
-    JAVA_RESERVED_WORDS.add("true");
-    JAVA_RESERVED_WORDS.add("try");
-    JAVA_RESERVED_WORDS.add("void");
-    JAVA_RESERVED_WORDS.add("volatile");
-    JAVA_RESERVED_WORDS.add("while");
-  }
-
-  public static final int CLASS_WRITER_VERSION =
-    org.apache.sqoop.orm.ClassWriter.CLASS_WRITER_VERSION;
-
-  public static String toIdentifier(String candidate) {
-    return org.apache.sqoop.orm.ClassWriter.toIdentifier(candidate);
-  }
-
-  public static String toJavaIdentifier(String candidate) {
-    return org.apache.sqoop.orm.ClassWriter.toJavaIdentifier(candidate);
-  }
-
-  public static String getIdentifierStrForChar(char c) {
-    return org.apache.sqoop.orm.ClassWriter.getIdentifierStrForChar(c);
-  }
-
-  public ClassWriter(final SqoopOptions opts, final ConnManager connMgr,
-      final String table, final CompilationManager compMgr) {
-    super(opts, connMgr, table, compMgr);
-  }
-
-}
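
The identifier helpers re-exported here are plain static methods on org.apache.sqoop.orm.ClassWriter, so callers need only switch the import. A small sketch; the column names are made up for illustration:

    import org.apache.sqoop.orm.ClassWriter;

    public final class IdentifierExample {
      public static void main(String[] args) {
        // Column names that are not legal Java identifiers (or clash with the
        // reserved words listed above) are rewritten before code generation.
        System.out.println(ClassWriter.toJavaIdentifier("order-date"));
        System.out.println(ClassWriter.toIdentifier("2nd_column"));
        System.out.println(ClassWriter.getIdentifierStrForChar('-'));
      }
    }
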
diff --git a/src/java/com/cloudera/sqoop/orm/CompilationManager.java b/src/java/com/cloudera/sqoop/orm/CompilationManager.java
deleted file mode 100644
index 152d8a4..0000000
--- a/src/java/com/cloudera/sqoop/orm/CompilationManager.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.orm;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class CompilationManager
-    extends org.apache.sqoop.orm.CompilationManager {
-
-  public static final String DEFAULT_CODEGEN_JAR_NAME =
-    org.apache.sqoop.orm.CompilationManager.DEFAULT_CODEGEN_JAR_NAME;
-
-  public CompilationManager(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/orm/TableClassName.java b/src/java/com/cloudera/sqoop/orm/TableClassName.java
deleted file mode 100644
index 20c82af..0000000
--- a/src/java/com/cloudera/sqoop/orm/TableClassName.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.orm;
-
-import com.cloudera.sqoop.SqoopOptions;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class TableClassName
-    extends org.apache.sqoop.orm.TableClassName {
-
-  public TableClassName(final SqoopOptions opts) {
-    super(opts);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java b/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java
deleted file mode 100644
index 891ed4d..0000000
--- a/src/java/com/cloudera/sqoop/tool/BaseSqoopTool.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-import com.cloudera.sqoop.SqoopOptions;
-import org.apache.sqoop.manager.SupportedManagers;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class BaseSqoopTool
-    extends org.apache.sqoop.tool.BaseSqoopTool {
-
-  public static final String HELP_STR =
-          org.apache.sqoop.tool.BaseSqoopTool.HELP_STR;
-  public static final String CONNECT_STRING_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.CONNECT_STRING_ARG;
-  public static final String CONN_MANAGER_CLASS_NAME =
-          org.apache.sqoop.tool.BaseSqoopTool.CONN_MANAGER_CLASS_NAME;
-  public static final String CONNECT_PARAM_FILE =
-          org.apache.sqoop.tool.BaseSqoopTool.CONNECT_PARAM_FILE;
-  public static final String DRIVER_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.DRIVER_ARG;
-  public static final String USERNAME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.USERNAME_ARG;
-  public static final String PASSWORD_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.PASSWORD_ARG;
-  public static final String PASSWORD_PROMPT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.PASSWORD_PROMPT_ARG;
-  public static final String DIRECT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.DIRECT_ARG;
-  public static final String BATCH_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.BATCH_ARG;
-  public static final String TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.TABLE_ARG;
-  public static final String STAGING_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.STAGING_TABLE_ARG;
-  public static final String CLEAR_STAGING_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.CLEAR_STAGING_TABLE_ARG;
-  public static final String COLUMNS_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.COLUMNS_ARG;
-  public static final String SPLIT_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.SPLIT_BY_ARG;
-  public static final String WHERE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.WHERE_ARG;
-  public static final String HADOOP_HOME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HADOOP_HOME_ARG;
-  public static final String HIVE_HOME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_HOME_ARG;
-  public static final String WAREHOUSE_DIR_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.WAREHOUSE_DIR_ARG;
-  public static final String TARGET_DIR_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.TARGET_DIR_ARG;
-  public static final String APPEND_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.APPEND_ARG;
-  public static final String NULL_STRING =
-          org.apache.sqoop.tool.BaseSqoopTool.NULL_STRING;
-  public static final String INPUT_NULL_STRING =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_NULL_STRING;
-  public static final String NULL_NON_STRING =
-          org.apache.sqoop.tool.BaseSqoopTool.NULL_NON_STRING;
-  public static final String INPUT_NULL_NON_STRING =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_NULL_NON_STRING;
-  public static final String MAP_COLUMN_JAVA =
-          org.apache.sqoop.tool.BaseSqoopTool.MAP_COLUMN_JAVA;
-  public static final String MAP_COLUMN_HIVE =
-          org.apache.sqoop.tool.BaseSqoopTool.MAP_COLUMN_HIVE;
-  public static final String FMT_SEQUENCEFILE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.FMT_SEQUENCEFILE_ARG;
-  public static final String FMT_TEXTFILE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.FMT_TEXTFILE_ARG;
-  public static final String FMT_AVRODATAFILE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.FMT_AVRODATAFILE_ARG;
-  public static final String HIVE_IMPORT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_IMPORT_ARG;
-  public static final String HIVE_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_TABLE_ARG;
-  public static final String HIVE_OVERWRITE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_OVERWRITE_ARG;
-  public static final String HIVE_DROP_DELIMS_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_DROP_DELIMS_ARG;
-  public static final String HIVE_DELIMS_REPLACEMENT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG;
-  public static final String HIVE_PARTITION_KEY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_PARTITION_KEY_ARG;
-  public static final String HIVE_PARTITION_VALUE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HIVE_PARTITION_VALUE_ARG;
-  public static final String CREATE_HIVE_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.CREATE_HIVE_TABLE_ARG;
-  public static final String NUM_MAPPERS_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.NUM_MAPPERS_ARG;
-  public static final String NUM_MAPPERS_SHORT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.NUM_MAPPERS_SHORT_ARG;
-  public static final String COMPRESS_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.COMPRESS_ARG;
-  public static final String COMPRESSION_CODEC_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.COMPRESSION_CODEC_ARG;
-  public static final String COMPRESS_SHORT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.COMPRESS_SHORT_ARG;
-  public static final String DIRECT_SPLIT_SIZE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.DIRECT_SPLIT_SIZE_ARG;
-  public static final String INLINE_LOB_LIMIT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INLINE_LOB_LIMIT_ARG;
-  public static final String FETCH_SIZE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.FETCH_SIZE_ARG;
-  public static final String EXPORT_PATH_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.EXPORT_PATH_ARG;
-  public static final String FIELDS_TERMINATED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.FIELDS_TERMINATED_BY_ARG;
-  public static final String LINES_TERMINATED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.LINES_TERMINATED_BY_ARG;
-  public static final String OPTIONALLY_ENCLOSED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.OPTIONALLY_ENCLOSED_BY_ARG;
-  public static final String ENCLOSED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.ENCLOSED_BY_ARG;
-  public static final String ESCAPED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.ESCAPED_BY_ARG;
-  public static final String MYSQL_DELIMITERS_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.MYSQL_DELIMITERS_ARG;
-  public static final String INPUT_FIELDS_TERMINATED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_FIELDS_TERMINATED_BY_ARG;
-  public static final String INPUT_LINES_TERMINATED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_LINES_TERMINATED_BY_ARG;
-  public static final String INPUT_OPTIONALLY_ENCLOSED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_OPTIONALLY_ENCLOSED_BY_ARG;
-  public static final String INPUT_ENCLOSED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_ENCLOSED_BY_ARG;
-  public static final String INPUT_ESCAPED_BY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INPUT_ESCAPED_BY_ARG;
-  public static final String CODE_OUT_DIR_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.CODE_OUT_DIR_ARG;
-  public static final String BIN_OUT_DIR_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.BIN_OUT_DIR_ARG;
-  public static final String PACKAGE_NAME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.PACKAGE_NAME_ARG;
-  public static final String CLASS_NAME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.CLASS_NAME_ARG;
-  public static final String JAR_FILE_NAME_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JAR_FILE_NAME_ARG;
-  public static final String SQL_QUERY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.SQL_QUERY_ARG;
-  public static final String SQL_QUERY_BOUNDARY =
-          org.apache.sqoop.tool.BaseSqoopTool.SQL_QUERY_BOUNDARY;
-  public static final String SQL_QUERY_SHORT_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.SQL_QUERY_SHORT_ARG;
-  public static final String VERBOSE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.VERBOSE_ARG;
-  public static final String HELP_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HELP_ARG;
-  public static final String UPDATE_KEY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.UPDATE_KEY_ARG;
-  public static final String UPDATE_MODE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.UPDATE_MODE_ARG;
-  public static final String INCREMENT_TYPE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INCREMENT_TYPE_ARG;
-  public static final String INCREMENT_COL_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INCREMENT_COL_ARG;
-  public static final String INCREMENT_LAST_VAL_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.INCREMENT_LAST_VAL_ARG;
-  public static final String HBASE_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HBASE_TABLE_ARG;
-  public static final String HBASE_COL_FAM_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HBASE_COL_FAM_ARG;
-  public static final String HBASE_ROW_KEY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HBASE_ROW_KEY_ARG;
-  public static final String HBASE_CREATE_TABLE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.HBASE_CREATE_TABLE_ARG;
-  public static final String STORAGE_METASTORE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.STORAGE_METASTORE_ARG;
-  public static final String JOB_CMD_CREATE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JOB_CMD_CREATE_ARG;
-  public static final String JOB_CMD_DELETE_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JOB_CMD_DELETE_ARG;
-  public static final String JOB_CMD_EXEC_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JOB_CMD_EXEC_ARG;
-  public static final String JOB_CMD_LIST_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JOB_CMD_LIST_ARG;
-  public static final String JOB_CMD_SHOW_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.JOB_CMD_SHOW_ARG;
-  public static final String METASTORE_SHUTDOWN_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.METASTORE_SHUTDOWN_ARG;
-  public static final String NEW_DATASET_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.NEW_DATASET_ARG;
-  public static final String OLD_DATASET_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.OLD_DATASET_ARG;
-  public static final String MERGE_KEY_ARG =
-          org.apache.sqoop.tool.BaseSqoopTool.MERGE_KEY_ARG;
-
-  public BaseSqoopTool() {
-  }
-
-  public BaseSqoopTool(String toolName) {
-    super(toolName);
-  }
-
-  protected void validateHasDirectConnectorOption(SqoopOptions options) throws SqoopOptions.InvalidOptionsException {
-    SupportedManagers m = SupportedManagers.createFrom(options);
-    if (m != null && options.isDirect() && !m.hasDirectConnector()) {
-      throw new SqoopOptions.InvalidOptionsException(
-          "Was called with the --direct option, but no direct connector available.");
-    }
-  }
-}
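
validateHasDirectConnectorOption() was the only logic this shim carried beyond constant re-exports. The equivalent check against the org.apache.sqoop types looks roughly as follows, assuming org.apache.sqoop.SqoopOptions exposes the same isDirect() accessor and nested InvalidOptionsException used by the removed code:

    import org.apache.sqoop.SqoopOptions;
    import org.apache.sqoop.manager.SupportedManagers;

    public final class DirectOptionCheck {
      public static void check(SqoopOptions options)
          throws SqoopOptions.InvalidOptionsException {
        SupportedManagers m = SupportedManagers.createFrom(options);
        // --direct is only valid when the detected manager ships a direct connector.
        if (m != null && options.isDirect() && !m.hasDirectConnector()) {
          throw new SqoopOptions.InvalidOptionsException(
              "Was called with the --direct option, but no direct connector available.");
        }
      }
    }
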
diff --git a/src/java/com/cloudera/sqoop/tool/CodeGenTool.java b/src/java/com/cloudera/sqoop/tool/CodeGenTool.java
deleted file mode 100644
index caab39f..0000000
--- a/src/java/com/cloudera/sqoop/tool/CodeGenTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class CodeGenTool
-    extends org.apache.sqoop.tool.CodeGenTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/CreateHiveTableTool.java b/src/java/com/cloudera/sqoop/tool/CreateHiveTableTool.java
deleted file mode 100644
index 103b66e..0000000
--- a/src/java/com/cloudera/sqoop/tool/CreateHiveTableTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class CreateHiveTableTool
-    extends org.apache.sqoop.tool.CreateHiveTableTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/EvalSqlTool.java b/src/java/com/cloudera/sqoop/tool/EvalSqlTool.java
deleted file mode 100644
index fcba535..0000000
--- a/src/java/com/cloudera/sqoop/tool/EvalSqlTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class EvalSqlTool
-    extends org.apache.sqoop.tool.EvalSqlTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ExportTool.java b/src/java/com/cloudera/sqoop/tool/ExportTool.java
deleted file mode 100644
index 597f0fb..0000000
--- a/src/java/com/cloudera/sqoop/tool/ExportTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ExportTool
-    extends org.apache.sqoop.tool.ExportTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/HelpTool.java b/src/java/com/cloudera/sqoop/tool/HelpTool.java
deleted file mode 100644
index dc2933a..0000000
--- a/src/java/com/cloudera/sqoop/tool/HelpTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class HelpTool
-    extends org.apache.sqoop.tool.HelpTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ImportAllTablesTool.java b/src/java/com/cloudera/sqoop/tool/ImportAllTablesTool.java
deleted file mode 100644
index 0c7724b..0000000
--- a/src/java/com/cloudera/sqoop/tool/ImportAllTablesTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ImportAllTablesTool
-    extends org.apache.sqoop.tool.ImportAllTablesTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ImportTool.java b/src/java/com/cloudera/sqoop/tool/ImportTool.java
deleted file mode 100644
index 8f00741..0000000
--- a/src/java/com/cloudera/sqoop/tool/ImportTool.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ImportTool
-    extends org.apache.sqoop.tool.ImportTool {
-
-  public ImportTool() {
-    super();
-  }
-
-  public ImportTool(String toolName, boolean allTables) {
-    super(toolName, allTables);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/tool/JobTool.java b/src/java/com/cloudera/sqoop/tool/JobTool.java
deleted file mode 100644
index f1554d0..0000000
--- a/src/java/com/cloudera/sqoop/tool/JobTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class JobTool
-    extends org.apache.sqoop.tool.JobTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ListDatabasesTool.java b/src/java/com/cloudera/sqoop/tool/ListDatabasesTool.java
deleted file mode 100644
index 29d8923..0000000
--- a/src/java/com/cloudera/sqoop/tool/ListDatabasesTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ListDatabasesTool
-    extends org.apache.sqoop.tool.ListDatabasesTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ListTablesTool.java b/src/java/com/cloudera/sqoop/tool/ListTablesTool.java
deleted file mode 100644
index 08392ef..0000000
--- a/src/java/com/cloudera/sqoop/tool/ListTablesTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ListTablesTool
-    extends org.apache.sqoop.tool.ListTablesTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/MergeTool.java b/src/java/com/cloudera/sqoop/tool/MergeTool.java
deleted file mode 100644
index 1ae2a4c..0000000
--- a/src/java/com/cloudera/sqoop/tool/MergeTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MergeTool
-    extends org.apache.sqoop.tool.MergeTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/MetastoreTool.java b/src/java/com/cloudera/sqoop/tool/MetastoreTool.java
deleted file mode 100644
index 210f9cc..0000000
--- a/src/java/com/cloudera/sqoop/tool/MetastoreTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class MetastoreTool
-    extends org.apache.sqoop.tool.MetastoreTool {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/SqoopTool.java b/src/java/com/cloudera/sqoop/tool/SqoopTool.java
deleted file mode 100644
index 8a4735b..0000000
--- a/src/java/com/cloudera/sqoop/tool/SqoopTool.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-import java.util.Set;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class SqoopTool
-    extends org.apache.sqoop.tool.SqoopTool {
-
-  public static final String TOOL_PLUGINS_KEY =
-    org.apache.sqoop.tool.SqoopTool.TOOL_PLUGINS_KEY;
-
-  public static final Set<String> getToolNames() {
-    return org.apache.sqoop.tool.SqoopTool.getToolNames();
-  }
-
-  public static final SqoopTool getTool(String toolName) {
-    return (SqoopTool)org.apache.sqoop.tool.SqoopTool.getTool(toolName);
-  }
-
-  public static final String getToolDescription(String toolName) {
-    return org.apache.sqoop.tool.SqoopTool.getToolDescription(toolName);
-  }
-
-  public SqoopTool() {
-     super();
-  }
-
-  public SqoopTool(String name) {
-    super(name);
-  }
-
-}
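The wrapper deleted above only delegated to static members of org.apache.sqoop.tool.SqoopTool, so former callers of the com.cloudera class reference the org.apache class instead. A minimal sketch of the tool-lookup pattern those statics expose, restricted to the members visible in the deleted file:

import java.util.Set;
import org.apache.sqoop.tool.SqoopTool;

public class ToolLookupSketch {
  public static void main(String[] args) {
    // Enumerate registered tool names and describe each one, exactly the
    // calls the removed com.cloudera.sqoop.tool.SqoopTool forwarded.
    Set<String> names = SqoopTool.getToolNames();
    for (String name : names) {
      SqoopTool tool = SqoopTool.getTool(name);
      System.out.println(name + ": " + SqoopTool.getToolDescription(name)
          + " (" + tool.getClass().getName() + ")");
    }
  }
}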
diff --git a/src/java/com/cloudera/sqoop/tool/ToolDesc.java b/src/java/com/cloudera/sqoop/tool/ToolDesc.java
deleted file mode 100644
index 49e8274..0000000
--- a/src/java/com/cloudera/sqoop/tool/ToolDesc.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ToolDesc
-    extends org.apache.sqoop.tool.ToolDesc {
-
-  public ToolDesc(String name, Class<? extends SqoopTool> cls, String desc) {
-    super(name, cls, desc);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/tool/ToolPlugin.java b/src/java/com/cloudera/sqoop/tool/ToolPlugin.java
deleted file mode 100644
index 46b69fb..0000000
--- a/src/java/com/cloudera/sqoop/tool/ToolPlugin.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class ToolPlugin
-    extends org.apache.sqoop.tool.ToolPlugin {
-}
diff --git a/src/java/com/cloudera/sqoop/tool/VersionTool.java b/src/java/com/cloudera/sqoop/tool/VersionTool.java
deleted file mode 100644
index ab85b89..0000000
--- a/src/java/com/cloudera/sqoop/tool/VersionTool.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.tool;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class VersionTool
-    extends org.apache.sqoop.tool.VersionTool {
-}
diff --git a/src/java/com/cloudera/sqoop/util/AppendUtils.java b/src/java/com/cloudera/sqoop/util/AppendUtils.java
deleted file mode 100644
index 0b9f887..0000000
--- a/src/java/com/cloudera/sqoop/util/AppendUtils.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class AppendUtils
-    extends org.apache.sqoop.util.AppendUtils {
-
-  public AppendUtils(ImportJobContext context) {
-    super(context);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/AsyncSink.java b/src/java/com/cloudera/sqoop/util/AsyncSink.java
deleted file mode 100644
index 1a869ba..0000000
--- a/src/java/com/cloudera/sqoop/util/AsyncSink.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class AsyncSink
-    extends org.apache.sqoop.util.AsyncSink {
-}
diff --git a/src/java/com/cloudera/sqoop/util/ClassLoaderStack.java b/src/java/com/cloudera/sqoop/util/ClassLoaderStack.java
deleted file mode 100644
index fcec0ce..0000000
--- a/src/java/com/cloudera/sqoop/util/ClassLoaderStack.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.io.IOException;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class ClassLoaderStack {
-
-  private ClassLoaderStack() { }
-
-  public static void setCurrentClassLoader(ClassLoader cl) {
-    org.apache.sqoop.util.ClassLoaderStack.setCurrentClassLoader(cl);
-  }
-
-  public static ClassLoader addJarFile(String jarFile, String testClassName)
-      throws IOException {
-    return org.apache.sqoop.util.ClassLoaderStack.addJarFile(jarFile,
-        testClassName);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/DirectImportUtils.java b/src/java/com/cloudera/sqoop/util/DirectImportUtils.java
deleted file mode 100644
index 9b84748..0000000
--- a/src/java/com/cloudera/sqoop/util/DirectImportUtils.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.io.SplittableBufferedWriter;
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class DirectImportUtils {
-
-  private DirectImportUtils() { }
-
-  public static void setFilePermissions(File file, String modstr)
-      throws IOException {
-    org.apache.sqoop.util.DirectImportUtils.setFilePermissions(file, modstr);
-  }
-
-  public static SplittableBufferedWriter createHdfsSink(Configuration conf,
-      SqoopOptions options, ImportJobContext context) throws IOException {
-    return org.apache.sqoop.util.DirectImportUtils.createHdfsSink(conf,
-        options, context);
-  }
-
-  public static boolean isLocalhost(String someHost) {
-    return org.apache.sqoop.util.DirectImportUtils.isLocalhost(someHost);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/ErrorableAsyncSink.java b/src/java/com/cloudera/sqoop/util/ErrorableAsyncSink.java
deleted file mode 100644
index 4af8d72..0000000
--- a/src/java/com/cloudera/sqoop/util/ErrorableAsyncSink.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class ErrorableAsyncSink
-    extends org.apache.sqoop.util.ErrorableAsyncSink {
-}
diff --git a/src/java/com/cloudera/sqoop/util/ErrorableThread.java b/src/java/com/cloudera/sqoop/util/ErrorableThread.java
deleted file mode 100644
index 6cde23a..0000000
--- a/src/java/com/cloudera/sqoop/util/ErrorableThread.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public abstract class ErrorableThread
-    extends org.apache.sqoop.util.ErrorableThread {
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/Executor.java b/src/java/com/cloudera/sqoop/util/Executor.java
deleted file mode 100644
index 366a2fa..0000000
--- a/src/java/com/cloudera/sqoop/util/Executor.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.sqoop.util.AsyncSink;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class Executor {
-
-  private Executor() { }
-
-  public static int exec(String [] args) throws IOException {
-    return org.apache.sqoop.util.Executor.exec(args);
-  }
-
-  public static int exec(String [] args, AsyncSink outSink,
-      AsyncSink errSink) throws IOException {
-    return org.apache.sqoop.util.Executor.exec(args, outSink, errSink);
-  }
-
-  public static int exec(String [] args, String [] envp, AsyncSink outSink,
-      AsyncSink errSink) throws IOException {
-    return org.apache.sqoop.util.Executor.exec(args, envp, outSink, errSink);
-  }
-
-  public static List<String> getCurEnvpStrings() {
-    return org.apache.sqoop.util.Executor.getCurEnvpStrings();
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/ExitSecurityException.java b/src/java/com/cloudera/sqoop/util/ExitSecurityException.java
deleted file mode 100644
index e9ec464..0000000
--- a/src/java/com/cloudera/sqoop/util/ExitSecurityException.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-@SuppressWarnings("serial")
-public class ExitSecurityException
-    extends org.apache.sqoop.util.ExitSecurityException {
-
-  public ExitSecurityException() {
-    super();
-  }
-
-  public ExitSecurityException(final String message) {
-    super(message);
-  }
-
-  public ExitSecurityException(int status) {
-    super(status);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/ExportException.java b/src/java/com/cloudera/sqoop/util/ExportException.java
deleted file mode 100644
index 6c86795..0000000
--- a/src/java/com/cloudera/sqoop/util/ExportException.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-@SuppressWarnings("serial")
-public class ExportException
-    extends org.apache.sqoop.util.ExportException {
-
-  public ExportException() {
-    super();
-  }
-
-  public ExportException(final String message) {
-    super(message);
-  }
-
-  public ExportException(final Throwable cause) {
-    super(cause);
-  }
-
-  public ExportException(final String message, final Throwable cause) {
-    super(message, cause);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/FileListing.java b/src/java/com/cloudera/sqoop/util/FileListing.java
deleted file mode 100644
index 12494d8..0000000
--- a/src/java/com/cloudera/sqoop/util/FileListing.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.List;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class FileListing {
-
-  private FileListing() { }
-
-  public static void main(String... aArgs) throws FileNotFoundException {
-    org.apache.sqoop.util.FileListing.main(aArgs);
-  }
-
-  public static List<File> getFileListing(File aStartingDir)
-      throws FileNotFoundException {
-    return org.apache.sqoop.util.FileListing.getFileListing(aStartingDir);
-  }
-
-  public static void recursiveDeleteDir(File dir) throws IOException {
-    org.apache.sqoop.util.FileListing.recursiveDeleteDir(dir);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/ImportException.java b/src/java/com/cloudera/sqoop/util/ImportException.java
deleted file mode 100644
index 34e6767..0000000
--- a/src/java/com/cloudera/sqoop/util/ImportException.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-@SuppressWarnings("serial")
-public class ImportException
-    extends org.apache.sqoop.util.ImportException {
-
-  public ImportException() {
-    super();
-  }
-
-  public ImportException(final String message) {
-    super(message);
-  }
-
-  public ImportException(final Throwable cause) {
-    super(cause);
-  }
-
-  public ImportException(final String message, final Throwable cause) {
-    super(message, cause);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/Jars.java b/src/java/com/cloudera/sqoop/util/Jars.java
deleted file mode 100644
index 09a4f16..0000000
--- a/src/java/com/cloudera/sqoop/util/Jars.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import com.cloudera.sqoop.manager.ConnManager;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class Jars {
-
-  private Jars() { }
-
-  public static String getSqoopJarPath() {
-    return org.apache.sqoop.util.Jars.getSqoopJarPath();
-  }
-
-  public static String getJarPathForClass(Class<? extends Object> classObj) {
-    return org.apache.sqoop.util.Jars.getJarPathForClass(classObj);
-  }
-
-  public static String getDriverClassJar(ConnManager mgr) {
-    return org.apache.sqoop.util.Jars.getDriverClassJar(mgr);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/JdbcUrl.java b/src/java/com/cloudera/sqoop/util/JdbcUrl.java
deleted file mode 100644
index f7ef10b..0000000
--- a/src/java/com/cloudera/sqoop/util/JdbcUrl.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class JdbcUrl {
-
-  private JdbcUrl() { }
-
-  public static String getDatabaseName(String connectString) {
-    return org.apache.sqoop.util.JdbcUrl.getDatabaseName(connectString);
-  }
-
-  public static String getHostName(String connectString) {
-    return org.apache.sqoop.util.JdbcUrl.getHostName(connectString);
-  }
-
-  public static int getPort(String connectString) {
-    return org.apache.sqoop.util.JdbcUrl.getPort(connectString);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/LoggingAsyncSink.java b/src/java/com/cloudera/sqoop/util/LoggingAsyncSink.java
deleted file mode 100644
index 9699252..0000000
--- a/src/java/com/cloudera/sqoop/util/LoggingAsyncSink.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import org.apache.commons.logging.Log;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class LoggingAsyncSink
-    extends org.apache.sqoop.util.LoggingAsyncSink {
-
-  public LoggingAsyncSink(final Log context) {
-    super(context);
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/LoggingUtils.java b/src/java/com/cloudera/sqoop/util/LoggingUtils.java
deleted file mode 100644
index 9cd50ff..0000000
--- a/src/java/com/cloudera/sqoop/util/LoggingUtils.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.sql.SQLException;
-
-import org.apache.commons.logging.Log;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class LoggingUtils {
-
-  private LoggingUtils() { }
-
-  public static void logAll(Log log, SQLException e) {
-    org.apache.sqoop.util.LoggingUtils.logAll(log, e);
-  }
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/NullAsyncSink.java b/src/java/com/cloudera/sqoop/util/NullAsyncSink.java
deleted file mode 100644
index 7d607d5..0000000
--- a/src/java/com/cloudera/sqoop/util/NullAsyncSink.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class NullAsyncSink
-    extends org.apache.sqoop.util.NullAsyncSink {
-}
diff --git a/src/java/com/cloudera/sqoop/util/OptionsFileUtil.java b/src/java/com/cloudera/sqoop/util/OptionsFileUtil.java
deleted file mode 100644
index 6c6cacd..0000000
--- a/src/java/com/cloudera/sqoop/util/OptionsFileUtil.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class OptionsFileUtil {
-
-  private OptionsFileUtil() { }
-
-  public static String[] expandArguments(String[] args) throws Exception {
-    List<String> options = new ArrayList<String>();
-    return org.apache.sqoop.util.OptionsFileUtil.expandArguments(args);
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/PerfCounters.java b/src/java/com/cloudera/sqoop/util/PerfCounters.java
deleted file mode 100644
index a8460dd..0000000
--- a/src/java/com/cloudera/sqoop/util/PerfCounters.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class PerfCounters
-    extends org.apache.sqoop.util.PerfCounters {
-
-  public PerfCounters() {
-    super();
-  }
-
-}
diff --git a/src/java/com/cloudera/sqoop/util/RandomHash.java b/src/java/com/cloudera/sqoop/util/RandomHash.java
deleted file mode 100644
index d3ba497..0000000
--- a/src/java/com/cloudera/sqoop/util/RandomHash.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class RandomHash {
-
-  private RandomHash() { }
-
-  public static byte [] generateMD5Bytes() {
-    return org.apache.sqoop.util.RandomHash.generateMD5Bytes();
-  }
-
-  public static String generateMD5String() {
-    return org.apache.sqoop.util.RandomHash.generateMD5String();
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/ResultSetPrinter.java b/src/java/com/cloudera/sqoop/util/ResultSetPrinter.java
deleted file mode 100644
index 4dc9c72..0000000
--- a/src/java/com/cloudera/sqoop/util/ResultSetPrinter.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class ResultSetPrinter
-    extends org.apache.sqoop.util.ResultSetPrinter {
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/StoredAsProperty.java b/src/java/com/cloudera/sqoop/util/StoredAsProperty.java
deleted file mode 100644
index eebfcda..0000000
--- a/src/java/com/cloudera/sqoop/util/StoredAsProperty.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-@Documented
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.FIELD)
-public @interface StoredAsProperty {
-  String value();
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/SubprocessSecurityManager.java b/src/java/com/cloudera/sqoop/util/SubprocessSecurityManager.java
deleted file mode 100644
index 787564a..0000000
--- a/src/java/com/cloudera/sqoop/util/SubprocessSecurityManager.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public class SubprocessSecurityManager
-    extends org.apache.sqoop.util.SubprocessSecurityManager {
-
-  public SubprocessSecurityManager() {
-    super();
-  }
-
-}
-
diff --git a/src/java/com/cloudera/sqoop/util/TaskId.java b/src/java/com/cloudera/sqoop/util/TaskId.java
deleted file mode 100644
index 7804c8e..0000000
--- a/src/java/com/cloudera/sqoop/util/TaskId.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.util;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * @deprecated Moving to use org.apache.sqoop namespace.
- */
-public final class TaskId {
-
-  private TaskId() { }
-
-  public static String get(Configuration conf, String defaultVal) {
-    return org.apache.sqoop.util.TaskId.get(conf, defaultVal);
-  }
-
-  public static File getLocalWorkPath(Configuration conf) throws IOException {
-    return org.apache.sqoop.util.TaskId.getLocalWorkPath(conf);
-  }
-
-}
diff --git a/src/java/org/apache/sqoop/ConnFactory.java b/src/java/org/apache/sqoop/ConnFactory.java
index 3f77252..b008dfe 100644
--- a/src/java/org/apache/sqoop/ConnFactory.java
+++ b/src/java/org/apache/sqoop/ConnFactory.java
@@ -36,12 +36,12 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.DefaultManagerFactory;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.DefaultManagerFactory;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
 
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.apache.sqoop.manager.GenericJdbcManager;
 import org.apache.sqoop.manager.oracle.OraOopManagerFactory;
 
@@ -119,7 +119,7 @@
    * @throws IOException if it cannot find a ConnManager for this schema.
    */
   public ConnManager getManager(JobData data) throws IOException {
-    com.cloudera.sqoop.SqoopOptions options = data.getSqoopOptions();
+    SqoopOptions options = data.getSqoopOptions();
     String manualDriver = options.getDriverClassName();
     String managerClassName = options.getConnManagerClassName();
 
@@ -151,12 +151,11 @@
         // connectors are forcing to use their building class names.
         if (manualDriver == null) {
           Constructor<ConnManager> constructor =
-            cls.getDeclaredConstructor(com.cloudera.sqoop.SqoopOptions.class);
+            cls.getDeclaredConstructor(SqoopOptions.class);
           connManager = constructor.newInstance(options);
         } else {
           Constructor<ConnManager> constructor =
-            cls.getDeclaredConstructor(String.class,
-                                       com.cloudera.sqoop.SqoopOptions.class);
+            cls.getDeclaredConstructor(String.class, SqoopOptions.class);
           connManager = constructor.newInstance(manualDriver, options);
         }
       } catch (ClassNotFoundException e) {
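The hunk above keeps the reflective constructor lookup but resolves it against org.apache.sqoop.SqoopOptions instead of the deprecated wrapper type. A self-contained sketch of that lookup pattern, using hypothetical Options and Manager stand-ins rather than the real Sqoop classes:

import java.lang.reflect.Constructor;

public class ReflectiveLookupSketch {
  // Illustrative stand-ins; the real code resolves ConnManager subclasses
  // against org.apache.sqoop.SqoopOptions.
  public static class Options { }
  public static class Manager {
    public Manager(Options opts) { }
  }

  public static void main(String[] args) throws Exception {
    Class<Manager> cls = Manager.class;
    // Same shape as ConnFactory.getManager(): pick the constructor whose
    // parameter type matches the options class, then instantiate.
    Constructor<Manager> ctor = cls.getDeclaredConstructor(Options.class);
    Manager m = ctor.newInstance(new Options());
    System.out.println(m.getClass().getName());
  }
}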
diff --git a/src/java/org/apache/sqoop/Sqoop.java b/src/java/org/apache/sqoop/Sqoop.java
index 8764aff..08ff82c 100644
--- a/src/java/org/apache/sqoop/Sqoop.java
+++ b/src/java/org/apache/sqoop/Sqoop.java
@@ -28,22 +28,20 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.OptionsFileUtil;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.OptionsFileUtil;
 
-import static com.cloudera.sqoop.SqoopOptions.isSqoopRethrowSystemPropertySet;
+import static org.apache.sqoop.SqoopOptions.isSqoopRethrowSystemPropertySet;
 import static org.apache.sqoop.tool.BaseSqoopTool.THROW_ON_ERROR_ARG;
 
 /**
  * Main entry-point for Sqoop
- * Usage: hadoop jar (this_jar_name) com.cloudera.sqoop.Sqoop (options)
+ * Usage: hadoop jar (this_jar_name) org.apache.sqoop.Sqoop (options)
  * See the SqoopOptions class for options.
  */
 public class Sqoop extends Configured implements Tool {
 
-  public static final Log SQOOP_LOG = LogFactory.getLog("com.cloudera.sqoop");
+  public static final Log SQOOP_LOG = LogFactory.getLog("org.apache.sqoop");
   public static final Log LOG = LogFactory.getLog(Sqoop.class.getName());
 
   /**
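The updated class comment points callers at org.apache.sqoop.Sqoop directly. A minimal sketch of driving the entry point from Java, assuming the long-standing static runTool(String[]) helper, which this hunk does not show:

import org.apache.sqoop.Sqoop;

public class SqoopLaunchSketch {
  public static void main(String[] args) {
    // Same effect as: hadoop jar <sqoop-jar> org.apache.sqoop.Sqoop version
    // runTool(String[]) is assumed here; only the class doc and logger
    // changes are part of this hunk.
    int status = Sqoop.runTool(new String[] {"version"});
    System.exit(status);
  }
}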
diff --git a/src/java/org/apache/sqoop/SqoopOptions.java b/src/java/org/apache/sqoop/SqoopOptions.java
index d18141c..73d0757 100644
--- a/src/java/org/apache/sqoop/SqoopOptions.java
+++ b/src/java/org/apache/sqoop/SqoopOptions.java
@@ -45,14 +45,11 @@
 import org.apache.sqoop.validation.AbsoluteValidationThreshold;
 import org.apache.sqoop.validation.RowCountValidator;
 
-import com.cloudera.sqoop.SqoopOptions.FileLayout;
-import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
-import com.cloudera.sqoop.SqoopOptions.UpdateMode;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.RandomHash;
-import com.cloudera.sqoop.util.StoredAsProperty;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.RandomHash;
+import org.apache.sqoop.util.StoredAsProperty;
 
 import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
 import static org.apache.sqoop.orm.ClassWriter.toJavaIdentifier;
@@ -83,6 +80,44 @@
   public static final boolean METASTORE_PASSWORD_DEFAULT = false;
   public static final String DB_PASSWORD_KEY = "db.password";
 
+  /** Selects in-HDFS destination file format. */
+  public enum FileLayout {
+    TextFile,
+    SequenceFile,
+    AvroDataFile,
+    ParquetFile
+  }
+
+  /**
+   * Incremental imports support two modes:
+   * <ul>
+   * <li>new rows being appended to the end of a table with an
+   * incrementing id</li>
+   * <li>new data results in a date-last-modified column being
+   * updated to NOW(); Sqoop will pull all dirty rows in the next
+   * incremental import.</li>
+   * </ul>
+   */
+  public enum IncrementalMode {
+    None,
+    AppendRows,
+    DateLastModified,
+  }
+
+  /**
+   * The update mode option specifies how updates are performed when
+   * new rows are found with non-matching keys in the database.
+   * It supports two modes:
+   * <ul>
+   * <li>UpdateOnly: This is the default. New rows are silently ignored.</li>
+   * <li>AllowInsert: New rows are inserted into the database.</li>
+   * </ul>
+   */
+  public enum UpdateMode {
+    UpdateOnly,
+    AllowInsert
+  }
+
   /**
    * Thrown when invalid cmdline options are given.
    */
@@ -370,7 +405,7 @@
   // If we restore a job and then allow the user to apply arguments on
   // top, we retain the version without the arguments in a reference to the
   // 'parent' SqoopOptions instance, here.
-  private com.cloudera.sqoop.SqoopOptions parent;
+  private SqoopOptions parent;
 
   // Nonce directory name. Generate one per process, lazily, if
   // getNonceJarDir() is called. Not recorded in metadata. This is used as
@@ -2312,14 +2347,14 @@
   /**
    * Return the parent instance this SqoopOptions is derived from.
    */
-  public com.cloudera.sqoop.SqoopOptions getParent() {
+  public SqoopOptions getParent() {
     return this.parent;
   }
 
   /**
    * Set the parent instance this SqoopOptions is derived from.
    */
-  public void setParent(com.cloudera.sqoop.SqoopOptions options) {
+  public void setParent(SqoopOptions options) {
     this.parent = options;
   }
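With FileLayout, IncrementalMode, and UpdateMode inlined here, callers reference them as nested types of org.apache.sqoop.SqoopOptions rather than through the removed com.cloudera subclass. A minimal sketch using only the values added in this hunk:

import org.apache.sqoop.SqoopOptions;

public class EnumUsageSketch {
  public static void main(String[] args) {
    // The nested enums added above are addressed via the
    // org.apache.sqoop.SqoopOptions class.
    SqoopOptions.FileLayout layout = SqoopOptions.FileLayout.ParquetFile;
    SqoopOptions.IncrementalMode mode = SqoopOptions.IncrementalMode.AppendRows;
    SqoopOptions.UpdateMode update = SqoopOptions.UpdateMode.AllowInsert;
    System.out.println(layout + " " + mode + " " + update);
  }
}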
 
diff --git a/src/java/org/apache/sqoop/accumulo/AccumuloMutationProcessor.java b/src/java/org/apache/sqoop/accumulo/AccumuloMutationProcessor.java
index 123688c..9167dea 100644
--- a/src/java/org/apache/sqoop/accumulo/AccumuloMutationProcessor.java
+++ b/src/java/org/apache/sqoop/accumulo/AccumuloMutationProcessor.java
@@ -38,9 +38,9 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.FieldMappable;
-import com.cloudera.sqoop.lib.ProcessingException;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.ProcessingException;
 
 /**
  * SqoopRecordProcessor that performs an Accumulo mutation operation
diff --git a/src/java/org/apache/sqoop/avro/AvroUtil.java b/src/java/org/apache/sqoop/avro/AvroUtil.java
index ee29f14..1aae8df 100644
--- a/src/java/org/apache/sqoop/avro/AvroUtil.java
+++ b/src/java/org/apache/sqoop/avro/AvroUtil.java
@@ -155,7 +155,7 @@
   private static final String TIME_TYPE = "java.sql.Time";
   private static final String DATE_TYPE = "java.sql.Date";
   private static final String BIG_DECIMAL_TYPE = "java.math.BigDecimal";
-  private static final String BLOB_REF_TYPE = "com.cloudera.sqoop.lib.BlobRef";
+  private static final String BLOB_REF_TYPE = "org.apache.sqoop.lib.BlobRef";
 
   /**
    * Convert from Avro type to Sqoop's java representation of the SQL type
diff --git a/src/java/org/apache/sqoop/cli/ToolOptions.java b/src/java/org/apache/sqoop/cli/ToolOptions.java
index f148897..6ab8901 100644
--- a/src/java/org/apache/sqoop/cli/ToolOptions.java
+++ b/src/java/org/apache/sqoop/cli/ToolOptions.java
@@ -28,8 +28,6 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 
-import com.cloudera.sqoop.cli.RelatedOptions;
-
 /**
  * Class that holds several sets of related options, providing a container
  * for all the options associated with a single tool.
diff --git a/src/java/org/apache/sqoop/config/ConfigurationHelper.java b/src/java/org/apache/sqoop/config/ConfigurationHelper.java
index 298907d..e07a699 100644
--- a/src/java/org/apache/sqoop/config/ConfigurationHelper.java
+++ b/src/java/org/apache/sqoop/config/ConfigurationHelper.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.util.GenericOptionsParser;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
 import org.apache.hadoop.util.ReflectionUtils;
 
diff --git a/src/java/org/apache/sqoop/hbase/HBasePutProcessor.java b/src/java/org/apache/sqoop/hbase/HBasePutProcessor.java
index cf97b8a..27d6006 100644
--- a/src/java/org/apache/sqoop/hbase/HBasePutProcessor.java
+++ b/src/java/org/apache/sqoop/hbase/HBasePutProcessor.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.hbase;
 
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.FieldMappable;
-import com.cloudera.sqoop.lib.ProcessingException;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.ProcessingException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
diff --git a/src/java/org/apache/sqoop/hbase/ToStringPutTransformer.java b/src/java/org/apache/sqoop/hbase/ToStringPutTransformer.java
index 20bf1b9..0bd6169 100644
--- a/src/java/org/apache/sqoop/hbase/ToStringPutTransformer.java
+++ b/src/java/org/apache/sqoop/hbase/ToStringPutTransformer.java
@@ -18,7 +18,6 @@
 
 package org.apache.sqoop.hbase;
 
-import com.cloudera.sqoop.hbase.PutTransformer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/hive/HiveImport.java b/src/java/org/apache/sqoop/hive/HiveImport.java
index 153d091..c272911 100644
--- a/src/java/org/apache/sqoop/hive/HiveImport.java
+++ b/src/java/org/apache/sqoop/hive/HiveImport.java
@@ -45,9 +45,9 @@
 import org.apache.sqoop.util.LoggingAsyncSink;
 import org.apache.sqoop.util.SubprocessSecurityManager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.util.ExitSecurityException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.util.ExitSecurityException;
 
 /**
  * Utility to import a table into the Hive metastore. Manages the connection
diff --git a/src/java/org/apache/sqoop/hive/TableDefWriter.java b/src/java/org/apache/sqoop/hive/TableDefWriter.java
index deec32d..e1424c3 100644
--- a/src/java/org/apache/sqoop/hive/TableDefWriter.java
+++ b/src/java/org/apache/sqoop/hive/TableDefWriter.java
@@ -35,8 +35,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.sqoop.io.CodecMap;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
 import org.apache.sqoop.util.FileSystemUtil;
 
 /**
diff --git a/src/java/org/apache/sqoop/io/CodecMap.java b/src/java/org/apache/sqoop/io/CodecMap.java
index cec9358..d579618 100644
--- a/src/java/org/apache/sqoop/io/CodecMap.java
+++ b/src/java/org/apache/sqoop/io/CodecMap.java
@@ -70,13 +70,13 @@
   /**
    * Given a codec name, return the name of the concrete class
    * that implements it (or 'null' in the case of the "none" codec).
-   * @throws com.cloudera.sqoop.io.UnsupportedCodecException if a codec cannot
+   * @throws org.apache.sqoop.io.UnsupportedCodecException if a codec cannot
    * be found with the supplied name.
    */
   public static String getCodecClassName(String codecName)
-      throws com.cloudera.sqoop.io.UnsupportedCodecException {
+      throws org.apache.sqoop.io.UnsupportedCodecException {
     if (!codecNames.containsKey(codecName)) {
-      throw new com.cloudera.sqoop.io.UnsupportedCodecException(codecName);
+      throw new org.apache.sqoop.io.UnsupportedCodecException(codecName);
     }
 
     return codecNames.get(codecName);
@@ -85,11 +85,11 @@
   /**
    * Given a codec name, instantiate the concrete implementation
    * class that implements it.
-   * @throws com.cloudera.sqoop.io.UnsupportedCodecException if a codec cannot
+   * @throws org.apache.sqoop.io.UnsupportedCodecException if a codec cannot
    * be found with the supplied name.
    */
   public static CompressionCodec getCodec(String codecName,
-    Configuration conf) throws com.cloudera.sqoop.io.UnsupportedCodecException {
+    Configuration conf) throws org.apache.sqoop.io.UnsupportedCodecException {
     // Try standard Hadoop mechanism first
     CompressionCodec codec = getCodecByName(codecName, conf);
     if (codec != null) {
@@ -108,7 +108,7 @@
       return (CompressionCodec) ReflectionUtils.newInstance(
           codecClass, conf);
     } catch (ClassNotFoundException cnfe) {
-      throw new com.cloudera.sqoop.io.UnsupportedCodecException(
+      throw new org.apache.sqoop.io.UnsupportedCodecException(
           "Cannot find codec class "
           + codecClassName + " for codec " + codecName);
     }
@@ -162,11 +162,11 @@
    *
    * @return the short name of the codec
    *
-   * @throws com.cloudera.sqoop.io.UnsupportedCodecException
+   * @throws org.apache.sqoop.io.UnsupportedCodecException
    *          if no short name could be found
    */
   public static String getCodecShortNameByName(String codecName,
-    Configuration conf) throws com.cloudera.sqoop.io.UnsupportedCodecException {
+    Configuration conf) throws org.apache.sqoop.io.UnsupportedCodecException {
     if (codecNames.containsKey(codecName)) {
       return codecName;
     }
@@ -186,7 +186,7 @@
       return simpleName.toLowerCase();
     }
 
-    throw new com.cloudera.sqoop.io.UnsupportedCodecException(
+    throw new org.apache.sqoop.io.UnsupportedCodecException(
       "Cannot find codec class " + codecName + " for codec " + codecName);
   }
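
As a quick illustration of the CodecMap methods touched above (whose declared exception now resolves to org.apache.sqoop.io.UnsupportedCodecException), here is a minimal usage sketch; the "gzip" codec name and the surrounding class are illustrative assumptions, not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.sqoop.io.CodecMap;
    import org.apache.sqoop.io.UnsupportedCodecException;

    public class CodecMapSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        try {
          // Resolve a short codec name to its concrete class name.
          String className = CodecMap.getCodecClassName("gzip");
          // Instantiate the codec and map it back to a short name.
          CompressionCodec codec = CodecMap.getCodec("gzip", conf);
          String shortName = CodecMap.getCodecShortNameByName("gzip", conf);
          System.out.println(className + " / " + codec + " / " + shortName);
        } catch (UnsupportedCodecException e) {
          // Thrown when no codec can be found for the supplied name.
          e.printStackTrace();
        }
      }
    }
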
 
diff --git a/src/java/org/apache/sqoop/io/LobFile.java b/src/java/org/apache/sqoop/io/LobFile.java
index 4edde12..ec0f8fa 100644
--- a/src/java/org/apache/sqoop/io/LobFile.java
+++ b/src/java/org/apache/sqoop/io/LobFile.java
@@ -62,8 +62,7 @@
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DecompressorStream;
 
-import com.cloudera.sqoop.io.LobReaderCache;
-import com.cloudera.sqoop.util.RandomHash;
+import org.apache.sqoop.util.RandomHash;
 
 /**
  * File format which stores large object records.
@@ -97,7 +96,7 @@
   /**
    * Creates a LobFile Reader configured to read from the specified file.
    */
-  public static com.cloudera.sqoop.io.LobFile.Reader
+  public static org.apache.sqoop.io.LobFile.Reader
       open(Path p, Configuration conf) throws IOException {
     FileSystem fs = p.getFileSystem(conf);
     FileStatus [] stats = fs.listStatus(p);
@@ -125,7 +124,7 @@
    * @param codec the compression codec to use (or null for none).
    * @param entriesPerSegment number of entries per index segment.
    */
-  public static com.cloudera.sqoop.io.LobFile.Writer
+  public static org.apache.sqoop.io.LobFile.Writer
             create(Path p, Configuration conf, boolean isCharData,
             String codec, int entriesPerSegment)
       throws IOException {
@@ -139,7 +138,7 @@
    * @param isCharData true if this is for CLOBs, false for BLOBs.
    * @param codec the compression codec to use (or null for none).
    */
-  public static com.cloudera.sqoop.io.LobFile.Writer
+  public static org.apache.sqoop.io.LobFile.Writer
             create(Path p, Configuration conf, boolean isCharData,
             String codec) throws IOException {
     return create(p, conf, isCharData, codec,
@@ -152,7 +151,7 @@
    * @param conf the configuration to use to interact with the filesystem.
    * @param isCharData true if this is for CLOBs, false for BLOBs.
    */
-  public static com.cloudera.sqoop.io.LobFile.Writer
+  public static org.apache.sqoop.io.LobFile.Writer
             create(Path p, Configuration conf, boolean isCharData)
       throws IOException {
     return create(p, conf, isCharData, null);
@@ -163,7 +162,7 @@
    * @param p the path to create.
    * @param conf the configuration to use to interact with the filesystem.
    */
-  public static com.cloudera.sqoop.io.LobFile.Writer
+  public static org.apache.sqoop.io.LobFile.Writer
             create(Path p, Configuration conf) throws IOException {
     return create(p, conf, false);
   }
@@ -962,7 +961,7 @@
    * Reader implementation for LobFile format version 0. Acquire with
    * LobFile.open().
    */
-  private static class V0Reader extends com.cloudera.sqoop.io.LobFile.Reader {
+  private static class V0Reader extends org.apache.sqoop.io.LobFile.Reader {
     public static final Log LOG = LogFactory.getLog(
         V0Reader.class.getName());
 
@@ -1523,7 +1522,7 @@
    * Concrete writer implementation for LobFile format version 0.
    * Instantiate via LobFile.create().
    */
-  private static class V0Writer extends com.cloudera.sqoop.io.LobFile.Writer {
+  private static class V0Writer extends org.apache.sqoop.io.LobFile.Writer {
     public static final Log LOG = LogFactory.getLog(
         V0Writer.class.getName());
 
diff --git a/src/java/org/apache/sqoop/io/LobReaderCache.java b/src/java/org/apache/sqoop/io/LobReaderCache.java
index dbfa4f1..5c61906 100644
--- a/src/java/org/apache/sqoop/io/LobReaderCache.java
+++ b/src/java/org/apache/sqoop/io/LobReaderCache.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
-import com.cloudera.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobFile;
 import org.apache.sqoop.util.FileSystemUtil;
 
 /**
@@ -44,6 +44,30 @@
 
   private Map<Path, LobFile.Reader> readerMap;
 
+  private static final LobReaderCache CACHE;
+  static {
+    CACHE = new LobReaderCache();
+  }
+
+  /**
+   * @return the singleton LobReaderCache instance.
+   */
+  public static LobReaderCache getCache() {
+    return CACHE;
+  }
+
+  /**
+   * Creates a fully-qualified path object.
+   * @param path the path to fully-qualify with its fs URI.
+   * @param conf the current Hadoop FS configuration.
+   * @return a new path representing the same location as the input 'path',
+   * but with a fully-qualified URI.
+   */
+  public static Path qualify(Path path, Configuration conf)
+      throws IOException {
+    return org.apache.sqoop.util.FileSystemUtil.makeQualified(path, conf);
+  }
+
   /**
    * Open a LobFile for read access, returning a cached reader if one is
    * available, or a new reader otherwise.
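
A brief sketch of the two static helpers added to LobReaderCache above; the LobFile path is a hypothetical value used only for illustration:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.sqoop.io.LobReaderCache;

    public class LobReaderCacheSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Fully qualify a (hypothetical) LobFile path against the default filesystem.
        Path lobPath = LobReaderCache.qualify(new Path("/tmp/large_obj.lob"), conf);
        // After this change the cache is obtained as a process-wide singleton.
        LobReaderCache cache = LobReaderCache.getCache();
        System.out.println(lobPath + " via " + cache);
      }
    }
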
diff --git a/src/java/org/apache/sqoop/lib/BlobRef.java b/src/java/org/apache/sqoop/lib/BlobRef.java
index bff6b71..97a8276 100644
--- a/src/java/org/apache/sqoop/lib/BlobRef.java
+++ b/src/java/org/apache/sqoop/lib/BlobRef.java
@@ -29,14 +29,14 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BytesWritable;
 
-import com.cloudera.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobFile;
 
 /**
  * BlobRef is a wrapper that holds a BLOB either directly, or a
  * reference to a file that holds the BLOB data.
  */
 public class BlobRef extends
-  com.cloudera.sqoop.lib.LobRef<byte[], BytesWritable, InputStream> {
+    LobRef<byte[], BytesWritable, InputStream> {
 
   public static final Log LOG = LogFactory.getLog(BlobRef.class.getName());
 
@@ -107,7 +107,7 @@
    * @return a new BlobRef containing a reference to an external BLOB, or
    * an empty BlobRef if the data to be parsed is actually inline.
    */
-  public static com.cloudera.sqoop.lib.BlobRef parse(String inputString) {
+  public static org.apache.sqoop.lib.BlobRef parse(String inputString) {
     // If inputString is of the form 'externalLob(lf,%s,%d,%d)', then this is
     // an external BLOB stored at the LobFile indicated by '%s' with the next
     // two arguments representing its offset and length in the file.
@@ -118,13 +118,13 @@
     if (m.matches()) {
       // This is a LobFile. Extract the filename, offset and len from the
       // matcher.
-      return new com.cloudera.sqoop.lib.BlobRef(m.group(1),
+      return new org.apache.sqoop.lib.BlobRef(m.group(1),
           Long.valueOf(m.group(2)), Long.valueOf(m.group(3)));
     } else {
       // This is inline BLOB string data.
       LOG.warn(
           "Reparsing inline BLOB data is not supported; use SequenceFiles.");
-      return new com.cloudera.sqoop.lib.BlobRef();
+      return new org.apache.sqoop.lib.BlobRef();
     }
   }
 }
diff --git a/src/java/org/apache/sqoop/lib/ClobRef.java b/src/java/org/apache/sqoop/lib/ClobRef.java
index efe468a..3fd822d 100644
--- a/src/java/org/apache/sqoop/lib/ClobRef.java
+++ b/src/java/org/apache/sqoop/lib/ClobRef.java
@@ -26,14 +26,14 @@
 
 import org.apache.hadoop.io.Text;
 
-import com.cloudera.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobFile;
 
 /**
  * ClobRef is a wrapper that holds a CLOB either directly, or a
  * reference to a file that holds the CLOB data.
  */
 public class ClobRef
-  extends com.cloudera.sqoop.lib.LobRef<String, String, Reader> {
+  extends LobRef<String, String, Reader> {
 
   public ClobRef() {
     super();
@@ -90,7 +90,7 @@
    * @param inputString the text-based input data to parse.
    * @return a ClobRef to the given data.
    */
-  public static com.cloudera.sqoop.lib.ClobRef parse(String inputString) {
+  public static org.apache.sqoop.lib.ClobRef parse(String inputString) {
     // If inputString is of the form 'externalLob(lf,%s,%d,%d)', then this is
     // an external CLOB stored at the LobFile indicated by '%s' with the next
     // two arguments representing its offset and length in the file.
@@ -101,11 +101,11 @@
     if (m.matches()) {
       // This is a LobFile. Extract the filename, offset and len from the
       // matcher.
-      return new com.cloudera.sqoop.lib.ClobRef(m.group(1),
+      return new org.apache.sqoop.lib.ClobRef(m.group(1),
           Long.valueOf(m.group(2)), Long.valueOf(m.group(3)));
     } else {
       // This is inline CLOB string data.
-      return new com.cloudera.sqoop.lib.ClobRef(inputString);
+      return new org.apache.sqoop.lib.ClobRef(inputString);
     }
   }
 }
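
To illustrate the parse() factories adjusted in BlobRef and ClobRef above, a minimal sketch; the file name, offset and length are made-up values in the externalLob(lf,%s,%d,%d) form described by the code comments:

    import org.apache.sqoop.lib.BlobRef;
    import org.apache.sqoop.lib.ClobRef;

    public class LobRefParseSketch {
      public static void main(String[] args) {
        // Inline CLOB data: anything that does not match the externalLob pattern.
        ClobRef inlineClob = ClobRef.parse("plain inline text");
        // External LOB reference: LobFile name, offset and length (hypothetical values).
        ClobRef externalClob = ClobRef.parse("externalLob(lf,_lobs/part-m-00000.lob,68,255)");
        // BlobRef only supports the external form; inline data logs a warning
        // and yields an empty BlobRef.
        BlobRef externalBlob = BlobRef.parse("externalLob(lf,_lobs/part-m-00000.lob,68,255)");
        System.out.println(inlineClob + " / " + externalClob + " / " + externalBlob);
      }
    }
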
diff --git a/src/java/org/apache/sqoop/lib/DelimiterSet.java b/src/java/org/apache/sqoop/lib/DelimiterSet.java
index d76890e..36c8e53 100644
--- a/src/java/org/apache/sqoop/lib/DelimiterSet.java
+++ b/src/java/org/apache/sqoop/lib/DelimiterSet.java
@@ -60,6 +60,21 @@
       "sqoop.input.escaped.by";
   public static final String INPUT_ENCLOSE_REQUIRED_KEY =
       "sqoop.input.enclose.required";
+
+  // Static delimiter sets for the commonly-used delimiter arrangements.
+
+  public static final DelimiterSet DEFAULT_DELIMITERS;
+  public static final DelimiterSet HIVE_DELIMITERS;
+  public static final DelimiterSet MYSQL_DELIMITERS;
+
+  static {
+    DEFAULT_DELIMITERS = new DelimiterSet(',', '\n', NULL_CHAR, NULL_CHAR,
+        false);
+    MYSQL_DELIMITERS = new DelimiterSet(',', '\n', '\'', '\\', false);
+    HIVE_DELIMITERS = new DelimiterSet('\001', '\n',
+        NULL_CHAR, NULL_CHAR, false);
+  }
+
   /**
    * Create a delimiter set with the default delimiters
    * (comma for fields, newline for records).
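
A short sketch of the pre-built delimiter sets introduced above, next to the constructor call they wrap; purely illustrative:

    import org.apache.sqoop.lib.DelimiterSet;

    public class DelimiterSetSketch {
      public static void main(String[] args) {
        // Equivalent to DEFAULT_DELIMITERS: comma-separated fields, newline records,
        // no enclosing or escaping characters.
        DelimiterSet custom = new DelimiterSet(',', '\n',
            DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
        // The shared constants avoid rebuilding these common arrangements by hand.
        DelimiterSet mysql = DelimiterSet.MYSQL_DELIMITERS; // ',' fields, '\'' enclose, '\\' escape
        DelimiterSet hive = DelimiterSet.HIVE_DELIMITERS;   // '\001' fields, '\n' records
        System.out.println(custom + " / " + mysql + " / " + hive);
      }
    }
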
diff --git a/src/java/org/apache/sqoop/lib/FieldFormatter.java b/src/java/org/apache/sqoop/lib/FieldFormatter.java
index a6b055b..ba8d18e 100644
--- a/src/java/org/apache/sqoop/lib/FieldFormatter.java
+++ b/src/java/org/apache/sqoop/lib/FieldFormatter.java
@@ -36,8 +36,7 @@
    * @param delimiters
    * @return
    */
-  public static String hiveStringDropDelims(String str,
-      com.cloudera.sqoop.lib.DelimiterSet delimiters) {
+  public static String hiveStringDropDelims(String str, DelimiterSet delimiters) {
     return hiveStringReplaceDelims(str, "", delimiters);
   }
 
@@ -48,8 +47,7 @@
    * @param delimiters
    * @return
    */
-  public static String hiveStringReplaceDelims(String str, String replacement,
-      com.cloudera.sqoop.lib.DelimiterSet delimiters) {
+  public static String hiveStringReplaceDelims(String str, String replacement, DelimiterSet delimiters) {
     String droppedDelims = REPLACE_PATTERN.matcher(str).replaceAll(replacement);
     return escapeAndEnclose(droppedDelims, delimiters);
   }
@@ -75,16 +73,14 @@
    * '\000', those operations are not performed.
    * @return the escaped, enclosed version of 'str'.
    */
-  public static String escapeAndEnclose(String str,
-      com.cloudera.sqoop.lib.DelimiterSet delimiters) {
+  public static String escapeAndEnclose(String str, DelimiterSet delimiters) {
 
     char escape = delimiters.getEscapedBy();
     char enclose = delimiters.getEnclosedBy();
     boolean encloseRequired = delimiters.isEncloseRequired();
 
     // true if we can use an escape character.
-    boolean escapingLegal =
-        com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR != escape;
+    boolean escapingLegal = DelimiterSet.NULL_CHAR != escape;
     String withEscapes;
 
     if (null == str) {
@@ -99,7 +95,7 @@
       withEscapes = str;
     }
 
-    if (com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR == enclose) {
+    if (DelimiterSet.NULL_CHAR == enclose) {
       // The enclose-with character was left unset, so we can't enclose items.
 
       if (escapingLegal) {
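
A minimal sketch of the FieldFormatter helpers whose signatures were simplified above; the sample strings are arbitrary:

    import org.apache.sqoop.lib.DelimiterSet;
    import org.apache.sqoop.lib.FieldFormatter;

    public class FieldFormatterSketch {
      public static void main(String[] args) {
        // Escape and enclose a field that contains the MySQL field delimiter.
        String escaped = FieldFormatter.escapeAndEnclose("a,b", DelimiterSet.MYSQL_DELIMITERS);
        // Replace Hive-unfriendly delimiter characters before writing to a Hive table.
        String hiveSafe = FieldFormatter.hiveStringDropDelims("line1\nline2", DelimiterSet.HIVE_DELIMITERS);
        System.out.println(escaped + " / " + hiveSafe);
      }
    }
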
diff --git a/src/java/org/apache/sqoop/lib/FieldMapProcessor.java b/src/java/org/apache/sqoop/lib/FieldMapProcessor.java
index 6a4ade9..4885571 100644
--- a/src/java/org/apache/sqoop/lib/FieldMapProcessor.java
+++ b/src/java/org/apache/sqoop/lib/FieldMapProcessor.java
@@ -19,9 +19,6 @@
 
 import java.io.IOException;
 
-import com.cloudera.sqoop.lib.FieldMappable;
-import com.cloudera.sqoop.lib.ProcessingException;
-
 /**
  * Interface implemented by classes that process FieldMappable objects.
  */
diff --git a/src/java/org/apache/sqoop/lib/JdbcWritableBridge.java b/src/java/org/apache/sqoop/lib/JdbcWritableBridge.java
index afde585..d9628dc 100644
--- a/src/java/org/apache/sqoop/lib/JdbcWritableBridge.java
+++ b/src/java/org/apache/sqoop/lib/JdbcWritableBridge.java
@@ -27,9 +27,6 @@
 
 import org.apache.hadoop.io.BytesWritable;
 
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.ClobRef;
-
 /**
  * Contains a set of methods which can read db columns from a ResultSet into
  * Java types, and do serialization of these types to/from DataInput/DataOutput
@@ -242,13 +239,13 @@
     }
   }
 
-  public static void writeBlobRef(com.cloudera.sqoop.lib.BlobRef val,
+  public static void writeBlobRef(org.apache.sqoop.lib.BlobRef val,
       int paramIdx, int sqlType, PreparedStatement s) throws SQLException {
     // TODO: support this.
     throw new RuntimeException("Unsupported: Cannot export BLOB data");
   }
 
-  public static void writeClobRef(com.cloudera.sqoop.lib.ClobRef val,
+  public static void writeClobRef(org.apache.sqoop.lib.ClobRef val,
       int paramIdx, int sqlType, PreparedStatement s) throws SQLException {
     // TODO: support this.
     throw new RuntimeException("Unsupported: Cannot export CLOB data");
diff --git a/src/java/org/apache/sqoop/lib/LargeObjectLoader.java b/src/java/org/apache/sqoop/lib/LargeObjectLoader.java
index b8525fe..8cfee02 100644
--- a/src/java/org/apache/sqoop/lib/LargeObjectLoader.java
+++ b/src/java/org/apache/sqoop/lib/LargeObjectLoader.java
@@ -36,8 +36,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import com.cloudera.sqoop.io.LobFile;
-import com.cloudera.sqoop.util.TaskId;
+import org.apache.sqoop.io.LobFile;
+import org.apache.sqoop.util.TaskId;
 
 /**
  * Contains a set of methods which can read db columns from a ResultSet into
@@ -228,7 +228,7 @@
    * @throws IOException if an error occurs writing to the FileSystem.
    * @throws SQLException if an error occurs reading from the database.
    */
-  public com.cloudera.sqoop.lib.BlobRef readBlobRef(int colNum, ResultSet r)
+  public org.apache.sqoop.lib.BlobRef readBlobRef(int colNum, ResultSet r)
       throws IOException, InterruptedException, SQLException {
 
     long maxInlineLobLen = conf.getLong(
@@ -262,11 +262,11 @@
         lobWriter.finishRecord();
       }
 
-      return new com.cloudera.sqoop.lib.BlobRef(
+      return new org.apache.sqoop.lib.BlobRef(
           getRelativePath(curBlobWriter), recordOffset, len);
     } else {
       // This is a 1-based array.
-      return new com.cloudera.sqoop.lib.BlobRef(
+      return new org.apache.sqoop.lib.BlobRef(
           b.getBytes(1, (int) b.length()));
     }
   }
@@ -282,7 +282,7 @@
    * @throws IOException if an error occurs writing to the FileSystem.
    * @throws SQLException if an error occurs reading from the database.
    */
-  public com.cloudera.sqoop.lib.ClobRef readClobRef(int colNum, ResultSet r)
+  public org.apache.sqoop.lib.ClobRef readClobRef(int colNum, ResultSet r)
       throws IOException, InterruptedException, SQLException {
 
     long maxInlineLobLen = conf.getLong(
@@ -316,11 +316,11 @@
         lobWriter.finishRecord();
       }
 
-      return new com.cloudera.sqoop.lib.ClobRef(
+      return new org.apache.sqoop.lib.ClobRef(
           getRelativePath(lobWriter), recordOffset, len);
     } else {
       // This is a 1-based array.
-      return new com.cloudera.sqoop.lib.ClobRef(
+      return new org.apache.sqoop.lib.ClobRef(
           c.getSubString(1, (int) c.length()));
     }
   }
diff --git a/src/java/org/apache/sqoop/lib/LobRef.java b/src/java/org/apache/sqoop/lib/LobRef.java
index d6d6b25..4abddc4 100644
--- a/src/java/org/apache/sqoop/lib/LobRef.java
+++ b/src/java/org/apache/sqoop/lib/LobRef.java
@@ -34,8 +34,8 @@
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
-import com.cloudera.sqoop.io.LobFile;
-import com.cloudera.sqoop.io.LobReaderCache;
+import org.apache.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobReaderCache;
 
 /**
  * Abstract base class that holds a reference to a Blob or a Clob.
diff --git a/src/java/org/apache/sqoop/lib/LobSerializer.java b/src/java/org/apache/sqoop/lib/LobSerializer.java
index a30ffe7..4702356 100644
--- a/src/java/org/apache/sqoop/lib/LobSerializer.java
+++ b/src/java/org/apache/sqoop/lib/LobSerializer.java
@@ -29,25 +29,25 @@
   private LobSerializer() { }
 
   public static void writeClob(
-      com.cloudera.sqoop.lib.ClobRef clob, DataOutput out) throws IOException {
+      org.apache.sqoop.lib.ClobRef clob, DataOutput out) throws IOException {
     clob.write(out);
   }
 
   public static void writeBlob(
-      com.cloudera.sqoop.lib.BlobRef blob, DataOutput out) throws IOException {
+      org.apache.sqoop.lib.BlobRef blob, DataOutput out) throws IOException {
     blob.write(out);
   }
 
-  public static com.cloudera.sqoop.lib.ClobRef readClobFields(
+  public static org.apache.sqoop.lib.ClobRef readClobFields(
       DataInput in) throws IOException {
-    com.cloudera.sqoop.lib.ClobRef clob = new com.cloudera.sqoop.lib.ClobRef();
+    org.apache.sqoop.lib.ClobRef clob = new org.apache.sqoop.lib.ClobRef();
     clob.readFields(in);
     return clob;
   }
 
-  public static com.cloudera.sqoop.lib.BlobRef readBlobFields(
+  public static org.apache.sqoop.lib.BlobRef readBlobFields(
       DataInput in) throws IOException {
-    com.cloudera.sqoop.lib.BlobRef blob = new com.cloudera.sqoop.lib.BlobRef();
+    org.apache.sqoop.lib.BlobRef blob = new org.apache.sqoop.lib.BlobRef();
     blob.readFields(in);
     return blob;
   }
diff --git a/src/java/org/apache/sqoop/lib/RecordParser.java b/src/java/org/apache/sqoop/lib/RecordParser.java
index 7c29151..e641aff 100644
--- a/src/java/org/apache/sqoop/lib/RecordParser.java
+++ b/src/java/org/apache/sqoop/lib/RecordParser.java
@@ -84,11 +84,11 @@
     }
   }
 
-  private com.cloudera.sqoop.lib.DelimiterSet delimiters;
+  private DelimiterSet delimiters;
   private ArrayList<String> outputs;
 
 
-  public RecordParser(final com.cloudera.sqoop.lib.DelimiterSet delimitersIn) {
+  public RecordParser(final DelimiterSet delimitersIn) {
     this.delimiters = delimitersIn.copy();
     this.outputs = new ArrayList<String>();
   }
@@ -99,9 +99,9 @@
    * next call to parseRecord().
    */
   public List<String> parseRecord(CharSequence input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -114,9 +114,9 @@
    * next call to parseRecord().
    */
   public List<String> parseRecord(Text input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -131,9 +131,9 @@
    * next call to parseRecord().
    */
   public List<String> parseRecord(byte [] input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -146,9 +146,9 @@
    * next call to parseRecord().
    */
   public List<String> parseRecord(char [] input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -156,9 +156,9 @@
   }
 
   public List<String> parseRecord(ByteBuffer input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -173,9 +173,9 @@
    * next call to parseRecord().
    */
   public List<String> parseRecord(CharBuffer input)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError {
+      throws org.apache.sqoop.lib.RecordParser.ParseError {
     if (null == input) {
-      throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+      throw new org.apache.sqoop.lib.RecordParser.ParseError(
           "null input string");
     }
 
@@ -218,7 +218,7 @@
         add character literal to current string, return to UNENCLOSED_FIELD
     */
 
-    char curChar = com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR;
+    char curChar = DelimiterSet.NULL_CHAR;
     ParseState state = ParseState.FIELD_START;
     int len = input.length();
     StringBuilder sb = null;
@@ -259,7 +259,7 @@
           sb.append(curChar);
 
           if (enclosingRequired) {
-            throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+            throw new org.apache.sqoop.lib.RecordParser.ParseError(
                 "Opening field-encloser expected at position " + pos);
           }
         }
@@ -316,7 +316,7 @@
           pos = len;
         } else {
           // Don't know what to do with this character.
-          throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+          throw new org.apache.sqoop.lib.RecordParser.ParseError(
               "Expected delimiter at position " + pos);
         }
 
@@ -330,7 +330,7 @@
         break;
 
       default:
-        throw new com.cloudera.sqoop.lib.RecordParser.ParseError(
+        throw new org.apache.sqoop.lib.RecordParser.ParseError(
             "Unexpected parser state: " + state);
       }
     }
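
Since every ParseError reference above now resolves to org.apache.sqoop.lib, a minimal parsing sketch; the input line is arbitrary:

    import java.util.List;
    import org.apache.sqoop.lib.DelimiterSet;
    import org.apache.sqoop.lib.RecordParser;

    public class RecordParserSketch {
      public static void main(String[] args) throws RecordParser.ParseError {
        // Parse a single MySQL-style delimited record into its fields.
        RecordParser parser = new RecordParser(DelimiterSet.MYSQL_DELIMITERS);
        List<String> fields = parser.parseRecord("1,'alice',42\n");
        System.out.println(fields);
      }
    }
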
diff --git a/src/java/org/apache/sqoop/lib/SqoopRecord.java b/src/java/org/apache/sqoop/lib/SqoopRecord.java
index 9badc71..1ee8ac5 100644
--- a/src/java/org/apache/sqoop/lib/SqoopRecord.java
+++ b/src/java/org/apache/sqoop/lib/SqoopRecord.java
@@ -32,26 +32,26 @@
  * Interface implemented by the classes generated by sqoop's orm.ClassWriter.
  */
 public abstract class SqoopRecord implements Cloneable, DBWritable,
-    com.cloudera.sqoop.lib.FieldMappable, Writable  {
+    org.apache.sqoop.lib.FieldMappable, Writable  {
 
   public SqoopRecord() {
   }
 
 
   public abstract void parse(CharSequence s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(Text s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(byte [] s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(char [] s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(ByteBuffer s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void parse(CharBuffer s)
-      throws com.cloudera.sqoop.lib.RecordParser.ParseError;
+      throws org.apache.sqoop.lib.RecordParser.ParseError;
   public abstract void loadLargeObjects(
-      com.cloudera.sqoop.lib.LargeObjectLoader objLoader)
+      org.apache.sqoop.lib.LargeObjectLoader objLoader)
       throws SQLException, IOException, InterruptedException;
 
   /**
@@ -65,8 +65,7 @@
   /**
    * Format output data according to the specified delimiters.
    */
-  public abstract String toString(
-      com.cloudera.sqoop.lib.DelimiterSet delimiters);
+  public abstract String toString(DelimiterSet delimiters);
 
   /**
    * Use the default delimiters, but only append an end-of-record delimiter
@@ -91,8 +90,7 @@
    * use with TextOutputFormat, calling this with useRecordDelim=false may
    * make more sense.
    */
-  public String toString(
-      com.cloudera.sqoop.lib.DelimiterSet delimiters, boolean useRecordDelim) {
+  public String toString(DelimiterSet delimiters, boolean useRecordDelim) {
     if (useRecordDelim) {
       return toString(delimiters);
     } else {
@@ -123,11 +121,11 @@
    * @param processor A delegate that operates on this object.
    * @throws IOException if the processor encounters an IO error when
    * operating on this object.
-   * @throws com.cloudera.sqoop.lib.ProcessingException if the FieldMapProcessor
+   * @throws org.apache.sqoop.lib.ProcessingException if the FieldMapProcessor
    * encounters a general processing error when operating on this object.
    */
-  public void delegate(com.cloudera.sqoop.lib.FieldMapProcessor processor)
-      throws IOException, com.cloudera.sqoop.lib.ProcessingException {
+  public void delegate(org.apache.sqoop.lib.FieldMapProcessor processor)
+      throws IOException, org.apache.sqoop.lib.ProcessingException {
     processor.accept(this);
   }
 
diff --git a/src/java/org/apache/sqoop/manager/CatalogQueryManager.java b/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
index 7b2ee78..03500bb 100644
--- a/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
+++ b/src/java/org/apache/sqoop/manager/CatalogQueryManager.java
@@ -28,7 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
@@ -36,7 +36,7 @@
  * (instead of metadata calls) to retrieve information.
  */
 public abstract class CatalogQueryManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
     CatalogQueryManager.class.getName());
diff --git a/src/java/org/apache/sqoop/manager/ConnManager.java b/src/java/org/apache/sqoop/manager/ConnManager.java
index 1811ce0..d88b59b 100644
--- a/src/java/org/apache/sqoop/manager/ConnManager.java
+++ b/src/java/org/apache/sqoop/manager/ConnManager.java
@@ -40,13 +40,13 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hive.HiveTypes;
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.ClobRef;
-import com.cloudera.sqoop.manager.SqlManager;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hive.HiveTypes;
+import org.apache.sqoop.lib.BlobRef;
+import org.apache.sqoop.lib.ClobRef;
+import org.apache.sqoop.manager.SqlManager;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Abstract interface that manages connections to a database.
@@ -579,13 +579,13 @@
    * Perform an import of a table from the database into HDFS.
    */
   public abstract void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException;
 
   /**
    * Perform an import of a free-form query from the database into HDFS.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     throw new ImportException(
         "This database only supports table-based imports.");
@@ -649,7 +649,7 @@
    * Export data stored in HDFS into a table in a database.
    * This inserts new rows into the target table.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support exports");
   }
@@ -658,7 +658,7 @@
    * Export data stored in HDFS into a table in a database. This calls a stored
    * procedure to insert rows into the target table.
    */
-  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void callTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support exports "
         + "using stored procedures");
@@ -669,7 +669,7 @@
    * This updates existing rows in the target table, based on the
    * updateKeyCol specified in the context's SqoopOptions.
    */
-  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void updateTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("This database does not support updates");
   }
@@ -679,7 +679,7 @@
    * This may update or insert rows into the target table depending on
    * whether rows already exist in the target table or not.
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("Mixed update/insert is not supported"
         + " against the target database yet");
diff --git a/src/java/org/apache/sqoop/manager/CubridManager.java b/src/java/org/apache/sqoop/manager/CubridManager.java
index 73b91d0..e27f616 100644
--- a/src/java/org/apache/sqoop/manager/CubridManager.java
+++ b/src/java/org/apache/sqoop/manager/CubridManager.java
@@ -28,18 +28,18 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.sqoop.mapreduce.cubrid.CubridUpsertOutputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to CUBRID databases.
  */
 public class CubridManager extends
-    com.cloudera.sqoop.manager.CatalogQueryManager {
+    CatalogQueryManager {
 
   public static final Log LOG = LogFactory
       .getLog(CubridManager.class.getName());
@@ -50,7 +50,7 @@
 
   @Override
   public void importTable(
-      com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     // Then run the normal importTable() method.
@@ -61,7 +61,7 @@
    * Export data stored in HDFS into a table in a database.
    */
   public void exportTable(
-      com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context, null, null,
@@ -75,7 +75,7 @@
    */
   @Override
   public void upsertTable(
-      com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 
diff --git a/src/java/org/apache/sqoop/manager/Db2Manager.java b/src/java/org/apache/sqoop/manager/Db2Manager.java
index 7525521..7ff68ce 100644
--- a/src/java/org/apache/sqoop/manager/Db2Manager.java
+++ b/src/java/org/apache/sqoop/manager/Db2Manager.java
@@ -37,18 +37,18 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.sqoop.mapreduce.db.Db2DataDrivenDBInputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Manages connections to DB2 databases. Requires the DB2 JDBC driver.
  */
 public class Db2Manager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       Db2Manager.class.getName());
@@ -95,7 +95,7 @@
    */
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // Specify the DB2-specific DBInputFormat for import.
@@ -107,7 +107,7 @@
    * Export data stored in HDFS into a table in a database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context, null, null,
diff --git a/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java b/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
index 8cc9285..b075e0d 100644
--- a/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/DefaultManagerFactory.java
@@ -21,9 +21,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
 
 import static org.apache.sqoop.manager.SupportedManagers.CUBRID;
 import static org.apache.sqoop.manager.SupportedManagers.DB2;
@@ -41,7 +40,7 @@
  * shipped and enabled by default in Sqoop.
  */
 public class DefaultManagerFactory
-    extends com.cloudera.sqoop.manager.ManagerFactory {
+    extends org.apache.sqoop.manager.ManagerFactory {
 
   public static final Log LOG = LogFactory.getLog(
       DefaultManagerFactory.class.getName());
diff --git a/src/java/org/apache/sqoop/manager/DirectMySQLManager.java b/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
index c984a32..c3280af 100644
--- a/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectMySQLManager.java
@@ -22,18 +22,18 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.MySQLDumpImportJob;
-import com.cloudera.sqoop.mapreduce.MySQLExportJob;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.MySQLDumpImportJob;
+import org.apache.sqoop.mapreduce.MySQLExportJob;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ExportException;
 
 /**
  * Manages direct connections to MySQL databases
  * so we can use mysqldump to get really fast dumps.
  */
 public class DirectMySQLManager
-    extends com.cloudera.sqoop.manager.MySQLManager {
+    extends MySQLManager {
 
   public static final Log LOG = LogFactory.getLog(
       DirectMySQLManager.class.getName());
@@ -47,7 +47,7 @@
    * the database and upload the files directly to HDFS.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     context.setConnManager(this);
@@ -97,14 +97,14 @@
    * back into the database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     MySQLExportJob exportJob = new MySQLExportJob(context);
     exportJob.runExport();
   }
 
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     throw new ExportException("MySQL direct connector does not support upsert"
       + " mode. Please use JDBC based connector (remove --direct parameter)");
diff --git a/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java b/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
index af15824..39a14f3 100644
--- a/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectNetezzaManager.java
@@ -37,10 +37,10 @@
 import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableExportJob;
 import org.apache.sqoop.mapreduce.netezza.NetezzaExternalTableImportJob;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages direct mode transfers from Netezza databases using the external table
@@ -184,7 +184,7 @@
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     options = context.getOptions();
     context.setConnManager(this);
@@ -214,7 +214,7 @@
    * data from the database and upload the files directly to HDFS.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     context.setConnManager(this);
diff --git a/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java b/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
index 63b0704..c05e1c1 100644
--- a/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
+++ b/src/java/org/apache/sqoop/manager/DirectPostgresqlManager.java
@@ -42,19 +42,19 @@
 import org.apache.sqoop.util.PostgreSQLUtils;
 import org.apache.sqoop.util.SubstitutionUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.io.SplittableBufferedWriter;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.AsyncSink;
-import com.cloudera.sqoop.util.DirectImportUtils;
-import com.cloudera.sqoop.util.ErrorableAsyncSink;
-import com.cloudera.sqoop.util.ErrorableThread;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.Executor;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.JdbcUrl;
-import com.cloudera.sqoop.util.LoggingAsyncSink;
-import com.cloudera.sqoop.util.PerfCounters;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.io.SplittableBufferedWriter;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.util.AsyncSink;
+import org.apache.sqoop.util.DirectImportUtils;
+import org.apache.sqoop.util.ErrorableAsyncSink;
+import org.apache.sqoop.util.ErrorableThread;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.Executor;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.JdbcUrl;
+import org.apache.sqoop.util.LoggingAsyncSink;
+import org.apache.sqoop.util.PerfCounters;
 
 
 /**
@@ -62,7 +62,7 @@
  * commands.
  */
 public class DirectPostgresqlManager
-    extends com.cloudera.sqoop.manager.PostgresqlManager {
+    extends PostgresqlManager {
 
   public static final Log LOG = LogFactory.getLog(
       DirectPostgresqlManager.class.getName());
@@ -345,7 +345,7 @@
    * Import the table into HDFS by using psql to pull the data out of the db
    * via COPY FILE TO STDOUT.
    */
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
     throws IOException, ImportException {
 
     context.setConnManager(this);
diff --git a/src/java/org/apache/sqoop/manager/ExportJobContext.java b/src/java/org/apache/sqoop/manager/ExportJobContext.java
index 2a6f2b5..773cf74 100644
--- a/src/java/org/apache/sqoop/manager/ExportJobContext.java
+++ b/src/java/org/apache/sqoop/manager/ExportJobContext.java
@@ -21,7 +21,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.util.Jars;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 /**
  * A set of parameters describing an export operation; this is passed to
diff --git a/src/java/org/apache/sqoop/manager/GenericJdbcManager.java b/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
index f38bcc5..b88f0b8 100644
--- a/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
+++ b/src/java/org/apache/sqoop/manager/GenericJdbcManager.java
@@ -29,7 +29,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.cli.RelatedOptions;
 
 /**
@@ -38,7 +38,7 @@
  * class to load.
  */
 public class GenericJdbcManager
-    extends com.cloudera.sqoop.manager.SqlManager {
+    extends SqlManager {
 
   public static final Log LOG = LogFactory.getLog(
       GenericJdbcManager.class.getName());
diff --git a/src/java/org/apache/sqoop/manager/HsqldbManager.java b/src/java/org/apache/sqoop/manager/HsqldbManager.java
index 92b7d53..1fa58ee 100644
--- a/src/java/org/apache/sqoop/manager/HsqldbManager.java
+++ b/src/java/org/apache/sqoop/manager/HsqldbManager.java
@@ -23,9 +23,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
+import org.apache.sqoop.util.ExportException;
 import java.io.IOException;
 
 /**
@@ -33,7 +33,7 @@
  * Extends generic SQL manager.
  */
 public class HsqldbManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       HsqldbManager.class.getName());
@@ -83,7 +83,7 @@
 
   @Override
   /** {@inheritDoc} */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     // HSQLDB does not support multi-row inserts; disable that before export.
     context.getOptions().getConf().setInt(
diff --git a/src/java/org/apache/sqoop/manager/ImportJobContext.java b/src/java/org/apache/sqoop/manager/ImportJobContext.java
index 354cd15..4337865 100644
--- a/src/java/org/apache/sqoop/manager/ImportJobContext.java
+++ b/src/java/org/apache/sqoop/manager/ImportJobContext.java
@@ -19,8 +19,8 @@
 package org.apache.sqoop.manager;
 
 import org.apache.hadoop.mapreduce.InputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.SqoopOptions;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
diff --git a/src/java/org/apache/sqoop/manager/InformationSchemaManager.java b/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
index 29be2e3..d582b27 100644
--- a/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
+++ b/src/java/org/apache/sqoop/manager/InformationSchemaManager.java
@@ -21,14 +21,14 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 /**
  * Database manager that queries "information schema" directly
  * (instead of metadata calls) to retrieve information.
  */
 public abstract class InformationSchemaManager
-    extends com.cloudera.sqoop.manager.CatalogQueryManager {
+    extends CatalogQueryManager {
 
   public static final Log LOG = LogFactory.getLog(
     InformationSchemaManager.class.getName());
diff --git a/src/java/org/apache/sqoop/manager/MainframeManager.java b/src/java/org/apache/sqoop/manager/MainframeManager.java
index 101f3ce..a6002ef 100644
--- a/src/java/org/apache/sqoop/manager/MainframeManager.java
+++ b/src/java/org/apache/sqoop/manager/MainframeManager.java
@@ -37,14 +37,14 @@
 import org.apache.sqoop.mapreduce.mainframe.MainframeDatasetInputFormat;
 import org.apache.sqoop.mapreduce.mainframe.MainframeImportJob;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
 
 
 /**
  * ConnManager implementation for mainframe datasets.
  */
-public class MainframeManager extends com.cloudera.sqoop.manager.ConnManager {
+public class MainframeManager extends org.apache.sqoop.manager.ConnManager {
   public static final String DEFAULT_DATASET_COLUMN_NAME = "DEFAULT_COLUMN";
   protected SqoopOptions options;
   private static final Log LOG
@@ -63,7 +63,7 @@
    * partitioned dataset with MainframeDatasetInputFormat.
    */
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String pdsName = context.getTableName();
     String jarFile = context.getJarFile();
diff --git a/src/java/org/apache/sqoop/manager/ManagerFactory.java b/src/java/org/apache/sqoop/manager/ManagerFactory.java
index 23a2a10..693d3d2 100644
--- a/src/java/org/apache/sqoop/manager/ManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/ManagerFactory.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.manager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
 
 /**
  * Interface for factory classes for ConnManager implementations.
diff --git a/src/java/org/apache/sqoop/manager/MySQLManager.java b/src/java/org/apache/sqoop/manager/MySQLManager.java
index ba612e2..2d17707 100644
--- a/src/java/org/apache/sqoop/manager/MySQLManager.java
+++ b/src/java/org/apache/sqoop/manager/MySQLManager.java
@@ -38,10 +38,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
 import org.apache.sqoop.mapreduce.mysql.MySQLUpsertOutputFormat;
 import org.apache.sqoop.util.LoggingUtils;
 
@@ -49,7 +49,7 @@
  * Manages connections to MySQL databases.
  */
 public class MySQLManager
-    extends com.cloudera.sqoop.manager.InformationSchemaManager {
+    extends InformationSchemaManager {
 
   public static final Log LOG = LogFactory.getLog(MySQLManager.class.getName());
 
@@ -100,7 +100,7 @@
   }
 
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
 
     // Check that we're not doing a MapReduce from localhost. If we are, point
@@ -128,7 +128,7 @@
    * {@inheritDoc}
    */
   @Override
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     LOG.warn("MySQL Connector upsert functionality is using INSERT ON");
diff --git a/src/java/org/apache/sqoop/manager/MySQLUtils.java b/src/java/org/apache/sqoop/manager/MySQLUtils.java
index ee22f17..b005c79 100644
--- a/src/java/org/apache/sqoop/manager/MySQLUtils.java
+++ b/src/java/org/apache/sqoop/manager/MySQLUtils.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.manager;
 
-import static com.cloudera.sqoop.lib.DelimiterSet.NULL_CHAR;
+import static org.apache.sqoop.lib.DelimiterSet.NULL_CHAR;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -31,9 +31,9 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
 
-import com.cloudera.sqoop.config.ConfigurationConstants;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.util.DirectImportUtils;
+import org.apache.sqoop.config.ConfigurationConstants;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.util.DirectImportUtils;
 import org.apache.sqoop.lib.DelimiterSet;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
diff --git a/src/java/org/apache/sqoop/manager/NetezzaManager.java b/src/java/org/apache/sqoop/manager/NetezzaManager.java
index 8c21073..5f0f13f 100644
--- a/src/java/org/apache/sqoop/manager/NetezzaManager.java
+++ b/src/java/org/apache/sqoop/manager/NetezzaManager.java
@@ -33,11 +33,11 @@
 import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
 import org.apache.sqoop.mapreduce.netezza.NetezzaDataDrivenDBInputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to Netezza databases.
@@ -93,7 +93,7 @@
   }
 
   @Override
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // The user probably should have requested --direct to invoke external
@@ -117,7 +117,7 @@
   }
 
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     // The user probably should have requested --direct to invoke external
     // table option.
@@ -152,7 +152,7 @@
   }
 
   @Override
-  public void updateTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void updateTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     if (options.getNumMappers() > 1) {
       String msg = "Netezza update with multiple mappers can lead to "
diff --git a/src/java/org/apache/sqoop/manager/OracleManager.java b/src/java/org/apache/sqoop/manager/OracleManager.java
index c0f5114..12613e3 100644
--- a/src/java/org/apache/sqoop/manager/OracleManager.java
+++ b/src/java/org/apache/sqoop/manager/OracleManager.java
@@ -49,22 +49,22 @@
 import org.apache.sqoop.manager.oracle.OracleUtils;
 import org.apache.sqoop.util.LoggingUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.UpdateMode;
-import com.cloudera.sqoop.mapreduce.ExportBatchOutputFormat;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpsertExportJob;
-import com.cloudera.sqoop.mapreduce.OracleUpsertOutputFormat;
-import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.UpdateMode;
+import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
+import org.apache.sqoop.mapreduce.OracleUpsertOutputFormat;
+import org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Manages connections to Oracle databases.
  * Requires the Oracle JDBC driver.
  */
 public class OracleManager
-    extends com.cloudera.sqoop.manager.GenericJdbcManager {
+    extends org.apache.sqoop.manager.GenericJdbcManager {
 
   public static final Log LOG = LogFactory.getLog(
       OracleManager.class.getName());
@@ -445,7 +445,7 @@
 
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     context.setConnManager(this);
     // Specify the Oracle-specific DBInputFormat for import.
@@ -456,7 +456,7 @@
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context,
@@ -468,7 +468,7 @@
   /**
    * {@inheritDoc}
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcUpsertExportJob exportJob =
diff --git a/src/java/org/apache/sqoop/manager/PGBulkloadManager.java b/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
index 04e1443..1d50597 100644
--- a/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
+++ b/src/java/org/apache/sqoop/manager/PGBulkloadManager.java
@@ -19,9 +19,8 @@
 package org.apache.sqoop.manager;
 
 import java.io.IOException;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ExportException;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/java/org/apache/sqoop/manager/PostgresqlManager.java b/src/java/org/apache/sqoop/manager/PostgresqlManager.java
index 29f7c7c..8c810cc 100644
--- a/src/java/org/apache/sqoop/manager/PostgresqlManager.java
+++ b/src/java/org/apache/sqoop/manager/PostgresqlManager.java
@@ -31,15 +31,15 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.ImportException;
 import org.apache.sqoop.cli.RelatedOptions;
 
 /**
  * Manages connections to Postgresql databases.
  */
 public class PostgresqlManager
-    extends com.cloudera.sqoop.manager.CatalogQueryManager {
+    extends CatalogQueryManager {
 
   public static final String SCHEMA = "schema";
 
@@ -109,7 +109,7 @@
 
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
         throws IOException, ImportException {
 
     // The user probably should have requested --direct to invoke pg_dump.
diff --git a/src/java/org/apache/sqoop/manager/SQLServerManager.java b/src/java/org/apache/sqoop/manager/SQLServerManager.java
index cc5a1b4..d57a493 100644
--- a/src/java/org/apache/sqoop/manager/SQLServerManager.java
+++ b/src/java/org/apache/sqoop/manager/SQLServerManager.java
@@ -36,11 +36,11 @@
 import org.apache.sqoop.mapreduce.db.SQLServerDBInputFormat;
 import org.apache.sqoop.mapreduce.db.SQLServerConnectionFailureHandler;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 import org.apache.sqoop.cli.RelatedOptions;
 import org.apache.sqoop.mapreduce.sqlserver.SqlServerExportBatchOutputFormat;
@@ -52,7 +52,7 @@
  * driver.
  */
 public class SQLServerManager
-    extends com.cloudera.sqoop.manager.InformationSchemaManager {
+    extends InformationSchemaManager {
 
   public static final String SCHEMA = "schema";
   public static final String TABLE_HINTS = "table-hints";
@@ -134,7 +134,7 @@
    */
   @Override
   public void importTable(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     // We're the correct connection manager
     context.setConnManager(this);
@@ -165,7 +165,7 @@
    * Export data stored in HDFS into a table in a database.
    */
   @Override
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 
@@ -195,7 +195,7 @@
    * {@inheritDoc}
    */
   public void updateTable(
-          com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     if (isNonResilientOperation()) {
       super.updateTable(context);
@@ -212,7 +212,7 @@
   /**
    * {@inheritDoc}
    */
-  public void upsertTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
 
@@ -389,7 +389,7 @@
    * SQLServerDBInputFormat which handles connection failures while
    * using free-form query importer.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     if (!isNonResilientOperation()) {
       // Enable connection recovery only if split column is provided
@@ -408,7 +408,7 @@
    * SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForImport(
-      com.cloudera.sqoop.manager.ImportJobContext context) {
+      org.apache.sqoop.manager.ImportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 
@@ -430,7 +430,7 @@
    * using SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForExport(
-      com.cloudera.sqoop.manager.ExportJobContext context) {
+      org.apache.sqoop.manager.ExportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 
@@ -451,7 +451,7 @@
    * using SQLServerConnectionFailureHandler by default.
    */
   protected void configureConnectionRecoveryForUpdate(
-      com.cloudera.sqoop.manager.ExportJobContext context) {
+      org.apache.sqoop.manager.ExportJobContext context) {
 
     Configuration conf = context.getOptions().getConf();
 
diff --git a/src/java/org/apache/sqoop/manager/SqlManager.java b/src/java/org/apache/sqoop/manager/SqlManager.java
index 808e330..fe997c5 100644
--- a/src/java/org/apache/sqoop/manager/SqlManager.java
+++ b/src/java/org/apache/sqoop/manager/SqlManager.java
@@ -47,17 +47,17 @@
 import org.apache.sqoop.util.LoggingUtils;
 import org.apache.sqoop.util.SqlTypeMap;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hbase.HBaseUtil;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.mapreduce.HBaseImportJob;
-import com.cloudera.sqoop.mapreduce.ImportJobBase;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.util.ResultSetPrinter;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hbase.HBaseUtil;
+import org.apache.sqoop.mapreduce.DataDrivenImportJob;
+import org.apache.sqoop.mapreduce.HBaseImportJob;
+import org.apache.sqoop.mapreduce.ImportJobBase;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.util.ResultSetPrinter;
 
 /**
  * ConnManager implementation for generic SQL-compliant database.
@@ -65,7 +65,7 @@
  * ConnManager implementation to actually create the connection.
  */
 public abstract class SqlManager
-    extends com.cloudera.sqoop.manager.ConnManager {
+    extends org.apache.sqoop.manager.ConnManager {
 
   public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
 
@@ -628,7 +628,7 @@
    * @throws ImportException if the import is misconfigured.
    */
   protected void checkTableImportOptions(
-          com.cloudera.sqoop.manager.ImportJobContext context)
+      org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     SqoopOptions opts = context.getOptions();
@@ -653,7 +653,7 @@
    * Default implementation of importTable() is to launch a MapReduce job
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat.
    */
-  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importTable(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
@@ -697,7 +697,7 @@
    * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat,
    * using its free-form query importer.
    */
-  public void importQuery(com.cloudera.sqoop.manager.ImportJobContext context)
+  public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
       throws IOException, ImportException {
     String jarFile = context.getJarFile();
     SqoopOptions opts = context.getOptions();
@@ -924,7 +924,7 @@
   /**
    * Export data stored in HDFS into a table in a database.
    */
-  public void exportTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcExportJob exportJob = new JdbcExportJob(context);
@@ -932,7 +932,7 @@
   }
 
   @Override
-  public void callTable(com.cloudera.sqoop.manager.ExportJobContext context)
+  public void callTable(org.apache.sqoop.manager.ExportJobContext context)
       throws IOException,
       ExportException {
     context.setConnManager(this);
@@ -958,7 +958,7 @@
    * {@inheritDoc}
    */
   public void updateTable(
-          com.cloudera.sqoop.manager.ExportJobContext context)
+      org.apache.sqoop.manager.ExportJobContext context)
       throws IOException, ExportException {
     context.setConnManager(this);
     JdbcUpdateExportJob exportJob = new JdbcUpdateExportJob(context);
diff --git a/src/java/org/apache/sqoop/manager/SupportedManagers.java b/src/java/org/apache/sqoop/manager/SupportedManagers.java
index 1b65a9a..ed9c594 100644
--- a/src/java/org/apache/sqoop/manager/SupportedManagers.java
+++ b/src/java/org/apache/sqoop/manager/SupportedManagers.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.manager;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java b/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
index 2026c43..09207bb 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopConnManager.java
@@ -33,14 +33,14 @@
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.sqoop.manager.OracleManager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.manager.GenericJdbcManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.JdbcExportJob;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.GenericJdbcManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.JdbcExportJob;
+import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * OraOop manager for high performance Oracle import / export.
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java b/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
index 93efa76..948bdbb 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopDBInputSplit.java
@@ -24,7 +24,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.io.Text;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBInputFormat;
 
 class OraOopDBInputSplit extends DBInputFormat.DBInputSplit {
 
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java b/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
index f7d1889..d720cb8 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopDBRecordReader.java
@@ -26,11 +26,11 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat.DBInputSplit;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBInputFormat.DBInputSplit;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader;
 
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.manager.oracle.OraOopConstants.
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java b/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
index 3e88d04..c07a34c 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormat.java
@@ -32,9 +32,9 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * Reads data from Oracle table - data is divided between mappers based on ROWID
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java b/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
index 1cc8a04..a0ab604 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopManagerFactory.java
@@ -29,11 +29,11 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.IncrementalMode;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
 
 import org.apache.sqoop.manager.OracleManager;
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatUpdate.UpdateMode;
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
index c6b7098..e8f5a26 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatBase.java
@@ -33,10 +33,10 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.AsyncSqlOutputFormat;
+import org.apache.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
 abstract class OraOopOutputFormatBase<K extends SqoopRecord, V> extends
     ExportOutputFormat<K, V> {
@@ -199,7 +199,7 @@
   }
 
   abstract class OraOopDBRecordWriterBase extends
-      ExportOutputFormat<K, V>.ExportRecordWriter<K, V> {
+      ExportOutputFormat<K, V>.ExportRecordWriter {
 
     protected OracleTable oracleTable; // <- If exporting into a partitioned
                                        // table, this table will be unique for
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
index 1874b9f..940e4ff 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatInsert.java
@@ -29,7 +29,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert into an Oracle table based on emitted keys.
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
index 33bcb84..4971f38 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopOutputFormatUpdate.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.manager.oracle.OraOopOracleQueries.
diff --git a/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java b/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
index e73fd68..0910e95 100644
--- a/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
+++ b/src/java/org/apache/sqoop/manager/oracle/OraOopUtilities.java
@@ -41,9 +41,9 @@
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatInsert.InsertMode;
 import org.apache.sqoop.manager.oracle.OraOopOutputFormatUpdate.UpdateMode;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportJobBase;
 
 /**
  * Utilities used by OraOop.
diff --git a/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java b/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
index cb2145f..acd3201 100644
--- a/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/AccumuloImportJob.java
@@ -38,13 +38,12 @@
 import org.apache.sqoop.accumulo.AccumuloMutationProcessor;
 import org.apache.sqoop.accumulo.AccumuloUtil;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Runs an Accumulo import via DataDrivenDBInputFormat to the
diff --git a/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java b/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
index e196099..a27225b 100644
--- a/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/AccumuloImportMapper.java
@@ -21,8 +21,7 @@
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by writing them to Accumulo via the DelegatingOutputFormat
diff --git a/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
index ce11f84..422653e 100644
--- a/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/AsyncSqlOutputFormat.java
@@ -33,7 +33,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Abstract OutputFormat class that allows the RecordWriter to buffer
diff --git a/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java b/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
index 15a62a6..e6e3efb 100644
--- a/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
+++ b/src/java/org/apache/sqoop/mapreduce/AsyncSqlRecordWriter.java
@@ -30,8 +30,8 @@
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.util.LoggingUtils;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Abstract RecordWriter base class that buffers SqoopRecords to be injected
diff --git a/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java b/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
index 450f947..a5e5bf5 100644
--- a/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/AvroImportMapper.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.mapred.AvroWrapper;
diff --git a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
index dc49282..a5962ba 100644
--- a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
@@ -40,15 +40,15 @@
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.ImportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.orm.AvroSchemaGenerator;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.ImportJobBase;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.orm.AvroSchemaGenerator;
 import org.apache.sqoop.util.FileSystemUtil;
 import org.kitesdk.data.Datasets;
 import org.kitesdk.data.mapreduce.DatasetKeyOutputFormat;
diff --git a/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
index 2dd9be2..771c8a6 100644
--- a/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/DelegatingOutputFormat.java
@@ -27,9 +27,9 @@
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.ReflectionUtils;
-import com.cloudera.sqoop.lib.FieldMappable;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.ProcessingException;
+import org.apache.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.ProcessingException;
 
 /**
  * OutputFormat that produces a RecordReader which instantiates
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
index bc59eb9..c56fb33 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportBatchOutputFormat.java
@@ -27,8 +27,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * This class uses batch mode to execute underlying statements instead of
@@ -56,7 +55,7 @@
    * The actual database updates are executed in a second thread.
    */
   public class ExportBatchRecordWriter<K extends SqoopRecord, V>
-    extends ExportRecordWriter<K, V> {
+    extends ExportRecordWriter {
 
     public ExportBatchRecordWriter(TaskAttemptContext context)
         throws ClassNotFoundException, SQLException {
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
index 7dc3453..e53a846 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportCallOutputFormat.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert the emitted keys as records into a database table.
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java b/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
index 9946f14..6529bd2 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportJobBase.java
@@ -18,14 +18,13 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.JobBase;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.ExportException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
index c2e39b1..cb21e1f 100644
--- a/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/ExportOutputFormat.java
@@ -30,9 +30,8 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Insert the emitted keys as records into a database table.
diff --git a/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java b/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
index b60ee42..cec373a 100644
--- a/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/GenericRecordExportMapper.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
-import com.cloudera.sqoop.orm.ClassWriter;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.orm.ClassWriter;
 import org.apache.avro.Conversions;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
index ed89aeb..8d0c99f 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportJob.java
@@ -40,9 +40,9 @@
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.util.ImportException;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
 import com.google.common.base.Preconditions;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
index 4b583dd..9eb27bd 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseBulkImportMapper.java
@@ -33,9 +33,8 @@
 import org.apache.sqoop.hbase.PutTransformer;
 import org.apache.sqoop.hbase.ToStringPutTransformer;
 
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 import static org.apache.sqoop.hbase.HBasePutProcessor.*;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
index 5adb788..33da487 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
@@ -40,14 +40,13 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hbase.HBasePutProcessor;
-import com.cloudera.sqoop.lib.FieldMapProcessor;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hbase.HBasePutProcessor;
+import org.apache.sqoop.lib.FieldMapProcessor;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Runs an HBase import via DataDrivenDBInputFormat to the HBasePutProcessor
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java b/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
index 63e6cd3..039658d 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportMapper.java
@@ -21,8 +21,7 @@
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by writing them to HBase via the DelegatingOutputFormat
diff --git a/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java b/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
index 105917c..fb5d054 100644
--- a/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/ImportJobBase.java
@@ -18,13 +18,12 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.io.CodecMap;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.JobBase;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.io.CodecMap;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.ImportException;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.mapred.AvroJob;
 import org.apache.commons.logging.Log;
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java b/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
index f8594c7..b7eea93 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcCallExportJob.java
@@ -29,8 +29,8 @@
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
 import com.google.common.base.Strings;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java b/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
index 6f9afaf..3719836 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcExportJob.java
@@ -18,11 +18,10 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.DefaultStringifier;
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java b/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
index d13b560..86069c4 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcUpdateExportJob.java
@@ -37,11 +37,10 @@
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.kitesdk.data.mapreduce.DatasetKeyInputFormat;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 import org.apache.sqoop.util.FileSystemUtil;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java b/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
index 8e9d1b5..9a8c17a 100644
--- a/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/JdbcUpsertExportJob.java
@@ -26,11 +26,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.JdbcUpdateExportJob;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 
 /**
  * Run an update/insert export using JDBC (JDBC-based UpsertOutputFormat).
diff --git a/src/java/org/apache/sqoop/mapreduce/JobBase.java b/src/java/org/apache/sqoop/mapreduce/JobBase.java
index 62aa3a9..6d1e049 100644
--- a/src/java/org/apache/sqoop/mapreduce/JobBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/JobBase.java
@@ -20,6 +20,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.sql.SQLException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -33,14 +34,16 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.config.ConfigurationConstants;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import com.cloudera.sqoop.util.Jars;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.util.Jars;
+import org.apache.sqoop.validation.*;
 
 /**
  * Base class for configuring and running a MapReduce job.
@@ -436,4 +439,40 @@
     // So far, propagate only verbose flag
     configuration.setBoolean(PROPERTY_VERBOSE, options.getVerbose());
   }
+
+  protected long getRowCountFromDB(ConnManager connManager, String tableName)
+      throws SQLException {
+    return connManager.getTableRowCount(tableName);
+  }
+
+  protected long getRowCountFromHadoop(Job job)
+      throws IOException, InterruptedException {
+    return ConfigurationHelper.getNumMapOutputRecords(job);
+  }
+
+  protected void doValidate(SqoopOptions options, Configuration conf,
+                            ValidationContext validationContext)
+      throws ValidationException {
+    Validator validator = (Validator) ReflectionUtils.newInstance(
+        options.getValidatorClass(), conf);
+    ValidationThreshold threshold = (ValidationThreshold)
+        ReflectionUtils.newInstance(options.getValidationThresholdClass(),
+            conf);
+    ValidationFailureHandler failureHandler = (ValidationFailureHandler)
+        ReflectionUtils.newInstance(options.getValidationFailureHandlerClass(),
+            conf);
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("Validating the integrity of the import using the "
+        + "following configuration\n");
+    sb.append("\tValidator : ").append(validator.getClass().getName())
+        .append('\n');
+    sb.append("\tThreshold Specifier : ")
+        .append(threshold.getClass().getName()).append('\n');
+    sb.append("\tFailure Handler : ")
+        .append(failureHandler.getClass().getName()).append('\n');
+    LOG.info(sb.toString());
+    validator.validate(validationContext, threshold, failureHandler);
+  }
+
 }
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java b/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
index a2277bf..3976c29 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeAvroMapper.java
@@ -31,7 +31,7 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.sqoop.avro.AvroUtil;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Mapper for the merge program which operates on AVRO data files.
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeAvroReducer.java b/src/java/org/apache/sqoop/mapreduce/MergeAvroReducer.java
index 2e85f51..f0403a0 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeAvroReducer.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeAvroReducer.java
@@ -24,7 +24,7 @@
 import org.apache.avro.mapred.AvroWrapper;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.sqoop.avro.AvroUtil;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 public class MergeAvroReducer extends MergeReducerBase<AvroWrapper<GenericRecord>, NullWritable> {
   private AvroWrapper<GenericRecord> wrapper;
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeGenericRecordExportMapper.java b/src/java/org/apache/sqoop/mapreduce/MergeGenericRecordExportMapper.java
index 31d56a5..6d8b4b5 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeGenericRecordExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeGenericRecordExportMapper.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 public class MergeGenericRecordExportMapper<K, V>
     extends AutoProgressMapper<K, V, Text, MergeRecord> {
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeJob.java b/src/java/org/apache/sqoop/mapreduce/MergeJob.java
index c6be189..bb21b64 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeJob.java
@@ -56,8 +56,8 @@
 import parquet.hadoop.ParquetFileReader;
 import parquet.schema.MessageType;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.JobBase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.JobBase;
 import org.apache.sqoop.util.FileSystemUtil;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeMapperBase.java b/src/java/org/apache/sqoop/mapreduce/MergeMapperBase.java
index c4fd9f7..4657e0a 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeMapperBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeMapperBase.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Given a set of SqoopRecord instances which are from a "new" dataset
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeParquetMapper.java b/src/java/org/apache/sqoop/mapreduce/MergeParquetMapper.java
index 8a5a7ca..42016f2 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeParquetMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeParquetMapper.java
@@ -33,7 +33,7 @@
 import org.apache.avro.mapred.Pair;
 
 import org.apache.sqoop.avro.AvroUtil;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 
 public class MergeParquetMapper
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeParquetReducer.java b/src/java/org/apache/sqoop/mapreduce/MergeParquetReducer.java
index 293ffc9..caa4f5f 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeParquetReducer.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeParquetReducer.java
@@ -33,7 +33,7 @@
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.sqoop.avro.AvroUtil;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 
 public class MergeParquetReducer extends Reducer<Text, MergeRecord,GenericRecord,NullWritable> {
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeRecord.java b/src/java/org/apache/sqoop/mapreduce/MergeRecord.java
index d73112c..08f43d1 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeRecord.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeRecord.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Class that holds a record to be merged. This contains a SqoopRecord which
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeRecordMapper.java b/src/java/org/apache/sqoop/mapreduce/MergeRecordMapper.java
index bfab0c6..9098fab 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeRecordMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeRecordMapper.java
@@ -20,8 +20,7 @@
 
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.MergeMapperBase;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Mapper for the merge program which operates on SequenceFiles.
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeReducer.java b/src/java/org/apache/sqoop/mapreduce/MergeReducer.java
index 6192cdb..02d1be7 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeReducer.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeReducer.java
@@ -22,7 +22,7 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Reducer;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Reducer for merge tool. Given records tagged as 'old' or 'new', emit
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeReducerBase.java b/src/java/org/apache/sqoop/mapreduce/MergeReducerBase.java
index 4af498f..de3d612 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeReducerBase.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeReducerBase.java
@@ -23,7 +23,7 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.Reducer.Context;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 public abstract class MergeReducerBase<KEYOUT, VALUEOUT> extends
     Reducer<Text, MergeRecord, KEYOUT, VALUEOUT> {
diff --git a/src/java/org/apache/sqoop/mapreduce/MergeTextMapper.java b/src/java/org/apache/sqoop/mapreduce/MergeTextMapper.java
index c66067e..ef95080 100644
--- a/src/java/org/apache/sqoop/mapreduce/MergeTextMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MergeTextMapper.java
@@ -23,9 +23,8 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.ReflectionUtils;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.MergeMapperBase;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Mapper for the merge program which operates on text files that we need to
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLDumpImportJob.java b/src/java/org/apache/sqoop/mapreduce/MySQLDumpImportJob.java
index 43fbec4..19e50aa 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLDumpImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLDumpImportJob.java
@@ -24,12 +24,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.manager.MySQLUtils;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.MySQLUtils;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * Class that runs an import job using mysqldump in the mapper.
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java b/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java
index 72068a7..95825b3 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java
@@ -24,7 +24,7 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * InputFormat designed to take data-driven splits and feed them to a mysqldump
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLDumpMapper.java b/src/java/org/apache/sqoop/mapreduce/MySQLDumpMapper.java
index aeeb137..790dad4 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLDumpMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLDumpMapper.java
@@ -35,13 +35,13 @@
 import org.apache.sqoop.util.JdbcUrl;
 import org.apache.sqoop.util.PerfCounters;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.FieldFormatter;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.manager.MySQLUtils;
-import com.cloudera.sqoop.util.ErrorableAsyncSink;
-import com.cloudera.sqoop.util.ErrorableThread;
-import com.cloudera.sqoop.util.LoggingAsyncSink;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.FieldFormatter;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.manager.MySQLUtils;
+import org.apache.sqoop.util.ErrorableAsyncSink;
+import org.apache.sqoop.util.ErrorableThread;
+import org.apache.sqoop.util.LoggingAsyncSink;
 
 /**
  * Mapper that opens up a pipe to mysqldump and pulls data directly.
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLExportJob.java b/src/java/org/apache/sqoop/mapreduce/MySQLExportJob.java
index 16bdd74..e17f3df 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLExportJob.java
@@ -25,12 +25,11 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.manager.MySQLUtils;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.MySQLUtils;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * Class that runs an export job using mysqlimport in the mapper.
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLExportMapper.java b/src/java/org/apache/sqoop/mapreduce/MySQLExportMapper.java
index 0cfb0b3..bb751ee 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLExportMapper.java
@@ -34,9 +34,9 @@
 import org.apache.sqoop.util.LoggingAsyncSink;
 import org.apache.sqoop.util.NullAsyncSink;
 import org.apache.sqoop.util.TaskId;
-import com.cloudera.sqoop.io.NamedFifo;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.manager.MySQLUtils;
+import org.apache.sqoop.io.NamedFifo;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.manager.MySQLUtils;
 
 /**
  * Mapper that starts a 'mysqlimport' process and uses that to export rows from
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLRecordExportMapper.java b/src/java/org/apache/sqoop/mapreduce/MySQLRecordExportMapper.java
index d2d0c61..2b39c01 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLRecordExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLRecordExportMapper.java
@@ -20,8 +20,7 @@
 
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.MySQLExportMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * mysqlimport-based exporter which accepts SqoopRecords (e.g., from
diff --git a/src/java/org/apache/sqoop/mapreduce/MySQLTextExportMapper.java b/src/java/org/apache/sqoop/mapreduce/MySQLTextExportMapper.java
index b2fb035..6165c52 100644
--- a/src/java/org/apache/sqoop/mapreduce/MySQLTextExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/MySQLTextExportMapper.java
@@ -21,8 +21,7 @@
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import com.cloudera.sqoop.manager.MySQLUtils;
-import com.cloudera.sqoop.mapreduce.MySQLExportMapper;
+import org.apache.sqoop.manager.MySQLUtils;
 
 /**
  * mysqlimport-based exporter which accepts lines of text from files
diff --git a/src/java/org/apache/sqoop/mapreduce/OracleExportOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/OracleExportOutputFormat.java
index fb667f6..059fbfc 100644
--- a/src/java/org/apache/sqoop/mapreduce/OracleExportOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/OracleExportOutputFormat.java
@@ -22,8 +22,7 @@
 import java.sql.SQLException;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Oracle-specific SQL formatting overrides default ExportOutputFormat's.
@@ -47,7 +46,7 @@
    * The actual database updates are executed in a second thread.
    */
   public class OracleExportRecordWriter<K extends SqoopRecord, V>
-    extends ExportRecordWriter<K, V> {
+    extends ExportRecordWriter {
 
     public OracleExportRecordWriter(TaskAttemptContext context)
         throws ClassNotFoundException, SQLException {
diff --git a/src/java/org/apache/sqoop/mapreduce/OracleUpsertOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/OracleUpsertOutputFormat.java
index 79a4efd..95e5e31 100644
--- a/src/java/org/apache/sqoop/mapreduce/OracleUpsertOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/OracleUpsertOutputFormat.java
@@ -27,8 +27,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.UpdateOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.sqoop.manager.oracle.OracleUtils;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/ParquetImportMapper.java b/src/java/org/apache/sqoop/mapreduce/ParquetImportMapper.java
index 45211fc..35ab495 100644
--- a/src/java/org/apache/sqoop/mapreduce/ParquetImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/ParquetImportMapper.java
@@ -18,9 +18,8 @@
 
 package org.apache.sqoop.mapreduce;
 
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/mapreduce/SQLServerExportOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/SQLServerExportOutputFormat.java
index ead7fb2..6feb34a 100644
--- a/src/java/org/apache/sqoop/mapreduce/SQLServerExportOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/SQLServerExportOutputFormat.java
@@ -22,8 +22,7 @@
 import java.sql.SQLException;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * SQLServer-specific SQL formatting overrides default ExportOutputFormat's.
@@ -47,7 +46,7 @@
    * The actual database updates are executed in a second thread.
    */
   public class SQLServerExportRecordWriter<K extends SqoopRecord, V>
-    extends ExportRecordWriter<K, V> {
+    extends ExportRecordWriter {
 
     public SQLServerExportRecordWriter(TaskAttemptContext context)
         throws ClassNotFoundException, SQLException {
diff --git a/src/java/org/apache/sqoop/mapreduce/SequenceFileExportMapper.java b/src/java/org/apache/sqoop/mapreduce/SequenceFileExportMapper.java
index 434d7d3..e9d1b83 100644
--- a/src/java/org/apache/sqoop/mapreduce/SequenceFileExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/SequenceFileExportMapper.java
@@ -21,8 +21,7 @@
 import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Reads a SqoopRecord from the SequenceFile in which it's packed and emits
diff --git a/src/java/org/apache/sqoop/mapreduce/SequenceFileImportMapper.java b/src/java/org/apache/sqoop/mapreduce/SequenceFileImportMapper.java
index 96b523e..461a095 100644
--- a/src/java/org/apache/sqoop/mapreduce/SequenceFileImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/SequenceFileImportMapper.java
@@ -22,9 +22,8 @@
 import java.sql.SQLException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by writing them to a SequenceFile.
diff --git a/src/java/org/apache/sqoop/mapreduce/TextExportMapper.java b/src/java/org/apache/sqoop/mapreduce/TextExportMapper.java
index 7e05d42..691ccee 100644
--- a/src/java/org/apache/sqoop/mapreduce/TextExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/TextExportMapper.java
@@ -26,8 +26,7 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.util.ReflectionUtils;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
diff --git a/src/java/org/apache/sqoop/mapreduce/TextImportMapper.java b/src/java/org/apache/sqoop/mapreduce/TextImportMapper.java
index 6f52907..af99509 100644
--- a/src/java/org/apache/sqoop/mapreduce/TextImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/TextImportMapper.java
@@ -25,9 +25,8 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.SqoopRecord;
 
 /**
  * Imports records by transforming them to strings for a plain-text flat file.
diff --git a/src/java/org/apache/sqoop/mapreduce/UpdateOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/UpdateOutputFormat.java
index 96cec17..52305d9 100644
--- a/src/java/org/apache/sqoop/mapreduce/UpdateOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/UpdateOutputFormat.java
@@ -33,9 +33,8 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.AsyncSqlOutputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
 /**
  * Update an existing table of data with new value data.
diff --git a/src/java/org/apache/sqoop/mapreduce/cubrid/CubridUpsertOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/cubrid/CubridUpsertOutputFormat.java
index 46b048f..84dd6b5 100644
--- a/src/java/org/apache/sqoop/mapreduce/cubrid/CubridUpsertOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/cubrid/CubridUpsertOutputFormat.java
@@ -25,8 +25,8 @@
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.UpdateOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.UpdateOutputFormat;
 
 /**
  * Output format for CUBRID Update/insert functionality. We will use CUBRID
diff --git a/src/java/org/apache/sqoop/mapreduce/db/BigDecimalSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/BigDecimalSplitter.java
index a8db2a3..f7de6f4 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/BigDecimalSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/BigDecimalSplitter.java
@@ -28,9 +28,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.config.ConfigurationHelper;
 import org.apache.sqoop.validation.ValidationException;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/db/BooleanSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/BooleanSplitter.java
index 122bb8f..c345b34 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/BooleanSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/BooleanSplitter.java
@@ -25,9 +25,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-
 /**
  * Implement DBSplitter over boolean values.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java b/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java
index a9b7e42..ac90099 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java
@@ -35,8 +35,6 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat.NullDBWritable;
-
 /**
  * A container for configuration property names for jobs with DB input/output.
  *
@@ -401,7 +399,7 @@
 
   public Class<?> getInputClass() {
     return conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY,
-                         NullDBWritable.class);
+                         DBInputFormat.NullDBWritable.class);
   }
 
   public void setInputClass(Class<? extends DBWritable> inputClass) {
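
The DBConfiguration hunk above does more than swap imports: with the single-member import of the nested NullDBWritable class gone, getInputClass() now reaches it through its enclosing class, DBInputFormat, which lives in the same org.apache.sqoop.mapreduce.db package and therefore needs no import at all. The sketch below shows the same qualified nested-class pattern in isolation; it is illustrative only, not Sqoop source, and every name in it (package, classes, property key) is made up for the example.

    // Minimal sketch of the qualified nested-class pattern; not Sqoop source.
    // All names below are hypothetical.
    package org.example.db;

    import org.apache.hadoop.conf.Configuration;

    public class InputConfig {
      /** Stand-in for an input format with a nested default writable. */
      public static class InputFormatLike {
        public static class NullWritableLike { }
      }

      private final Configuration conf;

      public InputConfig(Configuration conf) {
        this.conf = conf;
      }

      public Class<?> getInputClass() {
        // Qualify the nested class through its enclosing class; no dedicated
        // import is needed when the enclosing class is in the same package.
        return conf.getClass("example.input.class",
                             InputFormatLike.NullWritableLike.class);
      }
    }
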
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/DBInputFormat.java
index 0a2e396..f4cd7f6 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DBInputFormat.java
@@ -43,10 +43,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBRecordReader;
-import com.cloudera.sqoop.mapreduce.db.OracleDBRecordReader;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * An InputFormat that reads input data from an SQL table.
@@ -230,7 +227,7 @@
   }
 
   protected RecordReader<LongWritable, T> createDBRecordReader(
-      com.cloudera.sqoop.mapreduce.db.DBInputFormat.DBInputSplit split,
+      DBInputFormat.DBInputSplit split,
       Configuration conf) throws IOException {
 
     @SuppressWarnings("unchecked")
@@ -264,7 +261,7 @@
       TaskAttemptContext context) throws IOException, InterruptedException {
 
     return createDBRecordReader(
-        (com.cloudera.sqoop.mapreduce.db.DBInputFormat.DBInputSplit) split,
+        (DBInputFormat.DBInputSplit) split,
         context.getConfiguration());
   }
 
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DBOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/DBOutputFormat.java
index f86ad2d..730ff28 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DBOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DBOutputFormat.java
@@ -35,8 +35,7 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.config.ConfigurationHelper;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
@@ -119,7 +118,7 @@
 
       statement = connection.prepareStatement(
                     constructQuery(tableName, fieldNames));
-      return new com.cloudera.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter(
+      return new org.apache.sqoop.mapreduce.db.DBOutputFormat.DBRecordWriter(
                      connection, statement);
     } catch (Exception ex) {
       throw new IOException(ex);
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/DBRecordReader.java
index eed5780..0ae324e 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DBRecordReader.java
@@ -34,8 +34,6 @@
 import org.apache.sqoop.mapreduce.DBWritable;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
index 136b30a..d039632 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
@@ -40,17 +40,8 @@
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.BigDecimalSplitter;
-import com.cloudera.sqoop.mapreduce.db.BooleanSplitter;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
-import com.cloudera.sqoop.mapreduce.db.DateSplitter;
-import com.cloudera.sqoop.mapreduce.db.FloatSplitter;
-import com.cloudera.sqoop.mapreduce.db.IntegerSplitter;
-import com.cloudera.sqoop.mapreduce.db.TextSplitter;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader;
 import org.apache.sqoop.validation.ValidationException;
 
 /**
@@ -161,7 +152,7 @@
             && (boundaryQuery == null || boundaryQuery.isEmpty())
             && splitLimit <= 0) {
       List<InputSplit> singletonSplit = new ArrayList<InputSplit>();
-      singletonSplit.add(new com.cloudera.sqoop.mapreduce.db.
+      singletonSplit.add(new org.apache.sqoop.mapreduce.db.
           DataDrivenDBInputFormat.DataDrivenDBInputSplit("1=1", "1=1"));
       return singletonSplit;
     }
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBRecordReader.java
index b734e05..62434fa 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DataDrivenDBRecordReader.java
@@ -27,11 +27,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBRecordReader;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-
 /**
  * A RecordReader that reads records from a SQL table,
  * using data-driven WHERE clause splits.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/DateSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/DateSplitter.java
index 9b94283..7e7287f 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/DateSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/DateSplitter.java
@@ -29,9 +29,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.IntegerSplitter;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * Implement DBSplitter over date/time values.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/Db2DBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/Db2DBRecordReader.java
index 5a3bcca..5859f0f 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/Db2DBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/Db2DBRecordReader.java
@@ -25,10 +25,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBRecordReader;
-
 /**
  * A RecordReader that reads records from DB2.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBInputFormat.java
index 95b1cbe..d79ff7f 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBInputFormat.java
@@ -26,9 +26,6 @@
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-
 /**
  * An InputFormat that reads input data from DB2.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBRecordReader.java
index f9eb79c..7b6e824 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/Db2DataDrivenDBRecordReader.java
@@ -23,10 +23,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
-
 /**
  * A RecordReader that reads records from DB2 via DataDrivenDBRecordReader.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/FloatSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/FloatSplitter.java
index 71a50d2..517d66d 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/FloatSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/FloatSplitter.java
@@ -27,9 +27,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * Implement DBSplitter over floating-point values.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/IntegerSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/IntegerSplitter.java
index 5f8f937..22c18e2 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/IntegerSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/IntegerSplitter.java
@@ -27,9 +27,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * Implement DBSplitter over integer values.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/OracleDBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/OracleDBRecordReader.java
index 4d7da39..c235362 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/OracleDBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/OracleDBRecordReader.java
@@ -29,10 +29,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DBRecordReader;
-
 /**
  * A RecordReader that reads records from an Oracle SQL table.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java
index 8b5103b..88a5369 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBInputFormat.java
@@ -27,13 +27,6 @@
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.OracleDataDrivenDBRecordReader;
-import com.cloudera.sqoop.mapreduce.db.OracleDateSplitter;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat.DBInputSplit;
-
 /**
  * An InputFormat that reads input data from an SQL table in an Oracle db.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java
index 871b59b..e8ef9e9 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/OracleDataDrivenDBRecordReader.java
@@ -23,11 +23,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.mapreduce.DBWritable;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
-import com.cloudera.sqoop.mapreduce.db.OracleDBRecordReader;
-
 /**
  * A RecordReader that reads records from an Oracle table
  * via DataDrivenDBRecordReader.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/OracleDateSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/OracleDateSplitter.java
index 5d2e9d3..a82abfb 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/OracleDateSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/OracleDateSplitter.java
@@ -19,8 +19,6 @@
 
 import java.util.Date;
 
-import com.cloudera.sqoop.mapreduce.db.DateSplitter;
-
 /**
  * Implement DBSplitter over date/time values returned by an Oracle db.
  * Make use of logic from DateSplitter, since this just needs to use
diff --git a/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBInputFormat.java
index c90daeb..09fb52f 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBInputFormat.java
@@ -31,8 +31,6 @@
 import org.apache.sqoop.mapreduce.DBWritable;
 import org.apache.sqoop.lib.SqoopRecord;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-
 /**
  * An InputFormat that reads input data from a SQL table.
  * Operates like DataDrivenDBInputFormat, but attempts to recover from
diff --git a/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBRecordReader.java b/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBRecordReader.java
index 1dea842..2c8a9fd 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/SQLServerDBRecordReader.java
@@ -30,10 +30,6 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-
 import org.apache.sqoop.lib.SqoopRecord;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/db/TextSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/TextSplitter.java
index 8c98d14..22bbfe6 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/TextSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/TextSplitter.java
@@ -28,9 +28,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.mapreduce.db.BigDecimalSplitter;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.config.ConfigurationHelper;
 import org.apache.sqoop.validation.ValidationException;
 
 /**
diff --git a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaDBDataSliceSplitter.java b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaDBDataSliceSplitter.java
index 368a349..9857192 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaDBDataSliceSplitter.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaDBDataSliceSplitter.java
@@ -27,7 +27,7 @@
 import
   org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * Netezza specific splitter based on data slice id.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableExportMapper.java b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableExportMapper.java
index aa058d1..5bf2188 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableExportMapper.java
@@ -46,7 +46,7 @@
 import org.apache.sqoop.util.PerfCounters;
 import org.apache.sqoop.util.TaskId;
 
-import com.cloudera.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.DelimiterSet;
 
 /**
  * Netezza export mapper using external tables.
diff --git a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableImportMapper.java b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableImportMapper.java
index 0b4b134..306062a 100644
--- a/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableImportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/db/netezza/NetezzaExternalTableImportMapper.java
@@ -40,13 +40,12 @@
 import org.apache.sqoop.io.NamedFifo;
 import org.apache.sqoop.lib.DelimiterSet;
 import org.apache.sqoop.manager.DirectNetezzaManager;
+import org.apache.sqoop.mapreduce.AutoProgressMapper;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.apache.sqoop.util.FileUploader;
 import org.apache.sqoop.util.PerfCounters;
 import org.apache.sqoop.util.TaskId;
 
-import com.cloudera.sqoop.mapreduce.AutoProgressMapper;
-
 /**
  * Netezza import mapper using external tables.
  */
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
index efaef17..21ff60a 100644
--- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
@@ -52,11 +52,11 @@
 import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.sqoop.mapreduce.ImportJobBase;
 
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.ClobRef;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.FieldFormatter;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.BlobRef;
+import org.apache.sqoop.lib.ClobRef;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.FieldFormatter;
+import org.apache.sqoop.lib.LargeObjectLoader;
 
 /**
  * Helper class for Sqoop HCat Integration import jobs.
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
index 2a1de7f..784b5f2 100644
--- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatUtilities.java
@@ -74,9 +74,9 @@
 import org.apache.sqoop.util.LoggingAsyncSink;
 import org.apache.sqoop.util.SubprocessSecurityManager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.util.ExitSecurityException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.util.ExitSecurityException;
 
 /**
  * Utility methods for the HCatalog support for Sqoop.
diff --git a/src/java/org/apache/sqoop/mapreduce/mainframe/MainframeImportJob.java b/src/java/org/apache/sqoop/mapreduce/mainframe/MainframeImportJob.java
index f222dc8..7e975c7 100644
--- a/src/java/org/apache/sqoop/mapreduce/mainframe/MainframeImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/mainframe/MainframeImportJob.java
@@ -26,8 +26,8 @@
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
 
 import org.apache.sqoop.mapreduce.DataDrivenImportJob;
 
diff --git a/src/java/org/apache/sqoop/mapreduce/mysql/MySQLUpsertOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/mysql/MySQLUpsertOutputFormat.java
index 72fffc4..207c170 100644
--- a/src/java/org/apache/sqoop/mapreduce/mysql/MySQLUpsertOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/mysql/MySQLUpsertOutputFormat.java
@@ -17,8 +17,8 @@
  */
 package org.apache.sqoop.mapreduce.mysql;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.UpdateOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.UpdateOutputFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.RecordWriter;
diff --git a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaDataDrivenDBInputFormat.java b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaDataDrivenDBInputFormat.java
index c4e0062..e5c772b 100644
--- a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaDataDrivenDBInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaDataDrivenDBInputFormat.java
@@ -30,7 +30,7 @@
 import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 import org.apache.sqoop.mapreduce.db.netezza.NetezzaDBDataSliceSplitter;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * Netezza specific DB input format.
diff --git a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableExportJob.java b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableExportJob.java
index b035d86..11ac95d 100644
--- a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableExportJob.java
@@ -38,11 +38,11 @@
   org.apache.sqoop.mapreduce.db.netezza.NetezzaExternalTableTextExportMapper;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.mapreduce.ExportJobBase;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.mapreduce.ExportJobBase;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBOutputFormat;
 
 /**
  * Class that runs an export job using netezza external tables in the mapper.
diff --git a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableImportJob.java b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableImportJob.java
index 58be69d..c983866 100644
--- a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableImportJob.java
@@ -39,11 +39,11 @@
   org.apache.sqoop.mapreduce.db.netezza.NetezzaExternalTableTextImportMapper;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**
  * Class that runs an import job using netezza external tables in the mapper.
diff --git a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableInputFormat.java b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableInputFormat.java
index 631c664..a6605dc 100644
--- a/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/netezza/NetezzaExternalTableInputFormat.java
@@ -31,7 +31,7 @@
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 /**
  * InputFormat designed to take data-driven splits and use them in the netezza
diff --git a/src/java/org/apache/sqoop/mapreduce/postgresql/PGBulkloadExportJob.java b/src/java/org/apache/sqoop/mapreduce/postgresql/PGBulkloadExportJob.java
index 3a5f277..21df486 100644
--- a/src/java/org/apache/sqoop/mapreduce/postgresql/PGBulkloadExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/postgresql/PGBulkloadExportJob.java
@@ -19,9 +19,9 @@
 package org.apache.sqoop.mapreduce.postgresql;
 
 import java.io.IOException;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportJob.java b/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportJob.java
index 483949f..e4b1350 100644
--- a/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportJob.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.mapreduce.postgresql;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.manager.ExportJobContext;
 import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportMapper.java b/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportMapper.java
index d10cadb..cf9a3cd 100644
--- a/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportMapper.java
+++ b/src/java/org/apache/sqoop/mapreduce/postgresql/PostgreSQLCopyExportMapper.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.mapreduce.postgresql;
 
-import com.cloudera.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.DelimiterSet;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
diff --git a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerExportBatchOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerExportBatchOutputFormat.java
index e583bbb..af15015 100644
--- a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerExportBatchOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerExportBatchOutputFormat.java
@@ -17,7 +17,7 @@
  */
 package org.apache.sqoop.mapreduce.sqlserver;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.manager.SQLServerManager;
diff --git a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerInputFormat.java b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerInputFormat.java
index 9996d1b..398cdc4 100644
--- a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerInputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerInputFormat.java
@@ -17,8 +17,8 @@
  */
 package org.apache.sqoop.mapreduce.sqlserver;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapreduce.RecordReader;
diff --git a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerRecordReader.java b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerRecordReader.java
index bc101c5..b7a964b 100644
--- a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerRecordReader.java
+++ b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerRecordReader.java
@@ -17,10 +17,10 @@
  */
 package org.apache.sqoop.mapreduce.sqlserver;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
-import com.cloudera.sqoop.mapreduce.db.DataDrivenDBRecordReader;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBRecordReader;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormat.java b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormat.java
index 5abb5ea..d082ddb 100644
--- a/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormat.java
+++ b/src/java/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormat.java
@@ -28,8 +28,8 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.sqoop.manager.SQLServerManager;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.mapreduce.UpdateOutputFormat;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.mapreduce.UpdateOutputFormat;
 
 /**
  * Update an existing table with new value if the table already
diff --git a/src/java/org/apache/sqoop/metastore/GenericJobStorage.java b/src/java/org/apache/sqoop/metastore/GenericJobStorage.java
index 4117d7a..648e1f6 100644
--- a/src/java/org/apache/sqoop/metastore/GenericJobStorage.java
+++ b/src/java/org/apache/sqoop/metastore/GenericJobStorage.java
@@ -31,16 +31,14 @@
 import java.util.Properties;
 import java.util.Set;
 
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ConnManager;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.metastore.JobStorage;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.sqoop.manager.DefaultManagerFactory;
 import org.apache.sqoop.manager.JdbcDrivers;
 
diff --git a/src/java/org/apache/sqoop/metastore/JobData.java b/src/java/org/apache/sqoop/metastore/JobData.java
index f3d5677..2c68e9f 100644
--- a/src/java/org/apache/sqoop/metastore/JobData.java
+++ b/src/java/org/apache/sqoop/metastore/JobData.java
@@ -18,8 +18,8 @@
 
 package org.apache.sqoop.metastore;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.SqoopTool;
 
 /**
  * Container for all job data that should be stored to a
diff --git a/src/java/org/apache/sqoop/metastore/JobStorage.java b/src/java/org/apache/sqoop/metastore/JobStorage.java
index e66ba5c..b7804ab 100644
--- a/src/java/org/apache/sqoop/metastore/JobStorage.java
+++ b/src/java/org/apache/sqoop/metastore/JobStorage.java
@@ -24,7 +24,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configured;
-import com.cloudera.sqoop.metastore.JobData;
 
 /**
  * API that defines how jobs are saved, restored, and manipulated.
diff --git a/src/java/org/apache/sqoop/metastore/JobStorageFactory.java b/src/java/org/apache/sqoop/metastore/JobStorageFactory.java
index 2163f2c..13d5201 100644
--- a/src/java/org/apache/sqoop/metastore/JobStorageFactory.java
+++ b/src/java/org/apache/sqoop/metastore/JobStorageFactory.java
@@ -23,7 +23,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.metastore.JobStorage;
 
 /**
  * Factory that produces the correct JobStorage system to work with
@@ -42,7 +41,7 @@
 
   /** The default list of available JobStorage implementations. */
   private static final String DEFAULT_AVAILABLE_STORAGES =
-      "com.cloudera.sqoop.metastore.GenericJobStorage,"
+      "org.apache.sqoop.metastore.GenericJobStorage,"
       + "org.apache.sqoop.metastore.AutoGenericJobStorage";
 
   public JobStorageFactory(Configuration config) {
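
GenericJobStorage itself moves packages in this patch, so the DEFAULT_AVAILABLE_STORAGES constant above has to follow: it is a comma-delimited list of fully qualified class names that is resolved reflectively, and a stale com.cloudera.* entry would only surface as a failure at runtime, not at compile time. The following is a minimal, self-contained sketch of resolving such a list; it is not Sqoop's implementation, and the class names used in main() are chosen only to make the sketch runnable.

    // Illustrative resolver for a comma-delimited class list; not Sqoop code.
    import java.util.ArrayList;
    import java.util.List;

    public class ClassListResolver {
      public static List<Class<?>> resolve(String commaDelimitedNames)
          throws ClassNotFoundException {
        List<Class<?>> classes = new ArrayList<>();
        for (String name : commaDelimitedNames.split(",")) {
          // A renamed or missing class fails here with ClassNotFoundException.
          classes.add(Class.forName(name.trim()));
        }
        return classes;
      }

      public static void main(String[] args) throws Exception {
        for (Class<?> c : resolve("java.lang.String, java.lang.Integer")) {
          System.out.println(c.getName());
        }
      }
    }
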
diff --git a/src/java/org/apache/sqoop/metastore/hsqldb/HsqldbMetaStore.java b/src/java/org/apache/sqoop/metastore/hsqldb/HsqldbMetaStore.java
index 273cc1d..4a949cb 100644
--- a/src/java/org/apache/sqoop/metastore/hsqldb/HsqldbMetaStore.java
+++ b/src/java/org/apache/sqoop/metastore/hsqldb/HsqldbMetaStore.java
@@ -32,9 +32,9 @@
 import org.hsqldb.Server;
 import org.hsqldb.ServerConstants;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.manager.HsqldbManager;
+import org.apache.sqoop.manager.HsqldbManager;
 
 /**
  * Container for an HSQLDB-backed metastore.
diff --git a/src/java/org/apache/sqoop/orm/AvroSchemaGenerator.java b/src/java/org/apache/sqoop/orm/AvroSchemaGenerator.java
index 3c31c43..7a2a5f9 100644
--- a/src/java/org/apache/sqoop/orm/AvroSchemaGenerator.java
+++ b/src/java/org/apache/sqoop/orm/AvroSchemaGenerator.java
@@ -32,8 +32,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
 import org.apache.sqoop.avro.AvroUtil;
 
 import org.apache.sqoop.config.ConfigurationConstants;
diff --git a/src/java/org/apache/sqoop/orm/ClassWriter.java b/src/java/org/apache/sqoop/orm/ClassWriter.java
index cdb2364..a4a768a 100644
--- a/src/java/org/apache/sqoop/orm/ClassWriter.java
+++ b/src/java/org/apache/sqoop/orm/ClassWriter.java
@@ -38,19 +38,19 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.sqoop.mapreduce.ImportJobBase;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.BigDecimalSerializer;
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.BooleanParser;
-import com.cloudera.sqoop.lib.ClobRef;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.FieldFormatter;
-import com.cloudera.sqoop.lib.JdbcWritableBridge;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.LobSerializer;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.BigDecimalSerializer;
+import org.apache.sqoop.lib.BlobRef;
+import org.apache.sqoop.lib.BooleanParser;
+import org.apache.sqoop.lib.ClobRef;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.FieldFormatter;
+import org.apache.sqoop.lib.JdbcWritableBridge;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.LobSerializer;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.ConnManager;
 
 /**
  * Creates an ORM class to represent a table from a database.
diff --git a/src/java/org/apache/sqoop/orm/CompilationManager.java b/src/java/org/apache/sqoop/orm/CompilationManager.java
index 3322c8b..6590cac 100644
--- a/src/java/org/apache/sqoop/orm/CompilationManager.java
+++ b/src/java/org/apache/sqoop/orm/CompilationManager.java
@@ -41,9 +41,9 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.Shell;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.util.FileListing;
-import com.cloudera.sqoop.util.Jars;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.util.FileListing;
+import org.apache.sqoop.util.Jars;
 
 import static org.apache.commons.lang3.StringUtils.substringBeforeLast;
 
diff --git a/src/java/org/apache/sqoop/orm/TableClassName.java b/src/java/org/apache/sqoop/orm/TableClassName.java
index 13aa301..5cb48a6 100644
--- a/src/java/org/apache/sqoop/orm/TableClassName.java
+++ b/src/java/org/apache/sqoop/orm/TableClassName.java
@@ -21,7 +21,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 /**
  * Reconciles the table name being imported with the class naming information
diff --git a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
index b4f972c..ce21918 100644
--- a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
@@ -33,20 +33,21 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.SupportedManagers;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.apache.sqoop.util.CredentialsUtil;
 import org.apache.sqoop.util.LoggingUtils;
 import org.apache.sqoop.util.password.CredentialProviderHelper;
 
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.IncrementalMode;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.metastore.JobData;
 
 /**
  * Layer on top of SqoopTool that provides some basic common code
@@ -55,7 +56,7 @@
  * Subclasses should call init() at the top of their run() method,
  * and call destroy() at the end in a finally block.
  */
-public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {
+public abstract class BaseSqoopTool extends org.apache.sqoop.tool.SqoopTool {
 
   public static final String METADATA_TRANSACTION_ISOLATION_LEVEL = "metadata-transaction-isolation-level";
 
@@ -1823,5 +1824,13 @@
 
     return dashPos;
   }
+
+  protected void validateHasDirectConnectorOption(SqoopOptions options) throws SqoopOptions.InvalidOptionsException {
+    SupportedManagers m = SupportedManagers.createFrom(options);
+    if (m != null && options.isDirect() && !m.hasDirectConnector()) {
+      throw new SqoopOptions.InvalidOptionsException(
+          "Was called with the --direct option, but no direct connector available.");
+    }
+  }
 }
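
Besides the import and superclass cleanup, the BaseSqoopTool hunk above adds one new piece of behaviour: the protected validateHasDirectConnectorOption(SqoopOptions) helper, which rejects the --direct flag whenever SupportedManagers reports no direct connector for the active manager. The sketch below shows how a subclass tool could reuse it; the subclass and its tool name are hypothetical, and the code assumes the Sqoop classes from this patch are on the classpath.

    // Hypothetical subclass, not part of this patch.
    package org.apache.sqoop.tool;

    import org.apache.sqoop.SqoopOptions;

    public class ExampleDirectAwareTool extends BaseSqoopTool {

      public ExampleDirectAwareTool() {
        super("example-direct-aware"); // hypothetical tool name
      }

      @Override
      public int run(SqoopOptions options) {
        try {
          // Fails fast if --direct was requested but the connection manager
          // has no direct connector.
          validateHasDirectConnectorOption(options);
        } catch (SqoopOptions.InvalidOptionsException e) {
          System.err.println(e.getMessage());
          return 1;
        }
        return 0;
      }
    }
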
 
diff --git a/src/java/org/apache/sqoop/tool/CodeGenTool.java b/src/java/org/apache/sqoop/tool/CodeGenTool.java
index 443cbf1..f0a8971 100644
--- a/src/java/org/apache/sqoop/tool/CodeGenTool.java
+++ b/src/java/org/apache/sqoop/tool/CodeGenTool.java
@@ -28,18 +28,18 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.orm.ClassWriter;
-import com.cloudera.sqoop.orm.CompilationManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.orm.ClassWriter;
+import org.apache.sqoop.orm.CompilationManager;
 
 /**
  * Tool that generates code from a database schema.
  */
-public class CodeGenTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class CodeGenTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(CodeGenTool.class.getName());
 
diff --git a/src/java/org/apache/sqoop/tool/CreateHiveTableTool.java b/src/java/org/apache/sqoop/tool/CreateHiveTableTool.java
index ec35491..d259566 100644
--- a/src/java/org/apache/sqoop/tool/CreateHiveTableTool.java
+++ b/src/java/org/apache/sqoop/tool/CreateHiveTableTool.java
@@ -26,16 +26,16 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.hive.HiveImport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.hive.HiveImport;
 
 /**
  * Tool that creates a Hive table definition.
  */
-public class CreateHiveTableTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class CreateHiveTableTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       CreateHiveTableTool.class.getName());
diff --git a/src/java/org/apache/sqoop/tool/EvalSqlTool.java b/src/java/org/apache/sqoop/tool/EvalSqlTool.java
index 413aa3d..7c204c4 100644
--- a/src/java/org/apache/sqoop/tool/EvalSqlTool.java
+++ b/src/java/org/apache/sqoop/tool/EvalSqlTool.java
@@ -33,16 +33,16 @@
 
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.util.ResultSetPrinter;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.util.ResultSetPrinter;
 
 /**
  * Tool that evaluates a SQL statement and displays the results.
  */
-public class EvalSqlTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class EvalSqlTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(EvalSqlTool.class.getName());
 
diff --git a/src/java/org/apache/sqoop/tool/ExportTool.java b/src/java/org/apache/sqoop/tool/ExportTool.java
index 4bd2fed..060f2c0 100644
--- a/src/java/org/apache/sqoop/tool/ExportTool.java
+++ b/src/java/org/apache/sqoop/tool/ExportTool.java
@@ -28,19 +28,19 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.SqoopOptions.UpdateMode;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.util.ExportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.SqoopOptions.UpdateMode;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.util.ExportException;
 import static org.apache.sqoop.manager.SupportedManagers.MYSQL;
 
 /**
  * Tool that performs HDFS exports to databases.
  */
-public class ExportTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ExportTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(ExportTool.class.getName());
   public static final String NOT_SUPPORTED_FILE_FORMAT_ERROR_MSG = "Please note that the export tool " +
diff --git a/src/java/org/apache/sqoop/tool/HelpTool.java b/src/java/org/apache/sqoop/tool/HelpTool.java
index bc1d7e9..fc8dbde 100644
--- a/src/java/org/apache/sqoop/tool/HelpTool.java
+++ b/src/java/org/apache/sqoop/tool/HelpTool.java
@@ -20,13 +20,13 @@
 
 import java.util.Set;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that explains the usage of Sqoop.
  */
-public class HelpTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class HelpTool extends BaseSqoopTool {
 
   public HelpTool() {
     super("help");
diff --git a/src/java/org/apache/sqoop/tool/ImportAllTablesTool.java b/src/java/org/apache/sqoop/tool/ImportAllTablesTool.java
index efe4e32..18f7a0a 100644
--- a/src/java/org/apache/sqoop/tool/ImportAllTablesTool.java
+++ b/src/java/org/apache/sqoop/tool/ImportAllTablesTool.java
@@ -28,16 +28,16 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Tool that performs database imports of all tables in a database to HDFS.
  */
-public class ImportAllTablesTool extends com.cloudera.sqoop.tool.ImportTool {
+public class ImportAllTablesTool extends ImportTool {
 
   public static final Log LOG = LogFactory.getLog(
       ImportAllTablesTool.class.getName());
diff --git a/src/java/org/apache/sqoop/tool/ImportTool.java b/src/java/org/apache/sqoop/tool/ImportTool.java
index 807ec8c..e992005 100644
--- a/src/java/org/apache/sqoop/tool/ImportTool.java
+++ b/src/java/org/apache/sqoop/tool/ImportTool.java
@@ -38,28 +38,28 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.avro.AvroSchemaMismatchException;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.MergeJob;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.metastore.JobStorage;
-import com.cloudera.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.MergeJob;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.metastore.JobStorage;
+import org.apache.sqoop.metastore.JobStorageFactory;
 import org.apache.sqoop.orm.ClassWriter;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.AppendUtils;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.AppendUtils;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.util.ImportException;
 
 import static org.apache.sqoop.manager.SupportedManagers.MYSQL;
 
 /**
  * Tool that performs database imports to HDFS.
  */
-public class ImportTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ImportTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(ImportTool.class.getName());
 
diff --git a/src/java/org/apache/sqoop/tool/JobTool.java b/src/java/org/apache/sqoop/tool/JobTool.java
index 72234ba..cf5f320 100644
--- a/src/java/org/apache/sqoop/tool/JobTool.java
+++ b/src/java/org/apache/sqoop/tool/JobTool.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.tool;
 
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_USERNAME_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_USERNAME_KEY;
 
 import java.io.IOException;
 
@@ -39,19 +39,21 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.metastore.JobStorage;
-import com.cloudera.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.metastore.GenericJobStorage;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.metastore.JobStorage;
+import org.apache.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.manager.JdbcDrivers;
 import org.apache.sqoop.metastore.PasswordRedactor;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Tool that creates and executes saved jobs.
  */
-public class JobTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class JobTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       JobTool.class.getName());
@@ -180,8 +182,7 @@
 
     // Now that the tool is fully configured, materialize the job.
     //TODO(jarcec): Remove the cast when JobData will be moved to apache package
-    JobData jobData = new JobData(jobOptions,
-            (com.cloudera.sqoop.tool.SqoopTool)jobTool);
+    JobData jobData = new JobData(jobOptions, jobTool);
     this.storage.create(jobName, jobData);
     return 0; // Success.
   }
diff --git a/src/java/org/apache/sqoop/tool/ListDatabasesTool.java b/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
index 2dfbfb5..86c0444 100644
--- a/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
+++ b/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
@@ -22,14 +22,14 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that lists available databases on a server.
  */
-public class ListDatabasesTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ListDatabasesTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       ListDatabasesTool.class.getName());
diff --git a/src/java/org/apache/sqoop/tool/ListTablesTool.java b/src/java/org/apache/sqoop/tool/ListTablesTool.java
index 5d47bc9..0441352 100644
--- a/src/java/org/apache/sqoop/tool/ListTablesTool.java
+++ b/src/java/org/apache/sqoop/tool/ListTablesTool.java
@@ -22,14 +22,14 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that lists available tables in a database.
  */
-public class ListTablesTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ListTablesTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       ListTablesTool.class.getName());
diff --git a/src/java/org/apache/sqoop/tool/MainframeImportTool.java b/src/java/org/apache/sqoop/tool/MainframeImportTool.java
index 0cb91db..8883301 100644
--- a/src/java/org/apache/sqoop/tool/MainframeImportTool.java
+++ b/src/java/org/apache/sqoop/tool/MainframeImportTool.java
@@ -25,10 +25,10 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.sqoop.mapreduce.mainframe.MainframeConfiguration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that performs mainframe dataset imports to HDFS.
diff --git a/src/java/org/apache/sqoop/tool/MergeTool.java b/src/java/org/apache/sqoop/tool/MergeTool.java
index a710740..311fee8 100644
--- a/src/java/org/apache/sqoop/tool/MergeTool.java
+++ b/src/java/org/apache/sqoop/tool/MergeTool.java
@@ -25,17 +25,17 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.mapreduce.MergeJob;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.mapreduce.MergeJob;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Tool that merges a more recent dataset on top of an older one.
  */
-public class MergeTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class MergeTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(MergeTool.class.getName());
 
diff --git a/src/java/org/apache/sqoop/tool/MetastoreTool.java b/src/java/org/apache/sqoop/tool/MetastoreTool.java
index 53e56f0..596e911 100644
--- a/src/java/org/apache/sqoop/tool/MetastoreTool.java
+++ b/src/java/org/apache/sqoop/tool/MetastoreTool.java
@@ -23,16 +23,16 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.metastore.hsqldb.HsqldbMetaStore;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore;
 
 /**
  * Tool that runs a standalone Sqoop metastore.
  */
-public class MetastoreTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class MetastoreTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       MetastoreTool.class.getName());
diff --git a/src/java/org/apache/sqoop/tool/SqoopTool.java b/src/java/org/apache/sqoop/tool/SqoopTool.java
index 5b8453d..732cbe9 100644
--- a/src/java/org/apache/sqoop/tool/SqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/SqoopTool.java
@@ -42,11 +42,10 @@
 import org.apache.sqoop.util.ClassLoaderStack;
 import org.apache.sqoop.config.ConfigurationHelper;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.SqoopParser;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.tool.ToolDesc;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.SqoopParser;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Base class for Sqoop subprograms (e.g., SqoopImport, SqoopExport, etc.)
@@ -412,7 +411,7 @@
     // This tool is the "active" tool; bind it in the SqoopOptions.
     //TODO(jarcec): Remove the cast when SqoopOptions will be moved
     //              to apache package
-    out.setActiveSqoopTool((com.cloudera.sqoop.tool.SqoopTool)this);
+    out.setActiveSqoopTool(this);
 
     String [] toolArgs = args; // args after generic parser is done.
     if (useGenericOptions) {
diff --git a/src/java/org/apache/sqoop/tool/ToolPlugin.java b/src/java/org/apache/sqoop/tool/ToolPlugin.java
index 5fa5e6f..d5ec199 100644
--- a/src/java/org/apache/sqoop/tool/ToolPlugin.java
+++ b/src/java/org/apache/sqoop/tool/ToolPlugin.java
@@ -20,8 +20,6 @@
 
 import java.util.List;
 
-import com.cloudera.sqoop.tool.ToolDesc;
-
 /**
  * Abstract base class that defines the ToolPlugin API; additional SqoopTool
  * implementations may be registered with the system via ToolPlugin classes.
diff --git a/src/java/org/apache/sqoop/tool/VersionTool.java b/src/java/org/apache/sqoop/tool/VersionTool.java
index bcd824d..aafddb2 100644
--- a/src/java/org/apache/sqoop/tool/VersionTool.java
+++ b/src/java/org/apache/sqoop/tool/VersionTool.java
@@ -18,13 +18,13 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that prints Sqoop's version.
  */
-public class VersionTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class VersionTool extends BaseSqoopTool {
 
   public VersionTool() {
     super("version");
diff --git a/src/java/org/apache/sqoop/util/AppendUtils.java b/src/java/org/apache/sqoop/util/AppendUtils.java
index a3082c4..fa85280 100644
--- a/src/java/org/apache/sqoop/util/AppendUtils.java
+++ b/src/java/org/apache/sqoop/util/AppendUtils.java
@@ -27,8 +27,8 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
diff --git a/src/java/org/apache/sqoop/util/CredentialsUtil.java b/src/java/org/apache/sqoop/util/CredentialsUtil.java
index c627b32..fee0bdc 100644
--- a/src/java/org/apache/sqoop/util/CredentialsUtil.java
+++ b/src/java/org/apache/sqoop/util/CredentialsUtil.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.util;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/java/org/apache/sqoop/util/DirectImportUtils.java b/src/java/org/apache/sqoop/util/DirectImportUtils.java
index d801c8f..a0a5efc 100644
--- a/src/java/org/apache/sqoop/util/DirectImportUtils.java
+++ b/src/java/org/apache/sqoop/util/DirectImportUtils.java
@@ -31,13 +31,13 @@
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.io.CodecMap;
-import com.cloudera.sqoop.io.SplittingOutputStream;
-import com.cloudera.sqoop.io.SplittableBufferedWriter;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.io.CodecMap;
+import org.apache.sqoop.io.SplittingOutputStream;
+import org.apache.sqoop.io.SplittableBufferedWriter;
 
 import org.apache.hadoop.util.Shell;
-import com.cloudera.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.ImportJobContext;
 
 /**
 * Utility methods that are common to the various direct import managers.
diff --git a/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java b/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
index aaea322..47d2857 100644
--- a/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
@@ -18,8 +18,6 @@
 
 package org.apache.sqoop.util;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * Partial implementation of AsyncSink that relies on ErrorableThread to
  * provide a status bit for the join() method.
diff --git a/src/java/org/apache/sqoop/util/Jars.java b/src/java/org/apache/sqoop/util/Jars.java
index 476d59a..3809ada 100644
--- a/src/java/org/apache/sqoop/util/Jars.java
+++ b/src/java/org/apache/sqoop/util/Jars.java
@@ -26,7 +26,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ConnManager;
 
 /**
  * Utility class; returns the locations of various jars.
diff --git a/src/java/org/apache/sqoop/util/LoggingAsyncSink.java b/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
index 5f20539..58a3c55 100644
--- a/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
@@ -26,8 +26,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * An AsyncSink that takes the contents of a stream and writes
  * it to log4j.
diff --git a/src/java/org/apache/sqoop/util/LoggingUtils.java b/src/java/org/apache/sqoop/util/LoggingUtils.java
index 06b012d..c478245 100644
--- a/src/java/org/apache/sqoop/util/LoggingUtils.java
+++ b/src/java/org/apache/sqoop/util/LoggingUtils.java
@@ -52,7 +52,6 @@
 
   public static void setDebugLevel() {
     Logger.getLogger("org.apache.sqoop").setLevel(Level.DEBUG);
-    Logger.getLogger("com.cloudera.apache").setLevel(Level.DEBUG);
   }
 }
 
diff --git a/src/java/org/apache/sqoop/util/NullAsyncSink.java b/src/java/org/apache/sqoop/util/NullAsyncSink.java
index a42e4e9..ffe4560 100644
--- a/src/java/org/apache/sqoop/util/NullAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/NullAsyncSink.java
@@ -26,8 +26,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * An AsyncSink that takes the contents of a stream and ignores it.
  */
diff --git a/src/java/org/apache/sqoop/util/OptionsFileUtil.java b/src/java/org/apache/sqoop/util/OptionsFileUtil.java
index 993ac1b..b487941 100644
--- a/src/java/org/apache/sqoop/util/OptionsFileUtil.java
+++ b/src/java/org/apache/sqoop/util/OptionsFileUtil.java
@@ -30,7 +30,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.Sqoop;
+import org.apache.sqoop.Sqoop;
 
 /**
 * Provides utility functions to read in an options file. An options file is a
diff --git a/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java b/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
index 7a861a0..ea39c6c 100644
--- a/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
+++ b/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
@@ -80,7 +80,7 @@
    */
   public void checkExit(int status) {
     LOG.debug("Rejecting System.exit call with status=" + status);
-    throw new com.cloudera.sqoop.util.ExitSecurityException(status);
+    throw new org.apache.sqoop.util.ExitSecurityException(status);
   }
 
   @Override
diff --git a/src/java/org/apache/sqoop/util/TaskId.java b/src/java/org/apache/sqoop/util/TaskId.java
index c543754..4caca73 100644
--- a/src/java/org/apache/sqoop/util/TaskId.java
+++ b/src/java/org/apache/sqoop/util/TaskId.java
@@ -23,7 +23,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.config.ConfigurationConstants;
+import org.apache.sqoop.config.ConfigurationConstants;
 
 /**
  * Utility class; returns task attempt Id of the current job
diff --git a/src/perftest/ExportStressTest.java b/src/perftest/ExportStressTest.java
index b5710e0..8c6f0f3 100644
--- a/src/perftest/ExportStressTest.java
+++ b/src/perftest/ExportStressTest.java
@@ -23,10 +23,10 @@
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.*;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ExportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ExportTool;
+import org.apache.sqoop.tool.SqoopTool;
 
 /**
  * Stress test export procedure by running a large-scale export to MySQL.
diff --git a/src/perftest/ExtConnFactoryTest.java b/src/perftest/ExtConnFactoryTest.java
index 6d2dec5..c614011 100644
--- a/src/perftest/ExtConnFactoryTest.java
+++ b/src/perftest/ExtConnFactoryTest.java
@@ -20,13 +20,13 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.manager.SqlManager;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.SqlManager;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Test external connection factory classes on the classpath.
diff --git a/src/perftest/ExtFactory.java b/src/perftest/ExtFactory.java
index ecde08c..3e3aec8 100644
--- a/src/perftest/ExtFactory.java
+++ b/src/perftest/ExtFactory.java
@@ -19,9 +19,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ManagerFactory;
 
 /**
  * An external ConnFactory used by ExtConnFactoryTest.
diff --git a/src/perftest/LobFilePerfTest.java b/src/perftest/LobFilePerfTest.java
index a16bb5e..f822f16 100644
--- a/src/perftest/LobFilePerfTest.java
+++ b/src/perftest/LobFilePerfTest.java
@@ -19,7 +19,7 @@
 import java.io.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
-import com.cloudera.sqoop.io.*;
+import org.apache.sqoop.io.*;
 
 /**
  * A simple benchmark to performance test LobFile reader/writer speed.
diff --git a/src/perftest/LobFileStressTest.java b/src/perftest/LobFileStressTest.java
index dafebb4..eebc0c1 100644
--- a/src/perftest/LobFileStressTest.java
+++ b/src/perftest/LobFileStressTest.java
@@ -20,7 +20,7 @@
 import java.util.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
-import com.cloudera.sqoop.io.*;
+import org.apache.sqoop.io.*;
 
 /**
  * Stress test LobFiles by writing a bunch of different files and reading
diff --git a/src/scripts/write-version-info.cmd b/src/scripts/write-version-info.cmd
index 247f5f3..8b70a68 100644
--- a/src/scripts/write-version-info.cmd
+++ b/src/scripts/write-version-info.cmd
@@ -29,9 +29,6 @@
 set version=%2
 set specifiedgithash=%3
 
-set outputdir=%buildroot%\src\com\cloudera\sqoop
-set outputfile=%outputdir%\SqoopVersion.java
-
 set newoutputdir=%buildroot%\src\org\apache\sqoop
 set newoutputfile=%newoutputdir%\SqoopVersion.java
 
@@ -45,31 +42,6 @@
 set host=%COMPUTERNAME%
 set compiledate=%date%-%time%
 
-mkdir %outputdir%
-
-(
-  echo.// generated by src/scripts/write-version-info.cmd
-  echo.package com.cloudera.sqoop;
-  echo.
-  echo./**
-  echo. * @deprecated use org.apache.sqoop.SqoopVersion instead
-  echo. * @see org.apache.sqoop.SqoopVersion
-  echo. */
-  echo.public final class SqoopVersion extends org.apache.sqoop.SqoopVersion {
-  echo.  public SqoopVersion^(^) {
-  echo.    super^(^);
-  echo.  }
-  echo.  public static final String VERSION =
-  echo.    org.apache.sqoop.SqoopVersion.VERSION;
-  echo.  public static final String GIT_HASH =
-  echo.    org.apache.sqoop.SqoopVersion.GIT_HASH;
-  echo.  public static final String COMPILE_USER =
-  echo.    org.apache.sqoop.SqoopVersion.COMPILE_USER;
-  echo.  public static final String COMPILE_DATE =
-  echo.    org.apache.sqoop.SqoopVersion.COMPILE_DATE;
-  echo.}
-) > %outputfile%
-
 mkdir %newoutputdir%
 
 (
diff --git a/src/scripts/write-version-info.sh b/src/scripts/write-version-info.sh
index 70ce3c7..a88ebf8 100755
--- a/src/scripts/write-version-info.sh
+++ b/src/scripts/write-version-info.sh
@@ -30,9 +30,6 @@
 version=$2
 specifiedgithash=$3
 
-outputdir="${buildroot}/src/com/cloudera/sqoop"
-outputfile="${outputdir}/SqoopVersion.java"
-
 newoutputdir="${buildroot}/src/org/apache/sqoop"
 newoutputfile="${newoutputdir}/SqoopVersion.java"
 
@@ -44,31 +41,6 @@
 host=`hostname`
 compiledate=`date`
 
-mkdir -p "${outputdir}"
-cat > "${outputfile}" <<EOF
-// generated by src/scripts/write-version-info.sh
-package com.cloudera.sqoop;
-
-/**
- * @deprecated use org.apache.sqoop.SqoopVersion instead
- * @see org.apache.sqoop.SqoopVersion
- */
-public final class SqoopVersion extends org.apache.sqoop.SqoopVersion {
-  public SqoopVersion() {
-    super();
-  }
-  public static final String VERSION =
-    org.apache.sqoop.SqoopVersion.VERSION;
-  public static final String GIT_HASH =
-    org.apache.sqoop.SqoopVersion.GIT_HASH;
-  public static final String COMPILE_USER =
-    org.apache.sqoop.SqoopVersion.COMPILE_USER;
-  public static final String COMPILE_DATE =
-    org.apache.sqoop.SqoopVersion.COMPILE_DATE;
-}
-EOF
-
-
 mkdir -p "${newoutputdir}"
 cat > "${newoutputfile}" <<EOF
 // generated by src/scripts/write-version-info.sh
diff --git a/src/test/com/cloudera/sqoop/TestSqoopOptions.java b/src/test/com/cloudera/sqoop/TestSqoopOptions.java
deleted file mode 100644
index dbdd2f1..0000000
--- a/src/test/com/cloudera/sqoop/TestSqoopOptions.java
+++ /dev/null
@@ -1,797 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.util.Properties;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.sqoop.manager.oracle.OracleUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.tool.BaseSqoopTool;
-import com.cloudera.sqoop.tool.ImportTool;
-
-import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test aspects of the SqoopOptions class.
- */
-public class TestSqoopOptions {
-
-  private Properties originalSystemProperties;
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  @Before
-  public void setup() {
-   originalSystemProperties = System.getProperties();
-  }
-
-  @After
-  public void tearDown() {
-    System.setProperties(originalSystemProperties);
-  }
-
-  // tests for the toChar() parser
-  @Test
-  public void testNormalChar() throws Exception {
-    assertEquals('a', SqoopOptions.toChar("a"));
-  }
-
-  @Test
-  public void testEmptyString() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on empty string");
-    SqoopOptions.toChar("");
-  }
-
-  @Test
-  public void testNullString() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on null string");
-    SqoopOptions.toChar(null);
-  }
-
-  @Test
-  public void testTooLong() throws Exception {
-    // Should just use the first character and log a warning.
-    assertEquals('x', SqoopOptions.toChar("xyz"));
-  }
-
-  @Test
-  public void testHexChar1() throws Exception {
-    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
-  }
-
-  @Test
-  public void testHexChar2() throws Exception {
-    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
-  }
-
-  @Test
-  public void testHexChar3() throws Exception {
-    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
-  }
-
-  @Test
-  public void testHexChar4() throws Exception {
-    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
-  }
-
-  @Test
-  public void testEscapeChar1() throws Exception {
-    assertEquals('\n', SqoopOptions.toChar("\\n"));
-  }
-
-  @Test
-  public void testEscapeChar2() throws Exception {
-    assertEquals('\\', SqoopOptions.toChar("\\\\"));
-  }
-
-  @Test
-  public void testEscapeChar3() throws Exception {
-    assertEquals('\\', SqoopOptions.toChar("\\"));
-  }
-
-  @Test
-  public void testWhitespaceToChar() throws Exception {
-    assertEquals(' ', SqoopOptions.toChar(" "));
-    assertEquals(' ', SqoopOptions.toChar("   "));
-    assertEquals('\t', SqoopOptions.toChar("\t"));
-  }
-
-  @Test
-  public void testUnknownEscape1() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
-    SqoopOptions.toChar("\\Q");
-  }
-
-  @Test
-  public void testUnknownEscape2() throws Exception {
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
-    SqoopOptions.toChar("\\nn");
-  }
-
-  @Test
-  public void testEscapeNul1() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0"));
-  }
-
-  @Test
-  public void testEscapeNul2() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\00"));
-  }
-
-  @Test
-  public void testEscapeNul3() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0000"));
-  }
-
-  @Test
-  public void testEscapeNul4() throws Exception {
-    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0x0"));
-  }
-
-  @Test
-  public void testOctalChar1() throws Exception {
-    assertEquals(04, SqoopOptions.toChar("\\04"));
-  }
-
-  @Test
-  public void testOctalChar2() throws Exception {
-    assertEquals(045, SqoopOptions.toChar("\\045"));
-  }
-
-  @Test
-  public void testErrOctalChar() throws Exception {
-    thrown.expect(NumberFormatException.class);
-    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous octal char");
-    SqoopOptions.toChar("\\095");
-  }
-
-  @Test
-  public void testErrHexChar() throws Exception {
-    thrown.expect(NumberFormatException.class);
-    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous hex char");
-    SqoopOptions.toChar("\\0x9K5");
-  }
-
-  private SqoopOptions parse(String [] argv) throws Exception {
-    ImportTool importTool = new ImportTool();
-    return importTool.parseArguments(argv, null, null, false);
-  }
-
-  // test that setting output delimiters also sets input delimiters
-  @Test
-  public void testDelimitersInherit() throws Exception {
-    String [] args = {
-      "--fields-terminated-by",
-      "|",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('|', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  // Test that setting output delimiters and setting input delims
-  // separately works.
-  @Test
-  public void testDelimOverride1() throws Exception {
-    String [] args = {
-      "--fields-terminated-by",
-      "|",
-      "--input-fields-terminated-by",
-      "*",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('*', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  // test that the order in which delims are specified doesn't matter
-  @Test
-  public void testDelimOverride2() throws Exception {
-    String [] args = {
-      "--input-fields-terminated-by",
-      "*",
-      "--fields-terminated-by",
-      "|",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals('*', opts.getInputFieldDelim());
-    assertEquals('|', opts.getOutputFieldDelim());
-  }
-
-  @Test
-  public void testBadNumMappers1() throws Exception {
-    String [] args = {
-      "--num-mappers",
-      "x",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on invalid --num-mappers argument");
-    parse(args);
-  }
-
-  @Test
-  public void testBadNumMappers2() throws Exception {
-    String [] args = {
-      "-m",
-      "x",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on invalid -m argument");
-    parse(args);
-  }
-
-  @Test
-  public void testGoodNumMappers() throws Exception {
-    String [] args = {
-      "-m",
-      "4",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals(4, opts.getNumMappers());
-  }
-
-  @Test
-  public void testHivePartitionParams() throws Exception {
-    String[] args = {
-        "--hive-partition-key", "ds",
-        "--hive-partition-value", "20110413",
-    };
-    SqoopOptions opts = parse(args);
-    assertEquals("ds", opts.getHivePartitionKey());
-    assertEquals("20110413", opts.getHivePartitionValue());
-  }
-
-  @Test
-  public void testBoundaryQueryParams() throws Exception {
-    String[] args = {
-      "--boundary-query", "select 1, 2",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("select 1, 2", opts.getBoundaryQuery());
-  }
-
-  @Test
-  public void testMapColumnHiveParams() throws Exception {
-    String[] args = {
-      "--map-column-hive", "id=STRING",
-    };
-
-    SqoopOptions opts = parse(args);
-    Properties mapping = opts.getMapColumnHive();
-    assertTrue(mapping.containsKey("id"));
-    assertEquals("STRING", mapping.get("id"));
-  }
-
-  @Test
-  public void testMalformedMapColumnHiveParams() throws Exception {
-    String[] args = {
-      "--map-column-hive", "id",
-    };
-    try {
-      SqoopOptions opts = parse(args);
-      fail("Malformed hive mapping does not throw exception");
-    } catch (Exception e) {
-      // Caught exception as expected
-    }
-  }
-
-  @Test
-  public void testMapColumnJavaParams() throws Exception {
-    String[] args = {
-      "--map-column-java", "id=String",
-    };
-
-    SqoopOptions opts = parse(args);
-    Properties mapping = opts.getMapColumnJava();
-    assertTrue(mapping.containsKey("id"));
-    assertEquals("String", mapping.get("id"));
-  }
-
-  @Test
-  public void testMalfromedMapColumnJavaParams() throws Exception {
-    String[] args = {
-      "--map-column-java", "id",
-    };
-    try {
-      SqoopOptions opts = parse(args);
-      fail("Malformed java mapping does not throw exception");
-    } catch (Exception e) {
-      // Caught exception as expected
-    }
-  }
-
-  @Test
-  public void testSkipDistCacheOption() throws Exception {
-    String[] args = {"--skip-dist-cache"};
-    SqoopOptions opts = parse(args);
-    assertTrue(opts.isSkipDistCache());
-  }
-
-  @Test
-  public void testPropertySerialization1() {
-    // Test that if we write a SqoopOptions out to a Properties,
-    // and then read it back in, we get all the same results.
-    SqoopOptions out = new SqoopOptions();
-    out.setUsername("user");
-    out.setConnectString("bla");
-    out.setNumMappers(4);
-    out.setAppendMode(true);
-    out.setHBaseTable("hbasetable");
-    out.setWarehouseDir("Warehouse");
-    out.setClassName("someclass");
-    out.setSplitByCol("somecol");
-    out.setSqlQuery("the query");
-    out.setPackageName("a.package");
-    out.setHiveImport(true);
-    out.setFetchSize(null);
-
-    Properties connParams = new Properties();
-    connParams.put("conn.timeout", "3000");
-    connParams.put("conn.buffer_size", "256");
-    connParams.put("conn.dummy", "dummy");
-    connParams.put("conn.foo", "bar");
-
-    out.setConnectionParams(connParams);
-
-    Properties outProps = out.writeProperties();
-
-    SqoopOptions in = new SqoopOptions();
-    in.loadProperties(outProps);
-
-    Properties inProps = in.writeProperties();
-
-    assertEquals("properties don't match", outProps, inProps);
-
-    assertEquals("connection params don't match",
-            connParams, out.getConnectionParams());
-    assertEquals("connection params don't match",
-            connParams, in.getConnectionParams());
-  }
-
-  @Test
-  public void testPropertySerialization2() {
-    // Test that if we write a SqoopOptions out to a Properties,
-    // and then read it back in, we get all the same results.
-    SqoopOptions out = new SqoopOptions();
-    out.setUsername("user");
-    out.setConnectString("bla");
-    out.setNumMappers(4);
-    out.setAppendMode(true);
-    out.setHBaseTable("hbasetable");
-    out.setWarehouseDir("Warehouse");
-    out.setClassName("someclass");
-    out.setSplitByCol("somecol");
-    out.setSqlQuery("the query");
-    out.setPackageName("a.package");
-    out.setHiveImport(true);
-    out.setFetchSize(42);
-
-    Properties connParams = new Properties();
-    connParams.setProperty("a", "value-a");
-    connParams.setProperty("b", "value-b");
-    connParams.setProperty("a.b", "value-a.b");
-    connParams.setProperty("a.b.c", "value-a.b.c");
-    connParams.setProperty("aaaaaaaaaa.bbbbbbb.cccccccc", "value-abc");
-
-    out.setConnectionParams(connParams);
-
-    Properties outProps = out.writeProperties();
-
-    SqoopOptions in = new SqoopOptions();
-    in.loadProperties(outProps);
-
-    Properties inProps = in.writeProperties();
-
-    assertEquals("properties don't match", outProps, inProps);
-    assertEquals("connection params don't match",
-            connParams, out.getConnectionParams());
-    assertEquals("connection params don't match",
-            connParams, in.getConnectionParams());
-  }
-
-  @Test
-  public void testDefaultTempRootDir() {
-    SqoopOptions opts = new SqoopOptions();
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testDefaultLoadedTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testLoadedTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    final String tempRootDir = "customRoot";
-    out.setTempRootDir(tempRootDir);
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals(tempRootDir, opts.getTempRootDir());
-  }
-
-  @Test
-  public void testNulledTempRootDir() {
-    SqoopOptions out = new SqoopOptions();
-    out.setTempRootDir(null);
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertEquals("_sqoop", opts.getTempRootDir());
-  }
-
-  @Test
-  public void testDefaultThrowOnErrorWithNotSetSystemProperty() {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    SqoopOptions opts = new SqoopOptions();
-    assertFalse(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultThrowOnErrorWithSetSystemProperty() {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    SqoopOptions opts = new SqoopOptions();
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultLoadedThrowOnErrorWithNotSetSystemProperty() {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertFalse(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testDefaultLoadedThrowOnErrorWithSetSystemProperty() {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    SqoopOptions out = new SqoopOptions();
-    Properties props = out.writeProperties();
-    SqoopOptions opts = new SqoopOptions();
-    opts.loadProperties(props);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testThrowOnErrorWithNotSetSystemProperty() throws Exception {
-    System.clearProperty(SQOOP_RETHROW_PROPERTY);
-    String[] args = {"--throw-on-error"};
-    SqoopOptions opts = parse(args);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void testThrowOnErrorWithSetSystemProperty() throws Exception {
-    String testSqoopRethrowProperty = "";
-    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
-    String[] args = {"--throw-on-error"};
-    SqoopOptions opts = parse(args);
-
-    assertTrue(opts.isThrowOnError());
-  }
-
-  @Test
-  public void defaultValueOfOracleEscapingDisabledShouldBeFalse() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-  }
-
-  @Test
-  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainTheSameValueForOracleEscapingDisabledAsSqoopOptionsProperty() {
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(opts.isOracleEscapingDisabled())));
-  }
-
-  @Test
-  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainTrueForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(true);
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  @Test
-  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainFalseForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(false);
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue() {
-    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
-    SqoopOptions opts = new SqoopOptions();
-
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledShouldBeAbleToSavedAndLoadedBackWithTheSameValue() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    opts.setOracleEscapingDisabled(false);
-    Properties out = opts.writeProperties();
-    opts = new SqoopOptions();
-    opts.loadProperties(out);
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(false)));
-  }
-
-  @Test
-  public void valueOfOracleEscapingDisabledShouldBeEqualToNullIfASqoopOptionsInstanceWasLoadedWhichDidntContainASavedValueForIt() {
-    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
-    SqoopOptions opts = new SqoopOptions();
-    Properties out = opts.writeProperties();
-    opts = new SqoopOptions();
-    opts.loadProperties(out);
-
-    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
-    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
-        is(equalTo(true)));
-  }
-
-  // test that hadoop-home is accepted as an option
-  @Test
-  public void testHadoopHome() throws Exception {
-    String [] args = {
-      "--hadoop-home",
-      "/usr/lib/hadoop",
-    };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
-  }
-
-  // test that hadoop-home is accepted as an option
-  @Test
-  public void testHadoopMapRedOverridesHadoopHome() throws Exception {
-    String[] args = { "--hadoop-home", "/usr/lib/hadoop-ignored", "--hadoop-mapred-home", "/usr/lib/hadoop", };
-
-    SqoopOptions opts = parse(args);
-    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
-  }
-
-
-  //helper method to validate given import options
-  private void validateImportOptions(String[] extraArgs) throws Exception {
-    String [] args = {
-      "--connect", HsqldbTestServer.getUrl(),
-      "--table", "test",
-      "-m", "1",
-    };
-    ImportTool importTool = new ImportTool();
-    SqoopOptions opts = importTool.parseArguments(
-        (String []) ArrayUtils.addAll(args, extraArgs), null, null, false);
-    importTool.validateOptions(opts);
-  }
-
-  //test compatability of --detele-target-dir with import
-  @Test
-  public void testDeteleTargetDir() throws Exception {
-    String [] extraArgs = {
-      "--delete-target-dir",
-    };
-    try {
-      validateImportOptions(extraArgs);
-    } catch(SqoopOptions.InvalidOptionsException ioe) {
-      fail("Unexpected InvalidOptionsException" + ioe);
-    }
-  }
-
-  //test incompatability of --delete-target-dir & --append with import
-  @Test
-  public void testDeleteTargetDirWithAppend() throws Exception {
-    String [] extraArgs = {
-      "--append",
-      "--delete-target-dir",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on incompatibility of " +
-        "--delete-target-dir and --append");
-    validateImportOptions(extraArgs);
-  }
-
-  //test incompatability of --delete-target-dir with incremental import
-  @Test
-  public void testDeleteWithIncrementalImport() throws Exception {
-    String [] extraArgs = {
-      "--incremental", "append",
-      "--delete-target-dir",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on incompatibility of " +
-        "--delete-target-dir and --incremental");
-    validateImportOptions(extraArgs);
-  }
-
-  // test that hbase bulk load import with table name and target dir
-  // passes validation
-  @Test
-  public void testHBaseBulkLoad() throws Exception {
-    String [] extraArgs = {
-        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
-        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test",
-        longArgument(BaseSqoopTool.HBASE_TABLE_ARG), "test_table",
-        longArgument(BaseSqoopTool.HBASE_COL_FAM_ARG), "d"};
-
-    validateImportOptions(extraArgs);
-  }
-
-  // test that hbase bulk load import with a missing --hbase-table fails
-  @Test
-  public void testHBaseBulkLoadMissingHbaseTable() throws Exception {
-    String [] extraArgs = {
-        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
-        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException because of missing --hbase-table");
-    validateImportOptions(extraArgs);
-  }
-
-  private static String longArgument(String argument) {
-    return String.format("--%s", argument);
-  }
-
-  @Test
-  public void testRelaxedIsolation() throws Exception {
-    String extraArgs[] = {
-      "--relaxed-isolation",
-    };
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testResetToOneMapper() throws Exception {
-    String extraArgs[] = {
-      "--autoreset-to-one-mapper",
-    };
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testResetToOneMapperAndSplitBy() throws Exception {
-    String extraArgs[] = {
-      "--autoreset-to-one-mapper",
-      "--split-by",
-      "col0",
-    };
-
-    thrown.expect(SqoopOptions.InvalidOptionsException.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception on incompatibility of " +
-        "--autoreset-to-one-mapper and --split-by");
-    validateImportOptions(extraArgs);
-  }
-
-  @Test
-  public void testEscapeMapingColumnNames() throws Exception {
-    SqoopOptions opts = new SqoopOptions();
-    // enabled by default
-    assertTrue(opts.getEscapeMappingColumnNamesEnabled());
-
-    String [] args = {
-        "--" + org.apache.sqoop.tool.BaseSqoopTool.ESCAPE_MAPPING_COLUMN_NAMES_ENABLED,
-        "false",
-    };
-
-    opts = parse(args);
-    assertFalse(opts.getEscapeMappingColumnNamesEnabled());
-  }
-
-}
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
deleted file mode 100644
index dbf0dde..0000000
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.hive;
-
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.util.SqlTypeMap;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.sql.Types;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-
-/**
- * Test Hive DDL statement generation.
- */
-public class TestTableDefWriter {
-
-  public static final Log LOG = LogFactory.getLog(
-      TestTableDefWriter.class.getName());
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  // Test getHiveOctalCharCode and expect an IllegalArgumentException.
-  private void expectExceptionInCharCode(int charCode) {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
-    TableDefWriter.getHiveOctalCharCode(charCode);
-  }
-
-  @Test
-  public void testHiveOctalCharCode() {
-    assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
-    assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
-    assertEquals("\\012", TableDefWriter.getHiveOctalCharCode((int) '\n'));
-    assertEquals("\\177", TableDefWriter.getHiveOctalCharCode(0177));
-
-    expectExceptionInCharCode(4096);
-    expectExceptionInCharCode(0200);
-    expectExceptionInCharCode(254);
-  }
-
-  @Test
-  public void testDifferentTableNames() throws Exception {
-    Configuration conf = new Configuration();
-    SqoopOptions options = new SqoopOptions();
-    TableDefWriter writer = new TableDefWriter(options, null,
-        "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    LOG.debug("Create table stmt: " + createTable);
-    LOG.debug("Load data stmt: " + loadData);
-
-    // Assert that the statements generated have the form we expect.
-    assertTrue(createTable.indexOf(
-        "CREATE TABLE IF NOT EXISTS `outputTable`") != -1);
-    assertTrue(loadData.indexOf("INTO TABLE `outputTable`") != -1);
-    assertTrue(loadData.indexOf("/inputTable'") != -1);
-  }
-
-  @Test
-  public void testDifferentTargetDirs() throws Exception {
-    String targetDir = "targetDir";
-    String inputTable = "inputTable";
-    String outputTable = "outputTable";
-
-    Configuration conf = new Configuration();
-    SqoopOptions options = new SqoopOptions();
-    // Specify a different target dir from input table name
-    options.setTargetDir(targetDir);
-    TableDefWriter writer = new TableDefWriter(options, null,
-        inputTable, outputTable, conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    LOG.debug("Create table stmt: " + createTable);
-    LOG.debug("Load data stmt: " + loadData);
-
-    // Assert that the statements generated have the form we expect.
-    assertTrue(createTable.indexOf(
-        "CREATE TABLE IF NOT EXISTS `" + outputTable + "`") != -1);
-    assertTrue(loadData.indexOf("INTO TABLE `" + outputTable + "`") != -1);
-    assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
-  }
-
-  @Test
-  public void testPartitions() throws Exception {
-    String[] args = {
-        "--hive-partition-key", "ds",
-        "--hive-partition-value", "20110413",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    assertNotNull(createTable);
-    assertNotNull(loadData);
-    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
-        + "PARTITIONED BY (ds STRING) "
-        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
-        + "LINES TERMINATED BY '\\012' STORED AS TEXTFILE", createTable);
-    assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
-  }
-
-  @Test
-  public void testLzoSplitting() throws Exception {
-    String[] args = {
-        "--compress",
-        "--compression-codec", "lzop",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, "inputTable", "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    String loadData = writer.getLoadDataStmt();
-
-    assertNotNull(createTable);
-    assertNotNull(loadData);
-    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
-        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
-        + "LINES TERMINATED BY '\\012' STORED AS "
-        + "INPUTFORMAT 'com.hadoop.mapred.DeprecatedLzoTextInputFormat' "
-        + "OUTPUTFORMAT "
-        + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'",
-        createTable);
-  }
-
-  @Test
-  public void testUserMappingNoDecimal() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value=INTEGER",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    colTypes.put("value", Types.VARCHAR);
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-
-    assertNotNull(createTable);
-
-    assertTrue(createTable.contains("`id` STRING"));
-    assertTrue(createTable.contains("`value` INTEGER"));
-
-    assertFalse(createTable.contains("`id` INTEGER"));
-    assertFalse(createTable.contains("`value` STRING"));
-  }
-
-  @Test
-  public void testUserMappingWithDecimal() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value2=DECIMAL(13,5),value1=INTEGER," +
-                             "value3=DECIMAL(4,5),value4=VARCHAR(255)",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-        new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    colTypes.put("value1", Types.VARCHAR);
-    colTypes.put("value2", Types.DOUBLE);
-    colTypes.put("value3", Types.FLOAT);
-    colTypes.put("value4", Types.CHAR);
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-
-    assertNotNull(createTable);
-
-    assertTrue(createTable.contains("`id` STRING"));
-    assertTrue(createTable.contains("`value1` INTEGER"));
-    assertTrue(createTable.contains("`value2` DECIMAL(13,5)"));
-    assertTrue(createTable.contains("`value3` DECIMAL(4,5)"));
-    assertTrue(createTable.contains("`value4` VARCHAR(255)"));
-
-    assertFalse(createTable.contains("`id` INTEGER"));
-    assertFalse(createTable.contains("`value1` STRING"));
-    assertFalse(createTable.contains("`value2` DOUBLE"));
-    assertFalse(createTable.contains("`value3` FLOAT"));
-    assertFalse(createTable.contains("`value4` CHAR"));
-  }
-
-  @Test
-  public void testUserMappingFailWhenCantBeApplied() throws Exception {
-    String[] args = {
-        "--map-column-hive", "id=STRING,value=INTEGER",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    colTypes.put("id", Types.INTEGER);
-    writer.setColumnTypes(colTypes);
-
-    thrown.expect(IllegalArgumentException.class);
-    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException on non applied Hive type mapping");
-    String createTable = writer.getCreateTableStmt();
-  }
-
-  @Test
-  public void testHiveDatabase() throws Exception {
-    String[] args = {
-        "--hive-database", "db",
-    };
-    Configuration conf = new Configuration();
-    SqoopOptions options =
-      new ImportTool().parseArguments(args, null, null, false);
-    TableDefWriter writer = new TableDefWriter(options,
-        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
-
-    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
-    writer.setColumnTypes(colTypes);
-
-    String createTable = writer.getCreateTableStmt();
-    assertNotNull(createTable);
-    assertTrue(createTable.contains("`db`.`outputTable`"));
-
-    String loadStmt = writer.getLoadDataStmt();
-    assertNotNull(loadStmt);
-    assertTrue(createTable.contains("`db`.`outputTable`"));
-  }
-
-}
diff --git a/src/test/com/cloudera/sqoop/manager/TestSqlManager.java b/src/test/com/cloudera/sqoop/manager/TestSqlManager.java
deleted file mode 100644
index bffff4a..0000000
--- a/src/test/com/cloudera/sqoop/manager/TestSqlManager.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop.manager;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
-/**
- * Test methods of the generic SqlManager implementation.
- */
-public class TestSqlManager {
-
-  public static final Log LOG = LogFactory.getLog(
-      TestSqlManager.class.getName());
-
-  /** the name of a table that doesn't exist. */
-  static final String MISSING_TABLE = "MISSING_TABLE";
-
-  // instance variables populated during setUp, used during tests
-  private HsqldbTestServer testServer;
-  private ConnManager manager;
-
-  @Before
-  public void setUp() {
-    testServer = new HsqldbTestServer();
-    try {
-      testServer.resetServer();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    } catch (ClassNotFoundException cnfe) {
-      LOG.error("Could not find class for db driver: " + cnfe.toString());
-      fail("Could not find class for db driver: " + cnfe.toString());
-    }
-
-    manager = testServer.getManager();
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      manager.close();
-    } catch (SQLException sqlE) {
-      LOG.error("Got SQLException: " + sqlE.toString());
-      fail("Got SQLException: " + sqlE.toString());
-    }
-  }
-
-  @Test
-  public void testListColNames() {
-    String [] colNames = manager.getColumnNames(
-        HsqldbTestServer.getTableName());
-    assertNotNull("manager returned no colname list", colNames);
-    assertEquals("Table list should be length 2", 2, colNames.length);
-    String [] knownFields = HsqldbTestServer.getFieldNames();
-    for (int i = 0; i < colNames.length; i++) {
-      assertEquals(knownFields[i], colNames[i]);
-    }
-  }
-
-  @Test
-  public void testListColTypes() {
-    Map<String, Integer> types = manager.getColumnTypes(
-        HsqldbTestServer.getTableName());
-
-    assertNotNull("manager returned no types map", types);
-    assertEquals("Map should be size=2", 2, types.size());
-    assertEquals(types.get("INTFIELD1").intValue(), Types.INTEGER);
-    assertEquals(types.get("INTFIELD2").intValue(), Types.INTEGER);
-  }
-
-  @Test
-  public void testMissingTableColNames() {
-    String [] colNames = manager.getColumnNames(MISSING_TABLE);
-    assertNull("No column names should be returned for missing table",
-        colNames);
-  }
-
-  @Test
-  public void testMissingTableColTypes() {
-    Map<String, Integer> colTypes = manager.getColumnTypes(MISSING_TABLE);
-    assertNull("No column types should be returned for missing table",
-        colTypes);
-  }
-
-  @Test
-  public void testListTables() {
-    String [] tables = manager.listTables();
-    for (String table : tables) {
-      System.err.println("Got table: " + table);
-    }
-    assertNotNull("manager returned no table list", tables);
-    assertEquals("Table list should be length 1", 1, tables.length);
-    assertEquals(HsqldbTestServer.getTableName(), tables[0]);
-  }
-
-  // constants related to testReadTable()
-  static final int EXPECTED_NUM_ROWS = 4;
-  static final int EXPECTED_COL1_SUM = 16;
-  static final int EXPECTED_COL2_SUM = 20;
-
-  @Test
-  public void testReadTable() {
-    ResultSet results = null;
-    try {
-      results = manager.readTable(HsqldbTestServer.getTableName(),
-          HsqldbTestServer.getFieldNames());
-
-      assertNotNull("ResultSet from readTable() is null!", results);
-
-      ResultSetMetaData metaData = results.getMetaData();
-      assertNotNull("ResultSetMetadata is null in readTable()", metaData);
-
-      // ensure that we get the correct number of columns back
-      assertEquals("Number of returned columns was unexpected!",
-          metaData.getColumnCount(),
-          HsqldbTestServer.getFieldNames().length);
-
-      // should get back 4 rows. They are:
-      // 1 2
-      // 3 4
-      // 5 6
-      // 7 8
-      // .. so while order isn't guaranteed, we should get back 16 on the left
-      // and 20 on the right.
-      int sumCol1 = 0, sumCol2 = 0, rowCount = 0;
-      while (results.next()) {
-        rowCount++;
-        sumCol1 += results.getInt(1);
-        sumCol2 += results.getInt(2);
-      }
-
-      assertEquals("Expected 4 rows back", EXPECTED_NUM_ROWS, rowCount);
-      assertEquals("Expected left sum of 16", EXPECTED_COL1_SUM, sumCol1);
-      assertEquals("Expected right sum of 20", EXPECTED_COL2_SUM, sumCol2);
-    } catch (SQLException sqlException) {
-      fail("SQL Exception: " + sqlException.toString());
-    } finally {
-      if (null != results) {
-        try {
-          results.close();
-        } catch (SQLException sqlE) {
-          fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
-        }
-      }
-
-      manager.release();
-    }
-  }
-
-  @Test
-  public void testReadMissingTable() {
-    ResultSet results = null;
-    try {
-      String [] colNames = { "*" };
-      results = manager.readTable(MISSING_TABLE, colNames);
-      assertNull("Expected null resultset from readTable(MISSING_TABLE)",
-          results);
-    } catch (SQLException sqlException) {
-      // we actually expect this. pass.
-    } finally {
-      if (null != results) {
-        try {
-          results.close();
-        } catch (SQLException sqlE) {
-          fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
-        }
-      }
-
-      manager.release();
-    }
-  }
-
-  @Test
-  public void getPrimaryKeyFromMissingTable() {
-    String primaryKey = manager.getPrimaryKey(MISSING_TABLE);
-    assertNull("Expected null pkey for missing table", primaryKey);
-  }
-
-  @Test
-  public void getPrimaryKeyFromTableWithoutKey() {
-    String primaryKey = manager.getPrimaryKey(HsqldbTestServer.getTableName());
-    assertNull("Expected null pkey for table without key", primaryKey);
-  }
-
-  // constants for getPrimaryKeyFromTable()
-  static final String TABLE_WITH_KEY = "TABLE_WITH_KEY";
-  static final String KEY_FIELD_NAME = "KEYFIELD";
-
-  @Test
-  public void getPrimaryKeyFromTable() {
-    // first, create a table with a primary key
-    Connection conn = null;
-    try {
-      conn = testServer.getConnection();
-      PreparedStatement statement = conn.prepareStatement(
-          "CREATE TABLE " + TABLE_WITH_KEY + "(" + KEY_FIELD_NAME
-          + " INT NOT NULL PRIMARY KEY, foo INT)",
-          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-      statement.executeUpdate();
-      statement.close();
-    } catch (SQLException sqlException) {
-      fail("Could not create table with primary key: "
-          + sqlException.toString());
-    } finally {
-      if (null != conn) {
-        try {
-          conn.close();
-        } catch (SQLException sqlE) {
-          LOG.warn("Got SQLException during close: " + sqlE.toString());
-        }
-      }
-    }
-
-    String primaryKey = manager.getPrimaryKey(TABLE_WITH_KEY);
-    assertEquals("Expected null pkey for table without key", primaryKey,
-        KEY_FIELD_NAME);
-  }
-}
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java
deleted file mode 100644
index e5a7777..0000000
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestIntegerSplitter.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-import org.junit.Test;
-
-/**
- * Test that the IntegerSplitter generates sane splits.
- *
- * @deprecated use org.apache.sqoop.mapreduce.db.TestIntegerSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.TestIntegerSplitter
- */
-public class TestIntegerSplitter
-  extends org.apache.sqoop.mapreduce.db.TestIntegerSplitter {
-
-  @Test
-  public void testDummy() {
-    // Nothing to do
-  }
-}
-
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java
deleted file mode 100644
index e585310..0000000
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestTextSplitter.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.cloudera.sqoop.mapreduce.db;
-
-
-import org.junit.Test;
-
-/**
- * Test that the TextSplitter implementation creates a sane set of splits.
- * @deprecated use org.apache.sqoop.mapreduce.db.TestTextSplitter instead.
- * @see org.apache.sqoop.mapreduce.db.TestTextSplitter
- */
-public class TestTextSplitter extends
-  org.apache.sqoop.mapreduce.db.TestTextSplitter {
-
-  @Test
-  public void testDummy() {
-    // Nothing to do
-  }
-
-}
-
diff --git a/src/test/findbugsExcludeFile.xml b/src/test/findbugsExcludeFile.xml
deleted file mode 100644
index 8aa4ed5..0000000
--- a/src/test/findbugsExcludeFile.xml
+++ /dev/null
@@ -1,78 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-  Copyright 2011 The Apache Software Foundation
- 
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
- 
-      http://www.apache.org/licenses/LICENSE-2.0
- 
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-
-
-<!--
-  This file enumerates all the findbugs warnings that we want to suppress.
-  If your change triggers a spurious warning, add an exclusion to this file
-  so that it does not appear in the official report.
-
-  For each exception you add, include a comment in your <Match> block
-  explaining why this is not a bug.
--->
-<FindBugsFilter>
-  <Match>
-    <!-- SQL db can return null for a boolean column; so can we. -->
-    <Class name="com.cloudera.sqoop.lib.JdbcWritableBridge" />
-    <Method name="readBoolean" />
-    <Bug pattern="NP_BOOLEAN_RETURN_NULL" />
-  </Match>
-  <Match>
-    <!-- This mapper intentially triggers an NPE to cause an exception
-         which the test case much catch. -->
-    <Class name="com.cloudera.sqoop.mapreduce.TestImportJob$NullDereferenceMapper" />
-    <Method name="map" />
-    <Bug pattern="NP_ALWAYS_NULL" />
-  </Match>
-  <Match>
-    <!-- createRootTable() allows a user-specified table name retrieved
-         from properties. This instance is allowed for now.
-    -->
-    <Class name="com.cloudera.sqoop.metastore.GenericJobStorage" />
-    <Method name="createRootTable" />
-    <Bug pattern="SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" />
-  </Match>
-
-  <!-- The following broad categories suppress warnings in test code that do
-       not need to be rigidly upheld. -->
-  <Match>
-    <!-- Performance warnings are ignored in test code. -->
-    <Class name="~com\.cloudera\.sqoop\..*Test.*" />
-    <Bug category="PERFORMANCE" />
-  </Match>
-  <Match>
-    <!-- More performance warnings to suppress in tests. -->
-    <Class name="~com\.cloudera\.sqoop\..*Test.*" />
-    <Bug pattern="SBSC_USE_STRINGBUFFER_CONCATENATION" />
-  </Match>
-  <Match>
-    <!-- Security warnings are ignored in test code. -->
-    <Class name="~com\.cloudera\.sqoop\..*Test.*" />
-    <Bug category="SECURITY" />
-  </Match>
-  <Match>
-    <!-- Ok to use methods to generate SQL statements in tests. -->
-    <Class name="~com\.cloudera\.sqoop\..*Test.*" />
-    <Bug pattern="SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING" />
-  </Match>
-
-</FindBugsFilter>
diff --git a/src/test/com/cloudera/sqoop/TestAllTables.java b/src/test/org/apache/sqoop/TestAllTables.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/TestAllTables.java
rename to src/test/org/apache/sqoop/TestAllTables.java
index 232b82f..56d1f57 100644
--- a/src/test/com/cloudera/sqoop/TestAllTables.java
+++ b/src/test/org/apache/sqoop/TestAllTables.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.*;
 import java.sql.SQLException;
@@ -32,9 +32,9 @@
 import org.junit.Before;
 import org.junit.After;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportAllTablesTool;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportAllTablesTool;
 import org.junit.Test;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
diff --git a/src/test/com/cloudera/sqoop/TestAppendUtils.java b/src/test/org/apache/sqoop/TestAppendUtils.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/TestAppendUtils.java
rename to src/test/org/apache/sqoop/TestAppendUtils.java
index 486afee..f14fc6a 100644
--- a/src/test/com/cloudera/sqoop/TestAppendUtils.java
+++ b/src/test/org/apache/sqoop/TestAppendUtils.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -34,13 +34,13 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.ImportJobContext;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.AppendUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.AppendUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/org/apache/sqoop/TestAutoResetMapper.java b/src/test/org/apache/sqoop/TestAutoResetMapper.java
index fd29c2d..1ad9c33 100644
--- a/src/test/org/apache/sqoop/TestAutoResetMapper.java
+++ b/src/test/org/apache/sqoop/TestAutoResetMapper.java
@@ -36,7 +36,7 @@
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestAvroExport.java b/src/test/org/apache/sqoop/TestAvroExport.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/TestAvroExport.java
rename to src/test/org/apache/sqoop/TestAvroExport.java
index ea456e2..d1f1054 100644
--- a/src/test/com/cloudera/sqoop/TestAvroExport.java
+++ b/src/test/org/apache/sqoop/TestAvroExport.java
@@ -16,14 +16,14 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import com.google.common.collect.Lists;
 
 import java.io.IOException;
diff --git a/src/test/com/cloudera/sqoop/TestAvroImport.java b/src/test/org/apache/sqoop/TestAvroImport.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/TestAvroImport.java
rename to src/test/org/apache/sqoop/TestAvroImport.java
index da79c7a..1172fc5 100644
--- a/src/test/com/cloudera/sqoop/TestAvroImport.java
+++ b/src/test/org/apache/sqoop/TestAvroImport.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.File;
 import java.io.IOException;
@@ -41,10 +41,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java b/src/test/org/apache/sqoop/TestAvroImportExportRoundtrip.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
rename to src/test/org/apache/sqoop/TestAvroImportExportRoundtrip.java
index 8e718c3..6de09f3 100644
--- a/src/test/com/cloudera/sqoop/TestAvroImportExportRoundtrip.java
+++ b/src/test/org/apache/sqoop/TestAvroImportExportRoundtrip.java
@@ -16,12 +16,12 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ExportTool;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ExportTool;
 
 import java.io.IOException;
 import java.sql.Connection;
diff --git a/src/test/org/apache/sqoop/TestBigDecimalExport.java b/src/test/org/apache/sqoop/TestBigDecimalExport.java
index 414e3d9..ccea173 100644
--- a/src/test/org/apache/sqoop/TestBigDecimalExport.java
+++ b/src/test/org/apache/sqoop/TestBigDecimalExport.java
@@ -33,8 +33,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/org/apache/sqoop/TestBigDecimalImport.java b/src/test/org/apache/sqoop/TestBigDecimalImport.java
index d265d17..286f54e 100644
--- a/src/test/org/apache/sqoop/TestBigDecimalImport.java
+++ b/src/test/org/apache/sqoop/TestBigDecimalImport.java
@@ -28,8 +28,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestBoundaryQuery.java b/src/test/org/apache/sqoop/TestBoundaryQuery.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/TestBoundaryQuery.java
rename to src/test/org/apache/sqoop/TestBoundaryQuery.java
index 925bec3..4461006 100644
--- a/src/test/com/cloudera/sqoop/TestBoundaryQuery.java
+++ b/src/test/org/apache/sqoop/TestBoundaryQuery.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -26,14 +26,15 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestColumnTypes.java b/src/test/org/apache/sqoop/TestColumnTypes.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/TestColumnTypes.java
rename to src/test/org/apache/sqoop/TestColumnTypes.java
index 8397a37..9674030 100644
--- a/src/test/com/cloudera/sqoop/TestColumnTypes.java
+++ b/src/test/org/apache/sqoop/TestColumnTypes.java
@@ -16,12 +16,12 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.testutil.ManagerCompatTestCase;
 
 /**
  * Test that each of the different SQL Column types that we support
diff --git a/src/test/com/cloudera/sqoop/TestCompression.java b/src/test/org/apache/sqoop/TestCompression.java
similarity index 93%
rename from src/test/com/cloudera/sqoop/TestCompression.java
rename to src/test/org/apache/sqoop/TestCompression.java
index 7110510..6480c44 100644
--- a/src/test/com/cloudera/sqoop/TestCompression.java
+++ b/src/test/org/apache/sqoop/TestCompression.java
@@ -16,16 +16,17 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 
 import java.io.BufferedReader;
 import java.io.IOException;
diff --git a/src/test/com/cloudera/sqoop/TestConnFactory.java b/src/test/org/apache/sqoop/TestConnFactory.java
similarity index 93%
rename from src/test/com/cloudera/sqoop/TestConnFactory.java
rename to src/test/org/apache/sqoop/TestConnFactory.java
index b362e49..fb6c940 100644
--- a/src/test/com/cloudera/sqoop/TestConnFactory.java
+++ b/src/test/org/apache/sqoop/TestConnFactory.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -26,11 +26,13 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.tool.ImportTool;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.tool.ImportTool;
 import org.junit.Test;
 
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/com/cloudera/sqoop/TestDirectImport.java b/src/test/org/apache/sqoop/TestDirectImport.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/TestDirectImport.java
rename to src/test/org/apache/sqoop/TestDirectImport.java
index 927b1fe..edad143 100644
--- a/src/test/com/cloudera/sqoop/TestDirectImport.java
+++ b/src/test/org/apache/sqoop/TestDirectImport.java
@@ -16,11 +16,11 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
diff --git a/src/test/com/cloudera/sqoop/TestExport.java b/src/test/org/apache/sqoop/TestExport.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/TestExport.java
rename to src/test/org/apache/sqoop/TestExport.java
index 7948dae..f32f620 100644
--- a/src/test/com/cloudera/sqoop/TestExport.java
+++ b/src/test/org/apache/sqoop/TestExport.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -32,8 +32,9 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -45,11 +46,11 @@
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.tool.CodeGenTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestExportUpdate.java b/src/test/org/apache/sqoop/TestExportUpdate.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/TestExportUpdate.java
rename to src/test/org/apache/sqoop/TestExportUpdate.java
index 683f591..d28edd7 100644
--- a/src/test/com/cloudera/sqoop/TestExportUpdate.java
+++ b/src/test/org/apache/sqoop/TestExportUpdate.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.BufferedWriter;
 import java.io.IOException;
@@ -32,8 +32,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
diff --git a/src/test/org/apache/sqoop/TestExportUsingProcedure.java b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
index 1783676..64e7af3 100644
--- a/src/test/org/apache/sqoop/TestExportUsingProcedure.java
+++ b/src/test/org/apache/sqoop/TestExportUsingProcedure.java
@@ -37,8 +37,6 @@
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
 import org.junit.Rule;
 import org.junit.rules.TestName;
 
diff --git a/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java b/src/test/org/apache/sqoop/TestFreeFormQueryImport.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java
rename to src/test/org/apache/sqoop/TestFreeFormQueryImport.java
index 4f9e652..2df4352 100644
--- a/src/test/com/cloudera/sqoop/TestFreeFormQueryImport.java
+++ b/src/test/org/apache/sqoop/TestFreeFormQueryImport.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -34,8 +34,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.After;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/TestIncrementalImport.java b/src/test/org/apache/sqoop/TestIncrementalImport.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/TestIncrementalImport.java
rename to src/test/org/apache/sqoop/TestIncrementalImport.java
index 1faa52b..1ab9802 100644
--- a/src/test/com/cloudera/sqoop/TestIncrementalImport.java
+++ b/src/test/org/apache/sqoop/TestIncrementalImport.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -31,7 +31,7 @@
 import java.util.Arrays;
 import java.util.List;
 
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -40,14 +40,14 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.HsqldbManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.JobTool;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.HsqldbManager;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.JobTool;
 import org.apache.sqoop.metastore.AutoGenericJobStorage;
 import org.junit.Before;
 import org.junit.Rule;
diff --git a/src/test/com/cloudera/sqoop/TestMerge.java b/src/test/org/apache/sqoop/TestMerge.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/TestMerge.java
rename to src/test/org/apache/sqoop/TestMerge.java
index 9639f84..8eef8d4 100644
--- a/src/test/com/cloudera/sqoop/TestMerge.java
+++ b/src/test/org/apache/sqoop/TestMerge.java
@@ -16,28 +16,24 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.sql.Connection;
-import java.sql.Timestamp;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.LinkedList;
 import java.util.List;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.SqoopOptions.FileLayout;
-import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.MergeTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.tool.CodeGenTool;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.MergeTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.file.SeekableInput;
diff --git a/src/test/com/cloudera/sqoop/TestMultiCols.java b/src/test/org/apache/sqoop/TestMultiCols.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/TestMultiCols.java
rename to src/test/org/apache/sqoop/TestMultiCols.java
index 94721b8..1c932e9 100644
--- a/src/test/com/cloudera/sqoop/TestMultiCols.java
+++ b/src/test/org/apache/sqoop/TestMultiCols.java
@@ -16,14 +16,14 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.junit.Test;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/TestMultiMaps.java b/src/test/org/apache/sqoop/TestMultiMaps.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/TestMultiMaps.java
rename to src/test/org/apache/sqoop/TestMultiMaps.java
index c5f487b..050e268 100644
--- a/src/test/com/cloudera/sqoop/TestMultiMaps.java
+++ b/src/test/org/apache/sqoop/TestMultiMaps.java
@@ -16,13 +16,12 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import com.cloudera.sqoop.testutil.*;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -33,10 +32,12 @@
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.*;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestParquetExport.java b/src/test/org/apache/sqoop/TestParquetExport.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/TestParquetExport.java
rename to src/test/org/apache/sqoop/TestParquetExport.java
index 680fd73..fdaa5d8 100644
--- a/src/test/com/cloudera/sqoop/TestParquetExport.java
+++ b/src/test/org/apache/sqoop/TestParquetExport.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import com.google.common.collect.Lists;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
diff --git a/src/test/com/cloudera/sqoop/TestParquetImport.java b/src/test/org/apache/sqoop/TestParquetImport.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/TestParquetImport.java
rename to src/test/org/apache/sqoop/TestParquetImport.java
index 4ff025b..379529a 100644
--- a/src/test/com/cloudera/sqoop/TestParquetImport.java
+++ b/src/test/org/apache/sqoop/TestParquetImport.java
@@ -16,20 +16,18 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.Schema.Type;
-import org.apache.avro.file.DataFileReader;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.kitesdk.data.CompressionType;
 import org.kitesdk.data.Dataset;
diff --git a/src/test/com/cloudera/sqoop/TestQuery.java b/src/test/org/apache/sqoop/TestQuery.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/TestQuery.java
rename to src/test/org/apache/sqoop/TestQuery.java
index 0836b8d..9dfad07 100644
--- a/src/test/com/cloudera/sqoop/TestQuery.java
+++ b/src/test/org/apache/sqoop/TestQuery.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -27,14 +27,15 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/TestSplitBy.java b/src/test/org/apache/sqoop/TestSplitBy.java
similarity index 91%
rename from src/test/com/cloudera/sqoop/TestSplitBy.java
rename to src/test/org/apache/sqoop/TestSplitBy.java
index c13fbcc..2518935 100644
--- a/src/test/com/cloudera/sqoop/TestSplitBy.java
+++ b/src/test/org/apache/sqoop/TestSplitBy.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -27,14 +27,15 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
index fb89a0b..b3579ac 100644
--- a/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
+++ b/src/test/org/apache/sqoop/TestSqoopJobDataPublisher.java
@@ -18,11 +18,11 @@
 
 package org.apache.sqoop;
 
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
@@ -107,7 +107,7 @@
 
         // set up our mock hive shell to compare our generated script
         // against the correct expected one.
-        com.cloudera.sqoop.SqoopOptions options = getSqoopOptions(args, tool);
+        SqoopOptions options = getSqoopOptions(args, tool);
         String hiveHome = options.getHiveHome();
         assertNotNull("hive.home was not set", hiveHome);
         String testDataPath = new Path(new Path(hiveHome),
@@ -119,8 +119,8 @@
         runImport(tool, args);
     }
 
-    private com.cloudera.sqoop.SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
-        com.cloudera.sqoop.SqoopOptions opts = null;
+    private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
+        SqoopOptions opts = null;
         try {
             opts = tool.parseArguments(args, null, null, true);
         } catch (Exception e) {
diff --git a/src/test/org/apache/sqoop/TestSqoopOptions.java b/src/test/org/apache/sqoop/TestSqoopOptions.java
index 94d9089..16901ca 100644
--- a/src/test/org/apache/sqoop/TestSqoopOptions.java
+++ b/src/test/org/apache/sqoop/TestSqoopOptions.java
@@ -31,19 +31,42 @@
 import java.util.Set;
 import java.util.UUID;
 
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.sqoop.manager.oracle.OracleUtils;
+import org.junit.After;
 import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.sqoop.tool.ImportAllTablesTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.sqoop.validation.AbsoluteValidationThreshold;
 import org.assertj.core.api.SoftAssertions;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.tool.BaseSqoopTool;
+import org.apache.sqoop.tool.ImportTool;
+
+import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
+/**
+ * Test aspects of the SqoopOptions class.
+ */
 public class TestSqoopOptions {
 
+  private Properties originalSystemProperties;
+
   private Random random = new Random();
 
   public static final String COLUMN_MAPPING = "test=INTEGER,test1=DECIMAL(1%2C1),test2=NUMERIC(1%2C%202)";
@@ -51,8 +74,12 @@
   private Set<Class> excludedClassesFromClone = new HashSet<>();
   private Set<String> excludedFieldsFromClone = new HashSet<>();
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
-  public void setUp() {
+  public void setup() {
+    originalSystemProperties = System.getProperties();
     excludedClassesFromClone.add(String.class);
     excludedClassesFromClone.add(Class.class);
     excludedClassesFromClone.add(Integer.class);
@@ -64,10 +91,747 @@
     excludedFieldsFromClone.add("activeSqoopTool");
   }
 
+  @After
+  public void tearDown() {
+    System.setProperties(originalSystemProperties);
+  }
+
+  // tests for the toChar() parser
+  @Test
+  public void testNormalChar() throws Exception {
+    assertEquals('a', SqoopOptions.toChar("a"));
+  }
+
+  @Test
+  public void testEmptyString() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on empty string");
+    SqoopOptions.toChar("");
+  }
+
+  @Test
+  public void testNullString() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on null string");
+    SqoopOptions.toChar(null);
+  }
+
+  @Test
+  public void testTooLong() throws Exception {
+    // Should just use the first character and log a warning.
+    assertEquals('x', SqoopOptions.toChar("xyz"));
+  }
+
+  @Test
+  public void testHexChar1() throws Exception {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xf"));
+  }
+
+  @Test
+  public void testHexChar2() throws Exception {
+    assertEquals(0xF, SqoopOptions.toChar("\\0xF"));
+  }
+
+  @Test
+  public void testHexChar3() throws Exception {
+    assertEquals(0xF0, SqoopOptions.toChar("\\0xf0"));
+  }
+
+  @Test
+  public void testHexChar4() throws Exception {
+    assertEquals(0xF0, SqoopOptions.toChar("\\0Xf0"));
+  }
+
+  @Test
+  public void testEscapeChar1() throws Exception {
+    assertEquals('\n', SqoopOptions.toChar("\\n"));
+  }
+
+  @Test
+  public void testEscapeChar2() throws Exception {
+    assertEquals('\\', SqoopOptions.toChar("\\\\"));
+  }
+
+  @Test
+  public void testEscapeChar3() throws Exception {
+    assertEquals('\\', SqoopOptions.toChar("\\"));
+  }
+
+  @Test
+  public void testWhitespaceToChar() throws Exception {
+    assertEquals(' ', SqoopOptions.toChar(" "));
+    assertEquals(' ', SqoopOptions.toChar("   "));
+    assertEquals('\t', SqoopOptions.toChar("\t"));
+  }
+
+  @Test
+  public void testUnknownEscape1() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
+    SqoopOptions.toChar("\\Q");
+  }
+
+  @Test
+  public void testUnknownEscape2() throws Exception {
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on unknown escaping");
+    SqoopOptions.toChar("\\nn");
+  }
+
+  @Test
+  public void testEscapeNul1() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0"));
+  }
+
+  @Test
+  public void testEscapeNul2() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\00"));
+  }
+
+  @Test
+  public void testEscapeNul3() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0000"));
+  }
+
+  @Test
+  public void testEscapeNul4() throws Exception {
+    assertEquals(DelimiterSet.NULL_CHAR, SqoopOptions.toChar("\\0x0"));
+  }
+
+  @Test
+  public void testOctalChar1() throws Exception {
+    assertEquals(04, SqoopOptions.toChar("\\04"));
+  }
+
+  @Test
+  public void testOctalChar2() throws Exception {
+    assertEquals(045, SqoopOptions.toChar("\\045"));
+  }
+
+  @Test
+  public void testErrOctalChar() throws Exception {
+    thrown.expect(NumberFormatException.class);
+    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous octal char");
+    SqoopOptions.toChar("\\095");
+  }
+
+  @Test
+  public void testErrHexChar() throws Exception {
+    thrown.expect(NumberFormatException.class);
+    thrown.reportMissingExceptionWithMessage("Expected NumberFormatException on erroneous hex char");
+    SqoopOptions.toChar("\\0x9K5");
+  }
+
+  private SqoopOptions parse(String [] argv) throws Exception {
+    ImportTool importTool = new ImportTool();
+    return importTool.parseArguments(argv, null, null, false);
+  }
+
+  // test that setting output delimiters also sets input delimiters
+  @Test
+  public void testDelimitersInherit() throws Exception {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('|', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // Test that setting output delimiters and setting input delims
+  // separately works.
+  @Test
+  public void testDelimOverride1() throws Exception {
+    String [] args = {
+        "--fields-terminated-by",
+        "|",
+        "--input-fields-terminated-by",
+        "*",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  // test that the order in which delims are specified doesn't matter
+  @Test
+  public void testDelimOverride2() throws Exception {
+    String [] args = {
+        "--input-fields-terminated-by",
+        "*",
+        "--fields-terminated-by",
+        "|",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals('*', opts.getInputFieldDelim());
+    assertEquals('|', opts.getOutputFieldDelim());
+  }
+
+  @Test
+  public void testBadNumMappers1() throws Exception {
+    String [] args = {
+        "--num-mappers",
+        "x",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on invalid --num-mappers argument");
+    parse(args);
+  }
+
+  @Test
+  public void testBadNumMappers2() throws Exception {
+    String [] args = {
+        "-m",
+        "x",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on invalid -m argument");
+    parse(args);
+  }
+
+  @Test
+  public void testGoodNumMappers() throws Exception {
+    String [] args = {
+        "-m",
+        "4",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals(4, opts.getNumMappers());
+  }
+
+  @Test
+  public void testHivePartitionParams() throws Exception {
+    String[] args = {
+        "--hive-partition-key", "ds",
+        "--hive-partition-value", "20110413",
+    };
+    SqoopOptions opts = parse(args);
+    assertEquals("ds", opts.getHivePartitionKey());
+    assertEquals("20110413", opts.getHivePartitionValue());
+  }
+
+  @Test
+  public void testBoundaryQueryParams() throws Exception {
+    String[] args = {
+        "--boundary-query", "select 1, 2",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals("select 1, 2", opts.getBoundaryQuery());
+  }
+
+  @Test
+  public void testMapColumnHiveParams() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING",
+    };
+
+    SqoopOptions opts = parse(args);
+    Properties mapping = opts.getMapColumnHive();
+    assertTrue(mapping.containsKey("id"));
+    assertEquals("STRING", mapping.get("id"));
+  }
+
+  @Test
+  public void testMalformedMapColumnHiveParams() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id",
+    };
+    try {
+      SqoopOptions opts = parse(args);
+      fail("Malformed hive mapping does not throw exception");
+    } catch (Exception e) {
+      // Caught exception as expected
+    }
+  }
+
+  @Test
+  public void testMapColumnJavaParams() throws Exception {
+    String[] args = {
+        "--map-column-java", "id=String",
+    };
+
+    SqoopOptions opts = parse(args);
+    Properties mapping = opts.getMapColumnJava();
+    assertTrue(mapping.containsKey("id"));
+    assertEquals("String", mapping.get("id"));
+  }
+
+  @Test
+  public void testMalformedMapColumnJavaParams() throws Exception {
+    String[] args = {
+        "--map-column-java", "id",
+    };
+    try {
+      SqoopOptions opts = parse(args);
+      fail("Malformed java mapping does not throw exception");
+    } catch (Exception e) {
+      // Caught exception as expected
+    }
+  }
+
+  @Test
+  public void testSkipDistCacheOption() throws Exception {
+    String[] args = {"--skip-dist-cache"};
+    SqoopOptions opts = parse(args);
+    assertTrue(opts.isSkipDistCache());
+  }
+
+  @Test
+  public void testPropertySerialization1() {
+    // Test that if we write a SqoopOptions out to a Properties,
+    // and then read it back in, we get all the same results.
+    SqoopOptions out = new SqoopOptions();
+    out.setUsername("user");
+    out.setConnectString("bla");
+    out.setNumMappers(4);
+    out.setAppendMode(true);
+    out.setHBaseTable("hbasetable");
+    out.setWarehouseDir("Warehouse");
+    out.setClassName("someclass");
+    out.setSplitByCol("somecol");
+    out.setSqlQuery("the query");
+    out.setPackageName("a.package");
+    out.setHiveImport(true);
+    out.setFetchSize(null);
+
+    Properties connParams = new Properties();
+    connParams.put("conn.timeout", "3000");
+    connParams.put("conn.buffer_size", "256");
+    connParams.put("conn.dummy", "dummy");
+    connParams.put("conn.foo", "bar");
+
+    out.setConnectionParams(connParams);
+
+    Properties outProps = out.writeProperties();
+
+    SqoopOptions in = new SqoopOptions();
+    in.loadProperties(outProps);
+
+    Properties inProps = in.writeProperties();
+
+    assertEquals("properties don't match", outProps, inProps);
+
+    assertEquals("connection params don't match",
+        connParams, out.getConnectionParams());
+    assertEquals("connection params don't match",
+        connParams, in.getConnectionParams());
+  }
+
+  @Test
+  public void testPropertySerialization2() {
+    // Test that if we write a SqoopOptions out to a Properties,
+    // and then read it back in, we get all the same results.
+    SqoopOptions out = new SqoopOptions();
+    out.setUsername("user");
+    out.setConnectString("bla");
+    out.setNumMappers(4);
+    out.setAppendMode(true);
+    out.setHBaseTable("hbasetable");
+    out.setWarehouseDir("Warehouse");
+    out.setClassName("someclass");
+    out.setSplitByCol("somecol");
+    out.setSqlQuery("the query");
+    out.setPackageName("a.package");
+    out.setHiveImport(true);
+    out.setFetchSize(42);
+
+    Properties connParams = new Properties();
+    connParams.setProperty("a", "value-a");
+    connParams.setProperty("b", "value-b");
+    connParams.setProperty("a.b", "value-a.b");
+    connParams.setProperty("a.b.c", "value-a.b.c");
+    connParams.setProperty("aaaaaaaaaa.bbbbbbb.cccccccc", "value-abc");
+
+    out.setConnectionParams(connParams);
+
+    Properties outProps = out.writeProperties();
+
+    SqoopOptions in = new SqoopOptions();
+    in.loadProperties(outProps);
+
+    Properties inProps = in.writeProperties();
+
+    assertEquals("properties don't match", outProps, inProps);
+    assertEquals("connection params don't match",
+        connParams, out.getConnectionParams());
+    assertEquals("connection params don't match",
+        connParams, in.getConnectionParams());
+  }
+
+  @Test
+  public void testDefaultTempRootDir() {
+    SqoopOptions opts = new SqoopOptions();
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  @Test
+  public void testDefaultLoadedTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  @Test
+  public void testLoadedTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    final String tempRootDir = "customRoot";
+    out.setTempRootDir(tempRootDir);
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals(tempRootDir, opts.getTempRootDir());
+  }
+
+  @Test
+  public void testNulledTempRootDir() {
+    SqoopOptions out = new SqoopOptions();
+    out.setTempRootDir(null);
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertEquals("_sqoop", opts.getTempRootDir());
+  }
+
+  @Test
+  public void testDefaultThrowOnErrorWithNotSetSystemProperty() {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    SqoopOptions opts = new SqoopOptions();
+    assertFalse(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultThrowOnErrorWithSetSystemProperty() {
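+    // An empty (but set) rethrow system property should still enable throw-on-error.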
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    SqoopOptions opts = new SqoopOptions();
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultLoadedThrowOnErrorWithNotSetSystemProperty() {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertFalse(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testDefaultLoadedThrowOnErrorWithSetSystemProperty() {
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    SqoopOptions out = new SqoopOptions();
+    Properties props = out.writeProperties();
+    SqoopOptions opts = new SqoopOptions();
+    opts.loadProperties(props);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testThrowOnErrorWithNotSetSystemProperty() throws Exception {
+    System.clearProperty(SQOOP_RETHROW_PROPERTY);
+    String[] args = {"--throw-on-error"};
+    SqoopOptions opts = parse(args);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void testThrowOnErrorWithSetSystemProperty() throws Exception {
+    String testSqoopRethrowProperty = "";
+    System.setProperty(SQOOP_RETHROW_PROPERTY, testSqoopRethrowProperty);
+    String[] args = {"--throw-on-error"};
+    SqoopOptions opts = parse(args);
+
+    assertTrue(opts.isThrowOnError());
+  }
+
+  @Test
+  public void defaultValueOfOracleEscapingDisabledShouldBeTrue() {
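+    // Without the system property, the option should fall back to its default value (true).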
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+  }
+
+  @Test
+  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainTheSameValueForOracleEscapingDisabledAsSqoopOptionsProperty() {
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(opts.isOracleEscapingDisabled())));
+  }
+
+  @Test
+  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainTrueForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions() {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(true);
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(true)));
+  }
+
+  @Test
+  public void hadoopConfigurationInstanceOfSqoopOptionsShouldContainFalseForOracleEscapingDisabledAsTheValueDirectlyHasBeenSetToSqoopOptions() {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(false);
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToFalse() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "false");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeTrueIfTheValueOfTheRelatedEnvironmentVariableIsSetToTrue() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "true");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(true)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledInHadoopConfigurationInstanceOfSqoopOptionsShouldBeFalseIfTheValueOfTheRelatedEnvironmentVariableIsSetToAnyNonBooleanValue() {
+    System.setProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED, "falsetrue");
+    SqoopOptions opts = new SqoopOptions();
+
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledShouldBeAbleToSavedAndLoadedBackWithTheSameValue() {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    opts.setOracleEscapingDisabled(false);
+    Properties out = opts.writeProperties();
+    opts = new SqoopOptions();
+    opts.loadProperties(out);
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(false)));
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(false)));
+  }
+
+  @Test
+  public void valueOfOracleEscapingDisabledShouldBeTrueIfASqoopOptionsInstanceWasLoadedWhichDidntContainASavedValueForIt() {
+    System.clearProperty(SqoopOptions.ORACLE_ESCAPING_DISABLED);
+    SqoopOptions opts = new SqoopOptions();
+    Properties out = opts.writeProperties();
+    opts = new SqoopOptions();
+    opts.loadProperties(out);
+
+    assertThat(opts.isOracleEscapingDisabled(), is(equalTo(true)));
+    assertThat(OracleUtils.isOracleEscapingDisabled(opts.getConf()),
+        is(equalTo(true)));
+  }
+
+  // test that hadoop-home is accepted as an option
+  @Test
+  public void testHadoopHome() throws Exception {
+    String [] args = {
+        "--hadoop-home",
+        "/usr/lib/hadoop",
+    };
+
+    SqoopOptions opts = parse(args);
+    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
+  }
+
+  // test that hadoop-mapred-home overrides hadoop-home when both are given
+  @Test
+  public void testHadoopMapRedOverridesHadoopHome() throws Exception {
+    String[] args = { "--hadoop-home", "/usr/lib/hadoop-ignored", "--hadoop-mapred-home", "/usr/lib/hadoop", };
+
+    SqoopOptions opts = parse(args);
+    assertEquals("/usr/lib/hadoop", opts.getHadoopMapRedHome());
+  }
+
+
+  //helper method to validate given import options
+  private void validateImportOptions(String[] extraArgs) throws Exception {
+    String [] args = {
+        "--connect", HsqldbTestServer.getUrl(),
+        "--table", "test",
+        "-m", "1",
+    };
+    ImportTool importTool = new ImportTool();
+    SqoopOptions opts = importTool.parseArguments(
+        (String []) ArrayUtils.addAll(args, extraArgs), null, null, false);
+    importTool.validateOptions(opts);
+  }
+
+  //test compatibility of --delete-target-dir with import
+  @Test
+  public void testDeleteTargetDir() throws Exception {
+    String [] extraArgs = {
+        "--delete-target-dir",
+    };
+    try {
+      validateImportOptions(extraArgs);
+    } catch(SqoopOptions.InvalidOptionsException ioe) {
+      fail("Unexpected InvalidOptionsException" + ioe);
+    }
+  }
+
+  //test incompatability of --delete-target-dir & --append with import
+  @Test
+  public void testDeleteTargetDirWithAppend() throws Exception {
+    String [] extraArgs = {
+        "--append",
+        "--delete-target-dir",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on incompatibility of " +
+        "--delete-target-dir and --append");
+    validateImportOptions(extraArgs);
+  }
+
+  //test incompatibility of --delete-target-dir with incremental import
+  @Test
+  public void testDeleteWithIncrementalImport() throws Exception {
+    String [] extraArgs = {
+        "--incremental", "append",
+        "--delete-target-dir",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException on incompatibility of " +
+        "--delete-target-dir and --incremental");
+    validateImportOptions(extraArgs);
+  }
+
+  // test that hbase bulk load import with table name and target dir
+  // passes validation
+  @Test
+  public void testHBaseBulkLoad() throws Exception {
+    String [] extraArgs = {
+        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
+        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test",
+        longArgument(BaseSqoopTool.HBASE_TABLE_ARG), "test_table",
+        longArgument(BaseSqoopTool.HBASE_COL_FAM_ARG), "d"};
+
+    validateImportOptions(extraArgs);
+  }
+
+  // test that hbase bulk load import with a missing --hbase-table fails
+  @Test
+  public void testHBaseBulkLoadMissingHbaseTable() throws Exception {
+    String [] extraArgs = {
+        longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
+        longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException because of missing --hbase-table");
+    validateImportOptions(extraArgs);
+  }
+
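+  // Prefixes the given option name with "--" to form its long command-line form.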
+  private static String longArgument(String argument) {
+    return String.format("--%s", argument);
+  }
+
+  @Test
+  public void testRelaxedIsolation() throws Exception {
+    String extraArgs[] = {
+        "--relaxed-isolation",
+    };
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testResetToOneMapper() throws Exception {
+    String extraArgs[] = {
+        "--autoreset-to-one-mapper",
+    };
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testResetToOneMapperAndSplitBy() throws Exception {
+    String extraArgs[] = {
+        "--autoreset-to-one-mapper",
+        "--split-by",
+        "col0",
+    };
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected Exception on incompatibility of " +
+        "--autoreset-to-one-mapper and --split-by");
+    validateImportOptions(extraArgs);
+  }
+
+  @Test
+  public void testEscapeMappingColumnNames() throws Exception {
+    SqoopOptions opts = new SqoopOptions();
+    // enabled by default
+    assertTrue(opts.getEscapeMappingColumnNamesEnabled());
+
+    String [] args = {
+        "--" + org.apache.sqoop.tool.BaseSqoopTool.ESCAPE_MAPPING_COLUMN_NAMES_ENABLED,
+        "false",
+    };
+
+    opts = parse(args);
+    assertFalse(opts.getEscapeMappingColumnNamesEnabled());
+  }
+
   @Test
   public void testParseColumnParsing() {
     new SqoopOptions() {
-  @Test
+      @Test
       public void testParseColumnMapping() {
         Properties result = new Properties();
         parseColumnMapping(COLUMN_MAPPING, result);
@@ -183,6 +947,9 @@
     else if(type.equals(String.class)) {
       return UUID.randomUUID().toString();
     }
+    else if(type.equals(Character.TYPE) || type.equals(Character.class)) {
+      return UUID.randomUUID().toString().charAt(0);
+    }
     else if(type.equals(BigInteger.class)){
       return BigInteger.valueOf(random.nextInt());
     }
diff --git a/src/test/com/cloudera/sqoop/TestTargetDir.java b/src/test/org/apache/sqoop/TestTargetDir.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/TestTargetDir.java
rename to src/test/org/apache/sqoop/TestTargetDir.java
index d7ebd34..c2cacd4 100644
--- a/src/test/com/cloudera/sqoop/TestTargetDir.java
+++ b/src/test/org/apache/sqoop/TestTargetDir.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -26,9 +26,9 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/test/com/cloudera/sqoop/TestWhere.java b/src/test/org/apache/sqoop/TestWhere.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/TestWhere.java
rename to src/test/org/apache/sqoop/TestWhere.java
index 340be9c..ed8199b 100644
--- a/src/test/com/cloudera/sqoop/TestWhere.java
+++ b/src/test/org/apache/sqoop/TestWhere.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop;
+package org.apache.sqoop;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -27,14 +27,15 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
index 95c9b56..14413b1 100644
--- a/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
+++ b/src/test/org/apache/sqoop/accumulo/AccumuloTestCase.java
@@ -49,8 +49,8 @@
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
index 244c744..9c1e9f9 100644
--- a/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
+++ b/src/test/org/apache/sqoop/credentials/TestPassingSecurePassword.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.credentials;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -28,12 +28,10 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.sqoop.mapreduce.db.DBConfiguration;
-import org.apache.sqoop.tool.BaseSqoopTool;
 import org.apache.sqoop.tool.ImportTool;
 import org.apache.sqoop.util.password.CredentialProviderHelper;
 import org.apache.sqoop.util.password.CredentialProviderPasswordLoader;
 import org.apache.sqoop.util.password.CryptoFileLoader;
-import org.apache.sqoop.util.password.PasswordLoader;
 import org.junit.Test;
 
 import javax.crypto.Cipher;
@@ -45,7 +43,6 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
 import java.sql.Connection;
 import java.util.ArrayList;
 import java.util.Collections;
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
index fd00498..c4caafb 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportAddRowKeyTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import org.apache.commons.lang.StringUtils;
 import org.junit.Before;
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
index dd88fe7..114cc6a 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportNullTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import java.io.IOException;
 
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportTest.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseImportTest.java
index 4d79341..2e73cf3 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import java.io.IOException;
 
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
index ae08a66..f3616c3 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseImportTypesTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import java.io.IOException;
 
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java b/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
index 3c027ad..73a2247 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseKerberizedConnectivityTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
@@ -1,4 +1,4 @@
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import org.apache.sqoop.infrastructure.kerberos.MiniKdcInfrastructureRule;
 import org.junit.ClassRule;
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java b/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
index d71d4e3..b73afcd 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseQueryImportTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import java.io.IOException;
 
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/org/apache/sqoop/hbase/HBaseTestCase.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
rename to src/test/org/apache/sqoop/hbase/HBaseTestCase.java
index 99fcd70..98f8698 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseTestCase.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import static org.apache.hadoop.hbase.HConstants.MASTER_INFO_PORT;
 import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_CLIENT_PORT;
@@ -66,9 +66,9 @@
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 /**
  * Utility methods that facilitate HBase import tests.
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java b/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
rename to src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
index 4201139..c6a808c 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseUtilTest.java
+++ b/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hbase;
+package org.apache.sqoop.hbase;
 
 import org.junit.Test;
 
diff --git a/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java b/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
index 73b3177..e78a535 100644
--- a/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
+++ b/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
@@ -17,7 +17,7 @@
  */
 package org.apache.sqoop.hbase;
 
-import com.cloudera.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.FieldMappable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.BufferedMutator;
 import org.apache.hadoop.hbase.client.Connection;
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
index 7ff046e..a124dd0 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
@@ -32,7 +32,7 @@
 import java.util.Map;
 import java.util.TimeZone;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -47,7 +47,7 @@
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
index b6741f4..c7e1ea6 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
@@ -42,7 +42,6 @@
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
@@ -53,12 +52,12 @@
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Before;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
index 6fb6486..28b42dc 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
@@ -63,9 +63,9 @@
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Assert;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 /**
  * HCatalog common test utilities.
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 104effb..ba05cab 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -20,9 +20,9 @@
 
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ExportTool;
-import com.cloudera.sqoop.tool.ImportTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ExportTool;
+import org.apache.sqoop.tool.ImportTool;
 
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/org/apache/sqoop/hive/TestHiveImport.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/hive/TestHiveImport.java
rename to src/test/org/apache/sqoop/hive/TestHiveImport.java
index a624f52..4e1f249 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/org/apache/sqoop/hive/TestHiveImport.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.hive;
+package org.apache.sqoop.hive;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -26,7 +26,7 @@
 import java.util.Arrays;
 import java.util.List;
 
-import com.cloudera.sqoop.Sqoop;
+import org.apache.sqoop.Sqoop;
 
 import org.apache.avro.Schema;
 import org.apache.avro.SchemaBuilder;
@@ -43,15 +43,15 @@
 import org.junit.Rule;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.BaseSqoopTool;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.tool.CreateHiveTableTool;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.BaseSqoopTool;
+import org.apache.sqoop.tool.CodeGenTool;
+import org.apache.sqoop.tool.CreateHiveTableTool;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.apache.commons.cli.ParseException;
 import org.junit.rules.ExpectedException;
 import org.kitesdk.data.Dataset;
diff --git a/src/test/org/apache/sqoop/hive/TestTableDefWriter.java b/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
index 035b0e1..496b5ad 100644
--- a/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
@@ -15,112 +15,275 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.sqoop.hive;
 
-import static org.mockito.Mockito.*;
+import java.util.Map;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.SqoopOptions;
-
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.junit.BeforeClass;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sqoop.util.SqlTypeMap;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+
+import org.junit.Rule;
 import org.junit.Test;
-import org.mockito.Mockito;
+import org.junit.rules.ExpectedException;
 
-import java.sql.*;
-import java.util.HashMap;
-import java.io.IOException;
+import java.sql.Types;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * Test Hive DDL statement generation.
+ */
 public class TestTableDefWriter {
-  static String inputTableName = "genres";
-  static String outputTableName = "genres";
-  static String testTargetDir = "/tmp/testDB/genre";
-  static String hdfsTableDir = "/data/movielens/genre";
-  static String testDbUri = "jdbc:postgresql://localhost/movielens";
-  static ConnManager manager;
-  static SqoopOptions options;
+
   public static final Log LOG = LogFactory.getLog(
       TestTableDefWriter.class.getName());
-  TableDefWriter tableDefWriter;
 
-  @BeforeClass
-  public static void setup() {
-    // create mock
-    HashMap<String, Integer> map = new HashMap<String, Integer>();
-    map.put("id", Types.TINYINT);
-    map.put("name", Types.VARCHAR);
-    manager = Mockito.mock(ConnManager.class);
-    when(manager.getColumnNames(inputTableName)).thenReturn(new String[] { "id", "name" });
-    when(manager.getColumnTypes(inputTableName)).thenReturn(map);
-    options = new SqoopOptions(testDbUri, inputTableName);
-    options.setTargetDir(testTargetDir);
-    options.setHiveExternalTableDir(hdfsTableDir);
-    String[] cols = new String[] { "id", "name" };
-    options.setColumns(cols);
-    options.setMapColumnHive("id=TINYINT,name=STRING");
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  // Test getHiveOctalCharCode and expect an IllegalArgumentException.
+  private void expectExceptionInCharCode(int charCode) {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
+    TableDefWriter.getHiveOctalCharCode(charCode);
   }
 
   @Test
-  public void testGenerateExternalTableStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE ") && stmt.contains("LOCATION '" + hdfsTableDir));
+  public void testHiveOctalCharCode() {
+    assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
+    assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
+    assertEquals("\\012", TableDefWriter.getHiveOctalCharCode((int) '\n'));
+    assertEquals("\\177", TableDefWriter.getHiveOctalCharCode(0177));
+
+    expectExceptionInCharCode(4096);
+    expectExceptionInCharCode(0200);
+    expectExceptionInCharCode(254);
   }
 
   @Test
-  public void testGenerateTableStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(null);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE "));
+  public void testDifferentTableNames() throws Exception {
+    Configuration conf = new Configuration();
+    SqoopOptions options = new SqoopOptions();
+    TableDefWriter writer = new TableDefWriter(options, null,
+        "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    LOG.debug("Create table stmt: " + createTable);
+    LOG.debug("Load data stmt: " + loadData);
+
+    // Assert that the statements generated have the form we expect.
+    assertTrue(createTable.indexOf(
+        "CREATE TABLE IF NOT EXISTS `outputTable`") != -1);
+    assertTrue(loadData.indexOf("INTO TABLE `outputTable`") != -1);
+    assertTrue(loadData.indexOf("/inputTable'") != -1);
   }
 
   @Test
-  public void testGenerateExternalTableIfExistsStatement() throws IOException, SQLException {
-    options.setFailIfHiveTableExists(false);
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE IF NOT EXISTS") && stmt.contains("LOCATION '"
-        + hdfsTableDir));
+  public void testDifferentTargetDirs() throws Exception {
+    String targetDir = "targetDir";
+    String inputTable = "inputTable";
+    String outputTable = "outputTable";
+
+    Configuration conf = new Configuration();
+    SqoopOptions options = new SqoopOptions();
+    // Specify a different target dir from input table name
+    options.setTargetDir(targetDir);
+    TableDefWriter writer = new TableDefWriter(options, null,
+        inputTable, outputTable, conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    LOG.debug("Create table stmt: " + createTable);
+    LOG.debug("Load data stmt: " + loadData);
+
+    // Assert that the statements generated have the form we expect.
+    assertTrue(createTable.indexOf(
+        "CREATE TABLE IF NOT EXISTS `" + outputTable + "`") != -1);
+    assertTrue(loadData.indexOf("INTO TABLE `" + outputTable + "`") != -1);
+    assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
   }
 
   @Test
-  public void testGenerateTableIfExistsStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(null);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE IF NOT EXISTS"));
+  public void testPartitions() throws Exception {
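+    // The generated DDL should contain a PARTITIONED BY clause and the load statement a matching PARTITION spec.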
+    String[] args = {
+        "--hive-partition-key", "ds",
+        "--hive-partition-value", "20110413",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    assertNotNull(createTable);
+    assertNotNull(loadData);
+    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
+        + "PARTITIONED BY (ds STRING) "
+        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
+        + "LINES TERMINATED BY '\\012' STORED AS TEXTFILE", createTable);
+    assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
   }
 
   @Test
-  public void testGenerateExternalTableLoadStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getLoadDataStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("LOAD DATA INPATH ") && stmt.contains(testTargetDir));
+  public void testLzoSplitting() throws Exception {
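+    // With lzop compression the generated DDL should use the LZO-specific input format.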
+    String[] args = {
+        "--compress",
+        "--compression-codec", "lzop",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    assertNotNull(createTable);
+    assertNotNull(loadData);
+    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
+        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
+        + "LINES TERMINATED BY '\\012' STORED AS "
+        + "INPUTFORMAT 'com.hadoop.mapred.DeprecatedLzoTextInputFormat' "
+        + "OUTPUTFORMAT "
+        + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'",
+        createTable);
   }
+
+  @Test
+  public void testUserMappingNoDecimal() throws Exception {
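+    // --map-column-hive overrides should replace the Hive types derived from the SQL column types.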
+    String[] args = {
+        "--map-column-hive", "id=STRING,value=INTEGER",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    colTypes.put("value", Types.VARCHAR);
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+
+    assertNotNull(createTable);
+
+    assertTrue(createTable.contains("`id` STRING"));
+    assertTrue(createTable.contains("`value` INTEGER"));
+
+    assertFalse(createTable.contains("`id` INTEGER"));
+    assertFalse(createTable.contains("`value` STRING"));
+  }
+
+  @Test
+  public void testUserMappingWithDecimal() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING,value2=DECIMAL(13,5),value1=INTEGER," +
+                             "value3=DECIMAL(4,5),value4=VARCHAR(255)",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+        new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    colTypes.put("value1", Types.VARCHAR);
+    colTypes.put("value2", Types.DOUBLE);
+    colTypes.put("value3", Types.FLOAT);
+    colTypes.put("value4", Types.CHAR);
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+
+    assertNotNull(createTable);
+
+    assertTrue(createTable.contains("`id` STRING"));
+    assertTrue(createTable.contains("`value1` INTEGER"));
+    assertTrue(createTable.contains("`value2` DECIMAL(13,5)"));
+    assertTrue(createTable.contains("`value3` DECIMAL(4,5)"));
+    assertTrue(createTable.contains("`value4` VARCHAR(255)"));
+
+    assertFalse(createTable.contains("`id` INTEGER"));
+    assertFalse(createTable.contains("`value1` STRING"));
+    assertFalse(createTable.contains("`value2` DOUBLE"));
+    assertFalse(createTable.contains("`value3` FLOAT"));
+    assertFalse(createTable.contains("`value4` CHAR"));
+  }
+
+  @Test
+  public void testUserMappingFailWhenCantBeApplied() throws Exception {
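+    // The mapping refers to a column ("value") that is not among the known columns, so DDL generation should fail.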
+    String[] args = {
+        "--map-column-hive", "id=STRING,value=INTEGER",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    writer.setColumnTypes(colTypes);
+
+    thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException on non applied Hive type mapping");
+    String createTable = writer.getCreateTableStmt();
+  }
+
+  @Test
+  public void testHiveDatabase() throws Exception {
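+    // --hive-database should qualify the table name in both the CREATE TABLE and LOAD DATA statements.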
+    String[] args = {
+        "--hive-database", "db",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    assertNotNull(createTable);
+    assertTrue(createTable.contains("`db`.`outputTable`"));
+
+    String loadStmt = writer.getLoadDataStmt();
+    assertNotNull(loadStmt);
+    assertTrue(loadStmt.contains("`db`.`outputTable`"));
+  }
+
 }
diff --git a/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java b/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java
new file mode 100644
index 0000000..f1768ee
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.hive;
+
+import static org.mockito.Mockito.*;
+
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.sql.*;
+import java.util.HashMap;
+import java.io.IOException;
+
+public class TestTableDefWriterForExternalTable {
+  static String inputTableName = "genres";
+  static String outputTableName = "genres";
+  static String testTargetDir = "/tmp/testDB/genre";
+  static String hdfsTableDir = "/data/movielens/genre";
+  static String testDbUri = "jdbc:postgresql://localhost/movielens";
+  static ConnManager manager;
+  static SqoopOptions options;
+  public static final Log LOG = LogFactory.getLog(
+      TestTableDefWriterForExternalTable.class.getName());
+  TableDefWriter tableDefWriter;
+
+  @BeforeClass
+  public static void setup() {
+    // create mock
+    HashMap<String, Integer> map = new HashMap<String, Integer>();
+    map.put("id", Types.TINYINT);
+    map.put("name", Types.VARCHAR);
+    manager = Mockito.mock(ConnManager.class);
+    when(manager.getColumnNames(inputTableName)).thenReturn(new String[] { "id", "name" });
+    when(manager.getColumnTypes(inputTableName)).thenReturn(map);
+    options = new SqoopOptions(testDbUri, inputTableName);
+    options.setTargetDir(testTargetDir);
+    options.setHiveExternalTableDir(hdfsTableDir);
+    String[] cols = new String[] { "id", "name" };
+    options.setColumns(cols);
+    options.setMapColumnHive("id=TINYINT,name=STRING");
+  }
+
+  @Test
+  public void testGenerateExternalTableStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
+    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE ") && stmt.contains("LOCATION '" + hdfsTableDir));
+  }
+
+  @Test
+  public void testGenerateTableStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(null);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
+    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE "));
+  }
+
+  @Test
+  public void testGenerateExternalTableIfExistsStatement() throws IOException, SQLException {
+    options.setFailIfHiveTableExists(false);
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
+    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE IF NOT EXISTS") && stmt.contains("LOCATION '"
+        + hdfsTableDir));
+  }
+
+  @Test
+  public void testGenerateTableIfExistsStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(null);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
+    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE IF NOT EXISTS"));
+  }
+
+  @Test
+  public void testGenerateExternalTableLoadStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getLoadDataStmt();
+    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
+    assert (isHiveExternalTableSet && stmt.contains("LOAD DATA INPATH ") && stmt.contains(testTargetDir));
+  }
+}
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/org/apache/sqoop/io/TestCodecMap.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/io/TestCodecMap.java
rename to src/test/org/apache/sqoop/io/TestCodecMap.java
index c78a5ae..e719218 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/org/apache/sqoop/io/TestCodecMap.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.io;
+package org.apache.sqoop.io;
 
 import java.io.IOException;
 
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/org/apache/sqoop/io/TestLobFile.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/io/TestLobFile.java
rename to src/test/org/apache/sqoop/io/TestLobFile.java
index ac461fd..2bc95f2 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/org/apache/sqoop/io/TestLobFile.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.io;
+package org.apache.sqoop.io;
 
 import java.io.BufferedReader;
 import java.io.File;
diff --git a/src/test/com/cloudera/sqoop/io/TestNamedFifo.java b/src/test/org/apache/sqoop/io/TestNamedFifo.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/io/TestNamedFifo.java
rename to src/test/org/apache/sqoop/io/TestNamedFifo.java
index b11bbee..a93784e 100644
--- a/src/test/com/cloudera/sqoop/io/TestNamedFifo.java
+++ b/src/test/org/apache/sqoop/io/TestNamedFifo.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.io;
+package org.apache.sqoop.io;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
diff --git a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java b/src/test/org/apache/sqoop/io/TestSplittableBufferedWriter.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java
rename to src/test/org/apache/sqoop/io/TestSplittableBufferedWriter.java
index 8b2b1e5..c59aa26 100644
--- a/src/test/com/cloudera/sqoop/io/TestSplittableBufferedWriter.java
+++ b/src/test/org/apache/sqoop/io/TestSplittableBufferedWriter.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.io;
+package org.apache.sqoop.io;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -35,7 +35,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.compress.GzipCodec;
 
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/com/cloudera/sqoop/lib/TestBlobRef.java b/src/test/org/apache/sqoop/lib/TestBlobRef.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/lib/TestBlobRef.java
rename to src/test/org/apache/sqoop/lib/TestBlobRef.java
index 0d010b0..b271d3c 100644
--- a/src/test/com/cloudera/sqoop/lib/TestBlobRef.java
+++ b/src/test/org/apache/sqoop/lib/TestBlobRef.java
@@ -16,17 +16,17 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 import java.io.*;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import com.cloudera.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobFile;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java b/src/test/org/apache/sqoop/lib/TestBooleanParser.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/lib/TestBooleanParser.java
rename to src/test/org/apache/sqoop/lib/TestBooleanParser.java
index c1ab7e5..914ab37 100644
--- a/src/test/com/cloudera/sqoop/lib/TestBooleanParser.java
+++ b/src/test/org/apache/sqoop/lib/TestBooleanParser.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 
 import org.junit.Test;
diff --git a/src/test/com/cloudera/sqoop/lib/TestClobRef.java b/src/test/org/apache/sqoop/lib/TestClobRef.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/lib/TestClobRef.java
rename to src/test/org/apache/sqoop/lib/TestClobRef.java
index 7efc3b6..f94d1a8 100644
--- a/src/test/com/cloudera/sqoop/lib/TestClobRef.java
+++ b/src/test/org/apache/sqoop/lib/TestClobRef.java
@@ -16,17 +16,17 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 import java.io.*;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import com.cloudera.sqoop.io.LobFile;
+import org.apache.sqoop.io.LobFile;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java b/src/test/org/apache/sqoop/lib/TestFieldFormatter.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java
rename to src/test/org/apache/sqoop/lib/TestFieldFormatter.java
index 1fc9cd2..9ac55e7 100644
--- a/src/test/com/cloudera/sqoop/lib/TestFieldFormatter.java
+++ b/src/test/org/apache/sqoop/lib/TestFieldFormatter.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java b/src/test/org/apache/sqoop/lib/TestLargeObjectLoader.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
rename to src/test/org/apache/sqoop/lib/TestLargeObjectLoader.java
index c55f14b..1e07d71 100644
--- a/src/test/com/cloudera/sqoop/lib/TestLargeObjectLoader.java
+++ b/src/test/org/apache/sqoop/lib/TestLargeObjectLoader.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -24,14 +24,13 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-
-import com.cloudera.sqoop.testutil.MockResultSet;
+import org.apache.sqoop.testutil.MockResultSet;
 import org.junit.Before;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java b/src/test/org/apache/sqoop/lib/TestRecordParser.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/lib/TestRecordParser.java
rename to src/test/org/apache/sqoop/lib/TestRecordParser.java
index d964cef..d6844c1 100644
--- a/src/test/com/cloudera/sqoop/lib/TestRecordParser.java
+++ b/src/test/org/apache/sqoop/lib/TestRecordParser.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.lib;
+package org.apache.sqoop.lib;
 
 import java.util.ArrayList;
 import java.util.List;
diff --git a/src/test/org/apache/sqoop/manager/TestDefaultManagerFactory.java b/src/test/org/apache/sqoop/manager/TestDefaultManagerFactory.java
index fd72ef4..8e16324 100644
--- a/src/test/org/apache/sqoop/manager/TestDefaultManagerFactory.java
+++ b/src/test/org/apache/sqoop/manager/TestDefaultManagerFactory.java
@@ -18,8 +18,8 @@
 
 package org.apache.sqoop.manager;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/TestMainframeManager.java b/src/test/org/apache/sqoop/manager/TestMainframeManager.java
index 9359ac4..97e48e8 100644
--- a/src/test/org/apache/sqoop/manager/TestMainframeManager.java
+++ b/src/test/org/apache/sqoop/manager/TestMainframeManager.java
@@ -36,13 +36,11 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.util.ImportException;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/org/apache/sqoop/manager/TestSqlManager.java b/src/test/org/apache/sqoop/manager/TestSqlManager.java
index 571ed50..185f5a7 100644
--- a/src/test/org/apache/sqoop/manager/TestSqlManager.java
+++ b/src/test/org/apache/sqoop/manager/TestSqlManager.java
@@ -18,19 +18,240 @@
 
 package org.apache.sqoop.manager;
 
-import static org.junit.Assert.assertArrayEquals;
-
 import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertArrayEquals;
+
 /**
  * Test methods of the generic SqlManager implementation.
  */
 public class TestSqlManager {
 
+  public static final Log LOG = LogFactory.getLog(TestSqlManager.class.getName());
+
+  /** the name of a table that doesn't exist. */
+  static final String MISSING_TABLE = "MISSING_TABLE";
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+
+  @Before
+  public void setUp() {
+    testServer = new HsqldbTestServer();
+    try {
+      testServer.resetServer();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not find class for db driver: " + cnfe.toString());
+      fail("Could not find class for db driver: " + cnfe.toString());
+    }
+
+    manager = testServer.getManager();
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  @Test
+  public void testListColNames() {
+    String [] colNames = manager.getColumnNames(
+        HsqldbTestServer.getTableName());
+    assertNotNull("manager returned no colname list", colNames);
+    assertEquals("Table list should be length 2", 2, colNames.length);
+    String [] knownFields = HsqldbTestServer.getFieldNames();
+    for (int i = 0; i < colNames.length; i++) {
+      assertEquals(knownFields[i], colNames[i]);
+    }
+  }
+
+  @Test
+  public void testListColTypes() {
+    Map<String, Integer> types = manager.getColumnTypes(
+        HsqldbTestServer.getTableName());
+
+    assertNotNull("manager returned no types map", types);
+    assertEquals("Map should be size=2", 2, types.size());
+    assertEquals(types.get("INTFIELD1").intValue(), Types.INTEGER);
+    assertEquals(types.get("INTFIELD2").intValue(), Types.INTEGER);
+  }
+
+  @Test
+  public void testMissingTableColNames() {
+    String [] colNames = manager.getColumnNames(MISSING_TABLE);
+    assertNull("No column names should be returned for missing table",
+        colNames);
+  }
+
+  @Test
+  public void testMissingTableColTypes() {
+    Map<String, Integer> colTypes = manager.getColumnTypes(MISSING_TABLE);
+    assertNull("No column types should be returned for missing table",
+        colTypes);
+  }
+
+  @Test
+  public void testListTables() {
+    String [] tables = manager.listTables();
+    for (String table : tables) {
+      System.err.println("Got table: " + table);
+    }
+    assertNotNull("manager returned no table list", tables);
+    assertEquals("Table list should be length 1", 1, tables.length);
+    assertEquals(HsqldbTestServer.getTableName(), tables[0]);
+  }
+
+  // constants related to testReadTable()
+  static final int EXPECTED_NUM_ROWS = 4;
+  static final int EXPECTED_COL1_SUM = 16;
+  static final int EXPECTED_COL2_SUM = 20;
+
+  @Test
+  public void testReadTable() {
+    ResultSet results = null;
+    try {
+      results = manager.readTable(HsqldbTestServer.getTableName(),
+          HsqldbTestServer.getFieldNames());
+
+      assertNotNull("ResultSet from readTable() is null!", results);
+
+      ResultSetMetaData metaData = results.getMetaData();
+      assertNotNull("ResultSetMetadata is null in readTable()", metaData);
+
+      // ensure that we get the correct number of columns back
+      assertEquals("Number of returned columns was unexpected!",
+          metaData.getColumnCount(),
+          HsqldbTestServer.getFieldNames().length);
+
+      // should get back 4 rows. They are:
+      // 1 2
+      // 3 4
+      // 5 6
+      // 7 8
+      // .. so while order isn't guaranteed, we should get back 16 on the left
+      // and 20 on the right.
+      int sumCol1 = 0, sumCol2 = 0, rowCount = 0;
+      while (results.next()) {
+        rowCount++;
+        sumCol1 += results.getInt(1);
+        sumCol2 += results.getInt(2);
+      }
+
+      assertEquals("Expected 4 rows back", EXPECTED_NUM_ROWS, rowCount);
+      assertEquals("Expected left sum of 16", EXPECTED_COL1_SUM, sumCol1);
+      assertEquals("Expected right sum of 20", EXPECTED_COL2_SUM, sumCol2);
+    } catch (SQLException sqlException) {
+      fail("SQL Exception: " + sqlException.toString());
+    } finally {
+      if (null != results) {
+        try {
+          results.close();
+        } catch (SQLException sqlE) {
+          fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
+        }
+      }
+
+      manager.release();
+    }
+  }
+
+  @Test
+  public void testReadMissingTable() {
+    ResultSet results = null;
+    try {
+      String [] colNames = { "*" };
+      results = manager.readTable(MISSING_TABLE, colNames);
+      assertNull("Expected null resultset from readTable(MISSING_TABLE)",
+          results);
+    } catch (SQLException sqlException) {
+      // we actually expect this. pass.
+    } finally {
+      if (null != results) {
+        try {
+          results.close();
+        } catch (SQLException sqlE) {
+          fail("SQL Exception in ResultSet.close(): " + sqlE.toString());
+        }
+      }
+
+      manager.release();
+    }
+  }
+
+  @Test
+  public void getPrimaryKeyFromMissingTable() {
+    String primaryKey = manager.getPrimaryKey(MISSING_TABLE);
+    assertNull("Expected null pkey for missing table", primaryKey);
+  }
+
+  @Test
+  public void getPrimaryKeyFromTableWithoutKey() {
+    String primaryKey = manager.getPrimaryKey(HsqldbTestServer.getTableName());
+    assertNull("Expected null pkey for table without key", primaryKey);
+  }
+
+  // constants for getPrimaryKeyFromTable()
+  static final String TABLE_WITH_KEY = "TABLE_WITH_KEY";
+  static final String KEY_FIELD_NAME = "KEYFIELD";
+
+  @Test
+  public void getPrimaryKeyFromTable() {
+    // first, create a table with a primary key
+    Connection conn = null;
+    try {
+      conn = testServer.getConnection();
+      PreparedStatement statement = conn.prepareStatement(
+          "CREATE TABLE " + TABLE_WITH_KEY + "(" + KEY_FIELD_NAME
+              + " INT NOT NULL PRIMARY KEY, foo INT)",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+    } catch (SQLException sqlException) {
+      fail("Could not create table with primary key: "
+          + sqlException.toString());
+    } finally {
+      if (null != conn) {
+        try {
+          conn.close();
+        } catch (SQLException sqlE) {
+          LOG.warn("Got SQLException during close: " + sqlE.toString());
+        }
+      }
+    }
+
+    String primaryKey = manager.getPrimaryKey(TABLE_WITH_KEY);
+    assertEquals("Expected null pkey for table without key", primaryKey,
+        KEY_FIELD_NAME);
+  }
+
   @Test
   public void testFilteringSpecifiedColumnNamesWhenNoneSpecified() {
     SqoopOptions opts = new SqoopOptions();
diff --git a/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java b/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
index 78103ec..82fac12 100644
--- a/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
+++ b/src/test/org/apache/sqoop/manager/cubrid/CubridAuthTest.java
@@ -32,9 +32,9 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
diff --git a/src/test/org/apache/sqoop/manager/cubrid/CubridCompatTest.java b/src/test/org/apache/sqoop/manager/cubrid/CubridCompatTest.java
index 05c7bf1..8a075e8 100644
--- a/src/test/org/apache/sqoop/manager/cubrid/CubridCompatTest.java
+++ b/src/test/org/apache/sqoop/manager/cubrid/CubridCompatTest.java
@@ -27,8 +27,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ManagerCompatTestCase;
 
 /**
  * Test the basic Cubrid connection manager with the various column types.
diff --git a/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java b/src/test/org/apache/sqoop/manager/cubrid/CubridManagerExportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java
rename to src/test/org/apache/sqoop/manager/cubrid/CubridManagerExportTest.java
index 36aa821..4de8e40 100644
--- a/src/test/com/cloudera/sqoop/manager/CubridManagerExportTest.java
+++ b/src/test/org/apache/sqoop/manager/cubrid/CubridManagerExportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.cubrid;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -32,12 +32,11 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.manager.CubridManager;
-import org.apache.sqoop.manager.cubrid.CubridTestUtils;
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestExport;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java b/src/test/org/apache/sqoop/manager/cubrid/CubridManagerImportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java
rename to src/test/org/apache/sqoop/manager/cubrid/CubridManagerImportTest.java
index 03763ca..addf1ae 100644
--- a/src/test/com/cloudera/sqoop/manager/CubridManagerImportTest.java
+++ b/src/test/org/apache/sqoop/manager/cubrid/CubridManagerImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.cubrid;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -34,16 +34,17 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.sqoop.manager.ConnManager;
 import org.apache.sqoop.manager.CubridManager;
-import org.apache.sqoop.manager.cubrid.CubridTestUtils;
 import org.apache.sqoop.util.FileListing;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/org/apache/sqoop/manager/db2/DB2ImportAllTableWithSchemaManualTest.java b/src/test/org/apache/sqoop/manager/db2/DB2ImportAllTableWithSchemaManualTest.java
index db6e6bf..d1a6d69 100644
--- a/src/test/org/apache/sqoop/manager/db2/DB2ImportAllTableWithSchemaManualTest.java
+++ b/src/test/org/apache/sqoop/manager/db2/DB2ImportAllTableWithSchemaManualTest.java
@@ -26,7 +26,6 @@
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
-import java.util.Arrays;
 
 
 import org.apache.commons.logging.Log;
@@ -42,11 +41,11 @@
 import org.junit.Test;
 
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.FileListing;
 import org.apache.sqoop.util.LoggingUtils;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java b/src/test/org/apache/sqoop/manager/db2/DB2ManagerImportManualTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java
rename to src/test/org/apache/sqoop/manager/db2/DB2ManagerImportManualTest.java
index 2bc5c54..b5d47f2 100644
--- a/src/test/com/cloudera/sqoop/manager/DB2ManagerImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/db2/DB2ManagerImportManualTest.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.db2;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -31,14 +31,15 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.sqoop.manager.Db2Manager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/org/apache/sqoop/manager/db2/DB2XmlTypeImportManualTest.java b/src/test/org/apache/sqoop/manager/db2/DB2XmlTypeImportManualTest.java
index 2ae3af8..393a110 100644
--- a/src/test/org/apache/sqoop/manager/db2/DB2XmlTypeImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/db2/DB2XmlTypeImportManualTest.java
@@ -36,10 +36,10 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java b/src/test/org/apache/sqoop/manager/hsqldb/TestHsqldbManager.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java
rename to src/test/org/apache/sqoop/manager/hsqldb/TestHsqldbManager.java
index 8a6bb26..745a812 100644
--- a/src/test/com/cloudera/sqoop/manager/TestHsqldbManager.java
+++ b/src/test/org/apache/sqoop/manager/hsqldb/TestHsqldbManager.java
@@ -16,17 +16,18 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.hsqldb;
 
 import java.sql.SQLException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.sqoop.manager.ConnManager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.HsqldbTestServer;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java b/src/test/org/apache/sqoop/manager/mysql/DirectMySQLExportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java
rename to src/test/org/apache/sqoop/manager/mysql/DirectMySQLExportTest.java
index 9a48788..b3570ff 100644
--- a/src/test/com/cloudera/sqoop/manager/DirectMySQLExportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/DirectMySQLExportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.io.BufferedWriter;
 import java.io.IOException;
@@ -32,12 +32,13 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.sqoop.manager.DirectMySQLManager;
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
-import com.cloudera.sqoop.mapreduce.MySQLExportMapper;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestExport;
+import org.apache.sqoop.mapreduce.MySQLExportMapper;
 import org.junit.Ignore;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java b/src/test/org/apache/sqoop/manager/mysql/DirectMySQLTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java
rename to src/test/org/apache/sqoop/manager/mysql/DirectMySQLTest.java
index 247ce0b..89a7fec 100644
--- a/src/test/com/cloudera/sqoop/manager/DirectMySQLTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/DirectMySQLTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -36,10 +36,13 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.DirectMySQLManager;
+import org.apache.sqoop.manager.MySQLManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java b/src/test/org/apache/sqoop/manager/mysql/JdbcMySQLExportTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java
rename to src/test/org/apache/sqoop/manager/mysql/JdbcMySQLExportTest.java
index 6bf890b..f655bcc 100644
--- a/src/test/com/cloudera/sqoop/manager/JdbcMySQLExportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/JdbcMySQLExportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -26,11 +26,12 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.MySQLManager;
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestExport;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLAllTablesTest.java b/src/test/org/apache/sqoop/manager/mysql/MySQLAllTablesTest.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/manager/MySQLAllTablesTest.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLAllTablesTest.java
index ce4af81..baf0e2a 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLAllTablesTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLAllTablesTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -25,8 +25,8 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestAllTables;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestAllTables;
 
 /**
  * Test the --all-tables functionality with MySQL.
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java b/src/test/org/apache/sqoop/manager/mysql/MySQLAuthTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLAuthTest.java
index ed58c2b..1e2f70d 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLAuthTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLAuthTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -35,9 +35,10 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.manager.DirectMySQLManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLCompatTest.java b/src/test/org/apache/sqoop/manager/mysql/MySQLCompatTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/MySQLCompatTest.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLCompatTest.java
index 6539972..7e822e6 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLCompatTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLCompatTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -29,8 +29,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ManagerCompatTestCase;
 
 /**
  * Test the basic mysql connection manager with the various column types.
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLFreeFormQueryTest.java b/src/test/org/apache/sqoop/manager/mysql/MySQLFreeFormQueryTest.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/manager/MySQLFreeFormQueryTest.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLFreeFormQueryTest.java
index 22547f2..f4f0b74 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLFreeFormQueryTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLFreeFormQueryTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -27,8 +27,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestFreeFormQueryImport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestFreeFormQueryImport;
 
 /**
  * Test free form query import with the MySQL db.
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLLobAvroImportTest.java b/src/test/org/apache/sqoop/manager/mysql/MySQLLobAvroImportTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/manager/MySQLLobAvroImportTest.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLLobAvroImportTest.java
index cd146f4..a6121c9 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLLobAvroImportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLLobAvroImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -27,8 +27,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.LobAvroImportTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.LobAvroImportTestCase;
 
 /**
  * Tests BLOB/CLOB import for Avro with MySQL Db.
@@ -36,7 +36,7 @@
 public class MySQLLobAvroImportTest extends LobAvroImportTestCase {
 
   public static final Log LOG = LogFactory.getLog(
-      OracleCompatTest.class.getName());
+      MySQLLobAvroImportTest.class.getName());
   private MySQLTestUtils mySQLTestUtils = new MySQLTestUtils();
 
   @Override
diff --git a/src/test/com/cloudera/sqoop/manager/MySQLTestUtils.java b/src/test/org/apache/sqoop/manager/mysql/MySQLTestUtils.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/MySQLTestUtils.java
rename to src/test/org/apache/sqoop/manager/mysql/MySQLTestUtils.java
index 77aefde..25dbe9d 100644
--- a/src/test/com/cloudera/sqoop/manager/MySQLTestUtils.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySQLTestUtils.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.mysql;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java b/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
index 90dff97..22a6676 100644
--- a/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySqlCallExportTest.java
@@ -34,10 +34,9 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.MySQLTestUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java b/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
index 7ecc929..734499e 100644
--- a/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
+++ b/src/test/org/apache/sqoop/manager/mysql/MySqlColumnEscapeImportTest.java
@@ -18,10 +18,9 @@
 
 package org.apache.sqoop.manager.mysql;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.MySQLTestUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import org.apache.commons.logging.Log;
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
index b48b379..0a6997f 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaExportManualTest.java
@@ -27,7 +27,7 @@
 import org.apache.sqoop.manager.DirectNetezzaManager;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 
 import static org.junit.Assert.fail;
 
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
index 03cef89..9365ba0 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatExportManualTest.java
@@ -33,7 +33,7 @@
 import org.apache.sqoop.manager.NetezzaManager;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.junit.Test;
 
 /**
diff --git a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
index ed4ae19..c05b733 100644
--- a/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/DirectNetezzaHCatImportManualTest.java
@@ -33,7 +33,7 @@
 import org.apache.sqoop.manager.NetezzaManager;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.junit.Test;
 
 /**
diff --git a/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
index 79946c5..95abe7a 100644
--- a/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/NetezzaExportManualTest.java
@@ -33,10 +33,10 @@
 import org.apache.sqoop.manager.NetezzaManager;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestExport;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import static org.junit.Assert.fail;
 
diff --git a/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java b/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
index 1adbbdb..4002c64 100644
--- a/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/netezza/NetezzaImportManualTest.java
@@ -39,11 +39,11 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/org/apache/sqoop/manager/netezza/NetezzaTestUtils.java b/src/test/org/apache/sqoop/manager/netezza/NetezzaTestUtils.java
index 1abdb59..00deb8f 100644
--- a/src/test/org/apache/sqoop/manager/netezza/NetezzaTestUtils.java
+++ b/src/test/org/apache/sqoop/manager/netezza/NetezzaTestUtils.java
@@ -26,7 +26,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.TestExport.ColumnGenerator;
+import org.apache.sqoop.TestExport.ColumnGenerator;
 
 /**
  * Utilities for Netezza tests.
diff --git a/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java b/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
index 8e31c3f..bb33c35 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OraOopDataDrivenDBInputFormatConnectionCloseTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.junit.Before;
diff --git a/src/test/org/apache/sqoop/manager/oracle/OraOopOracleQueriesTest.java b/src/test/org/apache/sqoop/manager/oracle/OraOopOracleQueriesTest.java
index abd3329..dca2772 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OraOopOracleQueriesTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OraOopOracleQueriesTest.java
@@ -25,7 +25,7 @@
 
 import org.junit.Test;
 
-import com.cloudera.sqoop.manager.OracleUtils;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 
 /**
  * Test Oracle queries against Oracle database.
diff --git a/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java b/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
index 56dcac6..1bae71c 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OraOopTestCase.java
@@ -26,7 +26,6 @@
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -43,8 +42,8 @@
 import org.apache.sqoop.manager.oracle.util.HadoopFiles;
 import org.apache.sqoop.manager.oracle.util.OracleData;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.manager.OracleUtils;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 
 import static org.junit.Assert.assertEquals;
 
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
index a473f67..6d6602a 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleCallExportTest.java
@@ -34,10 +34,10 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.junit.Test;
 
 /**
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
index 1e3b799..d4146dc 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleColumnEscapeImportTest.java
@@ -18,10 +18,10 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import org.apache.commons.logging.Log;
diff --git a/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleCompatTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/OracleCompatTest.java
rename to src/test/org/apache/sqoop/manager/oracle/OracleCompatTest.java
index 0d615e3..553096a 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleCompatTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleCompatTest.java
@@ -16,18 +16,19 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle;
 
 import java.io.UnsupportedEncodingException;
 import java.sql.SQLException;
 import java.util.Formatter;
 
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ManagerCompatTestCase;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleConnectionFactoryTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleConnectionFactoryTest.java
index 9e6931b..34e182f 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleConnectionFactoryTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleConnectionFactoryTest.java
@@ -32,7 +32,7 @@
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.cloudera.sqoop.manager.OracleUtils;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 
 /**
  * Test OracleConnectionFactory class including initialization statements.
diff --git a/src/test/com/cloudera/sqoop/manager/OracleExportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleExportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/OracleExportTest.java
rename to src/test/org/apache/sqoop/manager/oracle/OracleExportTest.java
index fe2e265..a880af3 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleExportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleExportTest.java
@@ -16,20 +16,22 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle;
 
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
 
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.manager.OracleManager;
 import org.junit.After;
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestExport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestExport;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
diff --git a/src/test/com/cloudera/sqoop/manager/OracleFreeFormQueryTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleFreeFormQueryTest.java
similarity index 89%
rename from src/test/com/cloudera/sqoop/manager/OracleFreeFormQueryTest.java
rename to src/test/org/apache/sqoop/manager/oracle/OracleFreeFormQueryTest.java
index e4f9423..bb3e7c4 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleFreeFormQueryTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleFreeFormQueryTest.java
@@ -16,16 +16,17 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle;
 
 import java.sql.SQLException;
 
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestFreeFormQueryImport;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestFreeFormQueryImport;
 
 /**
  * Test free form query import with the Oracle db.
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
index 2a908b3..4dde3d6 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleIncrementalImportTest.java
@@ -18,10 +18,10 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleLobAvroImportTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java
rename to src/test/org/apache/sqoop/manager/oracle/OracleLobAvroImportTest.java
index a23f088..525ccf4 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleLobAvroImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleLobAvroImportTest.java
@@ -16,18 +16,19 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle;
 
 import java.io.UnsupportedEncodingException;
 import java.sql.SQLException;
 import java.util.Formatter;
 
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.LobAvroImportTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.LobAvroImportTestCase;
 
 import static org.junit.Assert.fail;
 
diff --git a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleManagerTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/OracleManagerTest.java
rename to src/test/org/apache/sqoop/manager/oracle/OracleManagerTest.java
index 817141b..9251f02 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleManagerTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleManagerTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -36,18 +36,22 @@
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.OracleManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -551,6 +555,6 @@
     // Make sure that the session username is the same as the Oracle
     // sqoop user name
     String sessionUserName = m1.getSessionUser(c1);
-    assertEquals(OracleUtils.ORACLE_USER_NAME, sessionUserName);
+    Assert.assertEquals(OracleUtils.ORACLE_USER_NAME, sessionUserName);
   }
 }
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleSpecialCharacterTableImportTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleSpecialCharacterTableImportTest.java
index 907be49..453ad82 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleSpecialCharacterTableImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleSpecialCharacterTableImportTest.java
@@ -18,10 +18,10 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import org.apache.commons.logging.Log;
diff --git a/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java b/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
index 6878608..33b7d74 100644
--- a/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/OracleSplitterTest.java
@@ -18,28 +18,18 @@
 
 package org.apache.sqoop.manager.oracle;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Writer;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
diff --git a/src/test/org/apache/sqoop/manager/oracle/SystemImportTest.java b/src/test/org/apache/sqoop/manager/oracle/SystemImportTest.java
index f6e5c0e..e0a0462 100644
--- a/src/test/org/apache/sqoop/manager/oracle/SystemImportTest.java
+++ b/src/test/org/apache/sqoop/manager/oracle/SystemImportTest.java
@@ -44,10 +44,10 @@
 import org.apache.sqoop.manager.oracle.util.*;
 import org.junit.Test;
 
-import com.cloudera.sqoop.lib.BlobRef;
-import com.cloudera.sqoop.lib.ClobRef;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.manager.OracleUtils;
+import org.apache.sqoop.lib.BlobRef;
+import org.apache.sqoop.lib.ClobRef;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
 
 /**
  * OraOop system tests of importing data from oracle to hadoop.
diff --git a/src/test/org/apache/sqoop/manager/oracle/TestOraOopDataDrivenDBInputFormat.java b/src/test/org/apache/sqoop/manager/oracle/TestOraOopDataDrivenDBInputFormat.java
index 7d3abfd..e98fdfe 100644
--- a/src/test/org/apache/sqoop/manager/oracle/TestOraOopDataDrivenDBInputFormat.java
+++ b/src/test/org/apache/sqoop/manager/oracle/TestOraOopDataDrivenDBInputFormat.java
@@ -25,7 +25,7 @@
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 
 import org.apache.sqoop.manager.oracle.OraOopConstants.
            OraOopOracleBlockToSplitAllocationMethod;
diff --git a/src/test/com/cloudera/sqoop/manager/OracleUtils.java b/src/test/org/apache/sqoop/manager/oracle/util/OracleUtils.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/OracleUtils.java
rename to src/test/org/apache/sqoop/manager/oracle/util/OracleUtils.java
index e99dfa9..6d752aa 100644
--- a/src/test/com/cloudera/sqoop/manager/OracleUtils.java
+++ b/src/test/org/apache/sqoop/manager/oracle/util/OracleUtils.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.oracle.util;
 
 import java.sql.Connection;
 import java.sql.SQLException;
@@ -25,7 +25,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
 
 /**
  * Helper methods for Oracle testing.
diff --git a/src/test/com/cloudera/sqoop/manager/DirectPostgreSQLExportManualTest.java b/src/test/org/apache/sqoop/manager/postgresql/DirectPostgreSQLExportManualTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/DirectPostgreSQLExportManualTest.java
rename to src/test/org/apache/sqoop/manager/postgresql/DirectPostgreSQLExportManualTest.java
index b7c7416..22b202a 100644
--- a/src/test/com/cloudera/sqoop/manager/DirectPostgreSQLExportManualTest.java
+++ b/src/test/org/apache/sqoop/manager/postgresql/DirectPostgreSQLExportManualTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.postgresql;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -27,8 +27,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobConf;
-import com.cloudera.sqoop.TestExport;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.TestExport;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.junit.Ignore;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java b/src/test/org/apache/sqoop/manager/postgresql/PGBulkloadManagerManualTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java
rename to src/test/org/apache/sqoop/manager/postgresql/PGBulkloadManagerManualTest.java
index da354bb..8855316 100644
--- a/src/test/com/cloudera/sqoop/manager/PGBulkloadManagerManualTest.java
+++ b/src/test/org/apache/sqoop/manager/postgresql/PGBulkloadManagerManualTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.postgresql;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -30,8 +30,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.junit.Test;
 
-import com.cloudera.sqoop.TestExport;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
+import org.apache.sqoop.TestExport;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 
 
 /**
diff --git a/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlExportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java
rename to src/test/org/apache/sqoop/manager/postgresql/PostgresqlExportTest.java
index 3323d07..f86b119 100644
--- a/src/test/com/cloudera/sqoop/manager/PostgresqlExportTest.java
+++ b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlExportTest.java
@@ -15,13 +15,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.postgresql;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.PostgresqlManager;
 import org.junit.Before;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/manager/PostgresqlExternalTableImportTest.java b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlExternalTableImportTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/manager/PostgresqlExternalTableImportTest.java
rename to src/test/org/apache/sqoop/manager/postgresql/PostgresqlExternalTableImportTest.java
index 29b3fdd..dd4cfb4 100644
--- a/src/test/com/cloudera/sqoop/manager/PostgresqlExternalTableImportTest.java
+++ b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlExternalTableImportTest.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.postgresql;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -36,14 +36,16 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.PostgresqlManager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 public class PostgresqlExternalTableImportTest extends ImportJobTestCase {
 
diff --git a/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlImportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java
rename to src/test/org/apache/sqoop/manager/postgresql/PostgresqlImportTest.java
index ceae47c..846228a 100644
--- a/src/test/com/cloudera/sqoop/manager/PostgresqlImportTest.java
+++ b/src/test/org/apache/sqoop/manager/postgresql/PostgresqlImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.postgresql;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -38,10 +38,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.PostgresqlManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
index be2b22c..15672b1 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/ManagerCompatExport.java
@@ -34,10 +34,10 @@
 import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
 import org.junit.Before;
 import org.junit.Test;
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
-import com.cloudera.sqoop.tool.ExportTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.tool.ExportTool;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
index a68ed30..293da00 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeExportSequenceFileTest.java
@@ -31,11 +31,11 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
-import com.cloudera.sqoop.tool.CodeGenTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.tool.CodeGenTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileTest.java
index a4d1822..520c4ac 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportDelimitedFileTest.java
@@ -31,12 +31,12 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Ignore;
 import org.junit.Test;
 
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileTest.java
index 409c4ad..592a78f 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerDatatypeImportSequenceFileTest.java
@@ -33,8 +33,8 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ManagerCompatTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ManagerCompatTestCase;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestDataFileParser.DATATYPES;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestData.KEY_STRINGS;
 
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportTest.java
index 535e599..e6b0865 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerHiveImportTest.java
@@ -26,10 +26,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.hive.TestHiveImport;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.hive.TestHiveImport;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.tool.SqoopTool;
 import org.junit.After;
 import org.junit.Before;
 
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerExportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java
rename to src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerExportTest.java
index c87994f..b7c2b75 100644
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerExportTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerExportTest.java
@@ -15,14 +15,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.sqlserver;
 
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.manager.SQLServerManager;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
 import org.junit.Before;
diff --git a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerImportTest.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java
rename to src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerImportTest.java
index 714a592..c83c2c9 100644
--- a/src/test/com/cloudera/sqoop/manager/SQLServerManagerImportTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerImportTest.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.manager;
+package org.apache.sqoop.manager.sqlserver;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -34,14 +34,15 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.manager.SQLServerManager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.util.FileListing;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.util.FileListing;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
index 67d8f1b..fdf856b 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
@@ -33,13 +33,13 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
index d48de99..fb765fb 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
@@ -23,8 +23,8 @@
 import java.sql.SQLException;
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestMultiCols;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestMultiCols;
 import org.junit.After;
 import org.junit.Test;
 
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
index be42da3..5e89cc9 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
@@ -36,15 +36,15 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
index 9547d80..833ebe8 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
@@ -36,15 +36,15 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.ReparseMapper;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.ReparseMapper;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
index 1d570fe..e0c8d67 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
@@ -31,14 +31,14 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByTest.java
index 4894b21..a1c2201 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerSplitByTest.java
@@ -31,14 +31,14 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereTest.java
index 2c5dbd3..11d0963 100644
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereTest.java
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerWhereTest.java
@@ -31,14 +31,14 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.SeqFileReader;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java b/src/test/org/apache/sqoop/mapreduce/TestImportJob.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java
rename to src/test/org/apache/sqoop/mapreduce/TestImportJob.java
index 6377ccd..941acb6 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java
+++ b/src/test/org/apache/sqoop/mapreduce/TestImportJob.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.mapreduce;
+package org.apache.sqoop.mapreduce;
 
 import java.io.BufferedWriter;
 import java.io.IOException;
@@ -38,13 +38,13 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.InjectableManagerFactory;
-import com.cloudera.sqoop.testutil.InjectableConnManager;
-import com.cloudera.sqoop.tool.ImportTool;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.InjectableManagerFactory;
+import org.apache.sqoop.testutil.InjectableConnManager;
+import org.apache.sqoop.tool.ImportTool;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.util.ClassLoaderStack;
diff --git a/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java b/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
index 250ffa6..a900b1c 100644
--- a/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
+++ b/src/test/org/apache/sqoop/mapreduce/TestJdbcExportJob.java
@@ -38,9 +38,9 @@
 import org.apache.sqoop.mapreduce.ExportJobBase.FileType;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ExportJobContext;
 
 
 /**
diff --git a/src/test/org/apache/sqoop/mapreduce/TestJobBase.java b/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
index 017f984..e1781bb 100644
--- a/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
+++ b/src/test/org/apache/sqoop/mapreduce/TestJobBase.java
@@ -31,8 +31,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.JobBase;
+import org.apache.sqoop.SqoopOptions;
 
 public class TestJobBase {
 
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
index 8257435..951a3dc 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestBigDecimalSplitter.java
@@ -20,7 +20,6 @@
 
 import java.math.BigDecimal;
 
-import com.cloudera.sqoop.mapreduce.db.BigDecimalSplitter;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java b/src/test/org/apache/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
rename to src/test/org/apache/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
index c07a38b..9e538fd 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.mapreduce.db;
+package org.apache.sqoop.mapreduce.db;
 
 import java.sql.*;
 import java.io.DataInput;
@@ -35,6 +35,8 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.mapreduce.DBWritable;
+import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
+import org.apache.sqoop.mapreduce.db.DBConfiguration;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
index efd0b95..b43fc41 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestIntegerSplitter.java
@@ -20,7 +20,6 @@
 import java.sql.SQLException;
 import java.util.List;
 
-import com.cloudera.sqoop.mapreduce.db.IntegerSplitter;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestSQLServerDBRecordReader.java b/src/test/org/apache/sqoop/mapreduce/db/TestSQLServerDBRecordReader.java
index fc04a90..8b49c8d 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestSQLServerDBRecordReader.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestSQLServerDBRecordReader.java
@@ -1,10 +1,8 @@
 package org.apache.sqoop.mapreduce.db;
 
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.mapreduce.db.DBConfiguration;
-import com.cloudera.sqoop.mapreduce.db.DBInputFormat;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.RecordParser;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index 911749f..5d9cdf0 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -21,7 +21,6 @@
 import java.sql.SQLException;
 import java.util.List;
 
-import com.cloudera.sqoop.mapreduce.db.TextSplitter;
 import org.apache.sqoop.validation.ValidationException;
 import org.junit.Test;
 
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
index 6a521bf..9eb8922 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TextSplitterHadoopConfIntegrationTest.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.sqoop.validation.ValidationException;
 
-import com.cloudera.sqoop.testutil.MockResultSet;
+import org.apache.sqoop.testutil.MockResultSet;
 
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
index 0614154..3547294 100644
--- a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
+++ b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
@@ -51,8 +51,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.lib.DelimiterSet;
-import com.cloudera.sqoop.lib.LargeObjectLoader;
+import org.apache.sqoop.lib.DelimiterSet;
+import org.apache.sqoop.lib.LargeObjectLoader;
 
 public class TestMainframeDatasetFTPRecordReader {
 
diff --git a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
index ba7d240..efef056 100644
--- a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
+++ b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
@@ -43,7 +43,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.config.ConfigurationHelper;
 
 public class TestMainframeDatasetInputFormat {
 
diff --git a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeImportJob.java b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeImportJob.java
index 4c8f584..a133e58 100644
--- a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeImportJob.java
+++ b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeImportJob.java
@@ -30,8 +30,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
 
 public class TestMainframeImportJob {
 
diff --git a/src/test/com/cloudera/sqoop/metastore/JobToolTestBase.java b/src/test/org/apache/sqoop/metastore/JobToolTestBase.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/JobToolTestBase.java
rename to src/test/org/apache/sqoop/metastore/JobToolTestBase.java
index 2f46ec9..214063d 100644
--- a/src/test/com/cloudera/sqoop/metastore/JobToolTestBase.java
+++ b/src/test/org/apache/sqoop/metastore/JobToolTestBase.java
@@ -16,13 +16,13 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore;
+package org.apache.sqoop.metastore;
 
 import static org.junit.Assert.assertEquals;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -95,7 +95,7 @@
 
     private Connection getConnection(SqoopOptions options) {
         try {
-            com.cloudera.sqoop.metastore.JobData jd = new com.cloudera.sqoop.metastore.JobData(options, null);
+            org.apache.sqoop.metastore.JobData jd = new org.apache.sqoop.metastore.JobData(options, null);
             DefaultManagerFactory dmf = new DefaultManagerFactory();
             cm = dmf.accept(jd);
             return cm.getConnection();
diff --git a/src/test/com/cloudera/sqoop/metastore/MetaConnectIncrementalImportTestBase.java b/src/test/org/apache/sqoop/metastore/MetaConnectIncrementalImportTestBase.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/MetaConnectIncrementalImportTestBase.java
rename to src/test/org/apache/sqoop/metastore/MetaConnectIncrementalImportTestBase.java
index 587aaff..53f55d1 100644
--- a/src/test/com/cloudera/sqoop/metastore/MetaConnectIncrementalImportTestBase.java
+++ b/src/test/org/apache/sqoop/metastore/MetaConnectIncrementalImportTestBase.java
@@ -16,13 +16,13 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore;
+package org.apache.sqoop.metastore;
 
 import static org.junit.Assert.assertEquals;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -206,8 +206,8 @@
         options.setConnectString(metaConnectString);
         options.setUsername(metaUser);
         options.setPassword(metaPass);
-        com.cloudera.sqoop.metastore.JobData jd =
-                new com.cloudera.sqoop.metastore.JobData(options, new JobTool());
+        org.apache.sqoop.metastore.JobData jd =
+                new org.apache.sqoop.metastore.JobData(options, new JobTool());
         DefaultManagerFactory dmf = new DefaultManagerFactory();
         cm = dmf.accept(jd);
         connMeta= cm.getConnection();
diff --git a/src/test/com/cloudera/sqoop/metastore/SavedJobsTestBase.java b/src/test/org/apache/sqoop/metastore/SavedJobsTestBase.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/metastore/SavedJobsTestBase.java
rename to src/test/org/apache/sqoop/metastore/SavedJobsTestBase.java
index 3a414ea..9c9b2f4 100644
--- a/src/test/com/cloudera/sqoop/metastore/SavedJobsTestBase.java
+++ b/src/test/org/apache/sqoop/metastore/SavedJobsTestBase.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore;
+package org.apache.sqoop.metastore;
 
 import static org.apache.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
 import static org.apache.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
@@ -26,13 +26,16 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.VersionTool;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.VersionTool;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.manager.DefaultManagerFactory;
 import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.metastore.JobStorage;
+import org.apache.sqoop.metastore.JobStorageFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -245,7 +248,7 @@
         assertEquals("Job list should start out empty", 0, jobs.size());
 
         // Create a job with extra args
-        com.cloudera.sqoop.SqoopOptions opts = new SqoopOptions();
+        SqoopOptions opts = new SqoopOptions();
         String[] args = {"-schema", "test"};
         opts.setExtraArgs(args);
         JobData data = new JobData(opts, new VersionTool());
@@ -301,11 +304,11 @@
             outData.getSqoopTool().getToolName());
   }
 
-  private com.cloudera.sqoop.metastore.JobData createTestJobData(String setTableName) throws IOException {
+  private org.apache.sqoop.metastore.JobData createTestJobData(String setTableName) throws IOException {
     SqoopOptions testOpts = new SqoopOptions();
     testOpts.setTableName(setTableName);
     ImportTool testTool = new ImportTool();
-    return new com.cloudera.sqoop.metastore.JobData(testOpts,testTool);
+    return new org.apache.sqoop.metastore.JobData(testOpts,testTool);
 
   }
 }
\ No newline at end of file
diff --git a/src/test/com/cloudera/sqoop/metastore/TestMetastoreConfigurationParameters.java b/src/test/org/apache/sqoop/metastore/TestMetastoreConfigurationParameters.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/metastore/TestMetastoreConfigurationParameters.java
rename to src/test/org/apache/sqoop/metastore/TestMetastoreConfigurationParameters.java
index a485b9a..391dc33 100644
--- a/src/test/com/cloudera/sqoop/metastore/TestMetastoreConfigurationParameters.java
+++ b/src/test/org/apache/sqoop/metastore/TestMetastoreConfigurationParameters.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore;
+package org.apache.sqoop.metastore;
 
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.HsqldbTestServer;
 import org.apache.sqoop.Sqoop;
 import org.apache.sqoop.testutil.Argument;
 import org.apache.sqoop.tool.JobTool;
diff --git a/src/test/com/cloudera/sqoop/metastore/db2/DB2JobToolTest.java b/src/test/org/apache/sqoop/metastore/db2/DB2JobToolTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/metastore/db2/DB2JobToolTest.java
rename to src/test/org/apache/sqoop/metastore/db2/DB2JobToolTest.java
index b92d36a..b2b1fb6 100644
--- a/src/test/com/cloudera/sqoop/metastore/db2/DB2JobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/db2/DB2JobToolTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.db2;
+package org.apache.sqoop.metastore.db2;
 
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.metastore.JobToolTestBase;
 
 /**
  * Test that the Job Tool works in DB2
diff --git a/src/test/com/cloudera/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java
index c1ae70c..e7969fa 100644
--- a/src/test/com/cloudera/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/db2/DB2MetaConnectIncrementalImportTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.db2;
+package org.apache.sqoop.metastore.db2;
 
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 
 /**
  * Test that Incremental-Import values are stored correctly in DB2
diff --git a/src/test/com/cloudera/sqoop/metastore/db2/DB2SavedJobsTest.java b/src/test/org/apache/sqoop/metastore/db2/DB2SavedJobsTest.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/metastore/db2/DB2SavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/db2/DB2SavedJobsTest.java
index efeef62..caf753c 100644
--- a/src/test/com/cloudera/sqoop/metastore/db2/DB2SavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/db2/DB2SavedJobsTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.db2;
+package org.apache.sqoop.metastore.db2;
 
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobToolTest.java b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbJobToolTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobToolTest.java
rename to src/test/org/apache/sqoop/metastore/hsqldb/HsqldbJobToolTest.java
index 07eefee..bc829b7 100644
--- a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbJobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbJobToolTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.hsqldb;
+package org.apache.sqoop.metastore.hsqldb;
 
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.metastore.JobToolTestBase;
 
 /**
  * Test that the Job Tool works in Hsqldb
diff --git a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java
similarity index 91%
rename from src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java
index d302bfb..9a8ee24 100644
--- a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbMetaConnectIncrementalImportTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.hsqldb;
+package org.apache.sqoop.metastore.hsqldb;
 
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 
 /**
  * Test that Incremental-Import values are stored correctly in Hsqldb
diff --git a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java
similarity index 93%
rename from src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java
index 398f1a0..f072881 100644
--- a/src/test/com/cloudera/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/hsqldb/HsqldbSavedJobsTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.hsqldb;
+package org.apache.sqoop.metastore.hsqldb;
 
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlJobToolTest.java b/src/test/org/apache/sqoop/metastore/mysql/MySqlJobToolTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/metastore/mysql/MySqlJobToolTest.java
rename to src/test/org/apache/sqoop/metastore/mysql/MySqlJobToolTest.java
index 6a6bae4..2ec9648 100644
--- a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlJobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/mysql/MySqlJobToolTest.java
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.mysql;
+package org.apache.sqoop.metastore.mysql;
 
-import com.cloudera.sqoop.manager.MySQLTestUtils;
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.manager.mysql.MySQLTestUtils;
+import org.apache.sqoop.metastore.JobToolTestBase;
 
 /**
  * Test that the Job Tool works in MySql
diff --git a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java
index 3a97cfd..e19bbc8 100644
--- a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/mysql/MySqlMetaConnectIncrementalImportTest.java
@@ -16,11 +16,11 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.mysql;
+package org.apache.sqoop.metastore.mysql;
 
 
-import com.cloudera.sqoop.manager.MySQLTestUtils;
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.manager.mysql.MySQLTestUtils;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 
 /**
  * Test that Incremental-Import values are stored correctly in MySql
diff --git a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlSavedJobsTest.java b/src/test/org/apache/sqoop/metastore/mysql/MySqlSavedJobsTest.java
similarity index 93%
rename from src/test/com/cloudera/sqoop/metastore/mysql/MySqlSavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/mysql/MySqlSavedJobsTest.java
index febb7da..e15c322 100644
--- a/src/test/com/cloudera/sqoop/metastore/mysql/MySqlSavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/mysql/MySqlSavedJobsTest.java
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.mysql;
+package org.apache.sqoop.metastore.mysql;
 
-import com.cloudera.sqoop.manager.MySQLTestUtils;
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.manager.mysql.MySQLTestUtils;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/oracle/OracleJobToolTest.java b/src/test/org/apache/sqoop/metastore/oracle/OracleJobToolTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/metastore/oracle/OracleJobToolTest.java
rename to src/test/org/apache/sqoop/metastore/oracle/OracleJobToolTest.java
index 4891b00..a3e61e9 100644
--- a/src/test/com/cloudera/sqoop/metastore/oracle/OracleJobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/oracle/OracleJobToolTest.java
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.oracle;
+package org.apache.sqoop.metastore.oracle;
 
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.metastore.JobToolTestBase;
 
 /**
  * Test that the Job Tool works in Oracle
diff --git a/src/test/com/cloudera/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java
similarity index 91%
rename from src/test/com/cloudera/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java
index f916a13..37beaa4 100644
--- a/src/test/com/cloudera/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/oracle/OracleMetaConnectIncrementalImportTest.java
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.oracle;
+package org.apache.sqoop.metastore.oracle;
 
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 
 /**
  * Test that Incremental-Import values are stored correctly in Oracle
diff --git a/src/test/com/cloudera/sqoop/metastore/oracle/OracleSavedJobsTest.java b/src/test/org/apache/sqoop/metastore/oracle/OracleSavedJobsTest.java
similarity index 92%
rename from src/test/com/cloudera/sqoop/metastore/oracle/OracleSavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/oracle/OracleSavedJobsTest.java
index 0f487d1..4691530 100644
--- a/src/test/com/cloudera/sqoop/metastore/oracle/OracleSavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/oracle/OracleSavedJobsTest.java
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.oracle;
+package org.apache.sqoop.metastore.oracle;
 
-import com.cloudera.sqoop.manager.OracleUtils;
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.manager.oracle.util.OracleUtils;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresJobToolTest.java b/src/test/org/apache/sqoop/metastore/postgres/PostgresJobToolTest.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/postgres/PostgresJobToolTest.java
rename to src/test/org/apache/sqoop/metastore/postgres/PostgresJobToolTest.java
index b596fc8..065e1bb 100644
--- a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresJobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/postgres/PostgresJobToolTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.postgres;
+package org.apache.sqoop.metastore.postgres;
 
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.metastore.JobToolTestBase;
 
 /**
  * Test that the Job Tool works in PostgreSQL
diff --git a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java
index 21f4938..0ffbf5a 100644
--- a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/postgres/PostgresMetaConnectIncrementalImportTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.postgres;
+package org.apache.sqoop.metastore.postgres;
 
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 
 /**
  * Test that Incremental-Import values are stored correctly in PostgreSQL
diff --git a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresSavedJobsTest.java b/src/test/org/apache/sqoop/metastore/postgres/PostgresSavedJobsTest.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/postgres/PostgresSavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/postgres/PostgresSavedJobsTest.java
index ed06cb2..ee3f005 100644
--- a/src/test/com/cloudera/sqoop/metastore/postgres/PostgresSavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/postgres/PostgresSavedJobsTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.postgres;
+package org.apache.sqoop.metastore.postgres;
 
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerJobToolTest.java b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerJobToolTest.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerJobToolTest.java
rename to src/test/org/apache/sqoop/metastore/sqlserver/SqlServerJobToolTest.java
index e3f8bde..87d7b34 100644
--- a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerJobToolTest.java
+++ b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerJobToolTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.sqlserver;
+package org.apache.sqoop.metastore.sqlserver;
 
-import com.cloudera.sqoop.metastore.JobToolTestBase;
+import org.apache.sqoop.metastore.JobToolTestBase;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java
rename to src/test/org/apache/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java
index 3c8ac5f..f1a2a66 100644
--- a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java
+++ b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerMetaConnectIncrementalImportTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.sqlserver;
+package org.apache.sqoop.metastore.sqlserver;
 
-import com.cloudera.sqoop.metastore.MetaConnectIncrementalImportTestBase;
+import org.apache.sqoop.metastore.MetaConnectIncrementalImportTestBase;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java
rename to src/test/org/apache/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java
index 5589f14..b37623b 100644
--- a/src/test/com/cloudera/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java
+++ b/src/test/org/apache/sqoop/metastore/sqlserver/SqlServerSavedJobsTest.java
@@ -16,9 +16,9 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.metastore.sqlserver;
+package org.apache.sqoop.metastore.sqlserver;
 
-import com.cloudera.sqoop.metastore.SavedJobsTestBase;
+import org.apache.sqoop.metastore.SavedJobsTestBase;
 import org.apache.sqoop.manager.JdbcDrivers;
 import org.apache.sqoop.manager.sqlserver.MSSQLTestUtils;
 
diff --git a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java b/src/test/org/apache/sqoop/orm/TestClassWriter.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/orm/TestClassWriter.java
rename to src/test/org/apache/sqoop/orm/TestClassWriter.java
index b3a8a17..0cc07cf 100644
--- a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java
+++ b/src/test/org/apache/sqoop/orm/TestClassWriter.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.orm;
+package org.apache.sqoop.orm;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -39,14 +39,14 @@
 import org.junit.Rule;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestConnFactory.DummyManager;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.testutil.DirUtil;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.TestConnFactory.DummyManager;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.testutil.DirUtil;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.rules.ExpectedException;
 
 import java.lang.reflect.Field;
diff --git a/src/test/org/apache/sqoop/orm/TestCompilationManager.java b/src/test/org/apache/sqoop/orm/TestCompilationManager.java
index b9e2608..abd72d8 100644
--- a/src/test/org/apache/sqoop/orm/TestCompilationManager.java
+++ b/src/test/org/apache/sqoop/orm/TestCompilationManager.java
@@ -17,7 +17,7 @@
  */
 package org.apache.sqoop.orm;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.junit.Before;
 import org.junit.Test;
 
diff --git a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java b/src/test/org/apache/sqoop/orm/TestParseMethods.java
similarity index 93%
rename from src/test/com/cloudera/sqoop/orm/TestParseMethods.java
rename to src/test/org/apache/sqoop/orm/TestParseMethods.java
index 017fb9f..46bb52d 100644
--- a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java
+++ b/src/test/org/apache/sqoop/orm/TestParseMethods.java
@@ -16,12 +16,11 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.orm;
+package org.apache.sqoop.orm;
 
 import java.io.IOException;
 import java.util.ArrayList;
 
-import com.cloudera.sqoop.testutil.*;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -30,15 +29,20 @@
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ExplicitSetMapper;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.testutil.ReparseMapper;
 import org.apache.sqoop.tool.BaseSqoopTool;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.config.ConfigurationHelper;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.config.ConfigurationHelper;
 
-import com.cloudera.sqoop.testutil.ExplicitSetMapper;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java b/src/test/org/apache/sqoop/testutil/BaseSqoopTestCase.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
rename to src/test/org/apache/sqoop/testutil/BaseSqoopTestCase.java
index b5d46d7..588f439 100644
--- a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java
+++ b/src/test/org/apache/sqoop/testutil/BaseSqoopTestCase.java
@@ -16,13 +16,13 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
-import com.cloudera.sqoop.ConnFactory;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.tool.ImportTool;
+import org.apache.sqoop.ConnFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.tool.ImportTool;
 import com.google.common.collect.ObjectArrays;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/test/com/cloudera/sqoop/testutil/CommonArgs.java b/src/test/org/apache/sqoop/testutil/CommonArgs.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/testutil/CommonArgs.java
rename to src/test/org/apache/sqoop/testutil/CommonArgs.java
index ea4dad5..a91addf 100644
--- a/src/test/com/cloudera/sqoop/testutil/CommonArgs.java
+++ b/src/test/org/apache/sqoop/testutil/CommonArgs.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.util.List;
 
diff --git a/src/test/com/cloudera/sqoop/testutil/DirUtil.java b/src/test/org/apache/sqoop/testutil/DirUtil.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/testutil/DirUtil.java
rename to src/test/org/apache/sqoop/testutil/DirUtil.java
index c924e3c..7ca5d53 100644
--- a/src/test/com/cloudera/sqoop/testutil/DirUtil.java
+++ b/src/test/org/apache/sqoop/testutil/DirUtil.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.File;
 
diff --git a/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java b/src/test/org/apache/sqoop/testutil/ExplicitSetMapper.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java
rename to src/test/org/apache/sqoop/testutil/ExplicitSetMapper.java
index 344dc6e..c1122c0 100644
--- a/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java
+++ b/src/test/org/apache/sqoop/testutil/ExplicitSetMapper.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.IOException;
 
@@ -34,7 +34,7 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java b/src/test/org/apache/sqoop/testutil/ExportJobTestCase.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java
rename to src/test/org/apache/sqoop/testutil/ExportJobTestCase.java
index 2433275..aa5960e 100644
--- a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java
+++ b/src/test/org/apache/sqoop/testutil/ExportJobTestCase.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.IOException;
 import java.sql.Connection;
@@ -32,10 +32,10 @@
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
-import com.cloudera.sqoop.tool.ExportTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.mapreduce.ExportOutputFormat;
+import org.apache.sqoop.tool.ExportTool;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
diff --git a/src/test/com/cloudera/sqoop/testutil/HsqldbTestServer.java b/src/test/org/apache/sqoop/testutil/HsqldbTestServer.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/testutil/HsqldbTestServer.java
rename to src/test/org/apache/sqoop/testutil/HsqldbTestServer.java
index ad68b61..c63a8f2 100644
--- a/src/test/com/cloudera/sqoop/testutil/HsqldbTestServer.java
+++ b/src/test/org/apache/sqoop/testutil/HsqldbTestServer.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.util.Arrays;
 
@@ -29,9 +29,9 @@
 import org.apache.commons.logging.LogFactory;
 import org.hsqldb.Server;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.HsqldbManager;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.HsqldbManager;
 
 /**
  * Create a simple hsqldb server and schema to use for testing.
diff --git a/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java b/src/test/org/apache/sqoop/testutil/ImportJobTestCase.java
similarity index 95%
rename from src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java
rename to src/test/org/apache/sqoop/testutil/ImportJobTestCase.java
index 6368980..dbefe20 100644
--- a/src/test/com/cloudera/sqoop/testutil/ImportJobTestCase.java
+++ b/src/test/org/apache/sqoop/testutil/ImportJobTestCase.java
@@ -16,26 +16,25 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.EOFException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.orm.CompilationManager;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.util.ClassLoaderStack;
 import org.junit.Before;
 
 import static org.junit.Assert.assertEquals;
diff --git a/src/test/com/cloudera/sqoop/testutil/InjectableConnManager.java b/src/test/org/apache/sqoop/testutil/InjectableConnManager.java
similarity index 91%
rename from src/test/com/cloudera/sqoop/testutil/InjectableConnManager.java
rename to src/test/org/apache/sqoop/testutil/InjectableConnManager.java
index 3b01d28..b4917fe 100644
--- a/src/test/com/cloudera/sqoop/testutil/InjectableConnManager.java
+++ b/src/test/org/apache/sqoop/testutil/InjectableConnManager.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.IOException;
 
@@ -26,11 +26,11 @@
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.HsqldbManager;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.ImportJobBase;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.HsqldbManager;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.ImportJobBase;
+import org.apache.sqoop.util.ImportException;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/testutil/InjectableManagerFactory.java b/src/test/org/apache/sqoop/testutil/InjectableManagerFactory.java
similarity index 83%
rename from src/test/com/cloudera/sqoop/testutil/InjectableManagerFactory.java
rename to src/test/org/apache/sqoop/testutil/InjectableManagerFactory.java
index 24d8493..48c3461 100644
--- a/src/test/com/cloudera/sqoop/testutil/InjectableManagerFactory.java
+++ b/src/test/org/apache/sqoop/testutil/InjectableManagerFactory.java
@@ -16,11 +16,12 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
-import com.cloudera.sqoop.metastore.JobData;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.testutil.InjectableConnManager;
 
 /**
  * ManagerFactory that is used for testing; this accepts any
diff --git a/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java b/src/test/org/apache/sqoop/testutil/LobAvroImportTestCase.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java
rename to src/test/org/apache/sqoop/testutil/LobAvroImportTestCase.java
index 7469799..20d6151 100644
--- a/src/test/com/cloudera/sqoop/testutil/LobAvroImportTestCase.java
+++ b/src/test/org/apache/sqoop/testutil/LobAvroImportTestCase.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import org.junit.After;
 import org.junit.FixMethodOrder;
diff --git a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java b/src/test/org/apache/sqoop/testutil/ManagerCompatTestCase.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java
rename to src/test/org/apache/sqoop/testutil/ManagerCompatTestCase.java
index 7db044c..248e100 100644
--- a/src/test/com/cloudera/sqoop/testutil/ManagerCompatTestCase.java
+++ b/src/test/org/apache/sqoop/testutil/ManagerCompatTestCase.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.UnsupportedEncodingException;
 import java.sql.Blob;
diff --git a/src/test/com/cloudera/sqoop/testutil/MockResultSet.java b/src/test/org/apache/sqoop/testutil/MockResultSet.java
similarity index 99%
rename from src/test/com/cloudera/sqoop/testutil/MockResultSet.java
rename to src/test/org/apache/sqoop/testutil/MockResultSet.java
index de7d3cb..d9313d8 100644
--- a/src/test/com/cloudera/sqoop/testutil/MockResultSet.java
+++ b/src/test/org/apache/sqoop/testutil/MockResultSet.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.ByteArrayInputStream;
 import java.io.InputStream;
diff --git a/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java b/src/test/org/apache/sqoop/testutil/ReparseMapper.java
similarity index 96%
rename from src/test/com/cloudera/sqoop/testutil/ReparseMapper.java
rename to src/test/org/apache/sqoop/testutil/ReparseMapper.java
index 7e47df6..14a2c26 100644
--- a/src/test/com/cloudera/sqoop/testutil/ReparseMapper.java
+++ b/src/test/org/apache/sqoop/testutil/ReparseMapper.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.IOException;
 
@@ -32,8 +32,8 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import com.cloudera.sqoop.lib.RecordParser;
-import com.cloudera.sqoop.lib.SqoopRecord;
+import org.apache.sqoop.lib.RecordParser;
+import org.apache.sqoop.lib.SqoopRecord;
 import org.apache.hadoop.util.ReflectionUtils;
 
 
diff --git a/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java b/src/test/org/apache/sqoop/testutil/SeqFileReader.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/testutil/SeqFileReader.java
rename to src/test/org/apache/sqoop/testutil/SeqFileReader.java
index 68c1ec5..8278960 100644
--- a/src/test/com/cloudera/sqoop/testutil/SeqFileReader.java
+++ b/src/test/org/apache/sqoop/testutil/SeqFileReader.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.testutil;
+package org.apache.sqoop.testutil;
 
 import java.io.IOException;
 
diff --git a/src/test/org/apache/sqoop/tool/ImportToolValidateOptionsTest.java b/src/test/org/apache/sqoop/tool/ImportToolValidateOptionsTest.java
index a85abb8..bdac437 100644
--- a/src/test/org/apache/sqoop/tool/ImportToolValidateOptionsTest.java
+++ b/src/test/org/apache/sqoop/tool/ImportToolValidateOptionsTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -29,10 +29,10 @@
 
 import java.util.Arrays;
 
-import static com.cloudera.sqoop.SqoopOptions.FileLayout.SequenceFile;
-import static com.cloudera.sqoop.SqoopOptions.FileLayout.AvroDataFile;
-import static com.cloudera.sqoop.SqoopOptions.FileLayout.ParquetFile;
-import static com.cloudera.sqoop.SqoopOptions.FileLayout.TextFile;
+import static org.apache.sqoop.SqoopOptions.FileLayout.SequenceFile;
+import static org.apache.sqoop.SqoopOptions.FileLayout.AvroDataFile;
+import static org.apache.sqoop.SqoopOptions.FileLayout.ParquetFile;
+import static org.apache.sqoop.SqoopOptions.FileLayout.TextFile;
 
 @RunWith(Parameterized.class)
 public class ImportToolValidateOptionsTest {
diff --git a/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java b/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
index ddf046e..01ad150 100644
--- a/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
+++ b/src/test/org/apache/sqoop/tool/TestBaseSqoopTool.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/tool/TestExportToolValidateOptions.java b/src/test/org/apache/sqoop/tool/TestExportToolValidateOptions.java
index 0018fb1..f16d187 100644
--- a/src/test/org/apache/sqoop/tool/TestExportToolValidateOptions.java
+++ b/src/test/org/apache/sqoop/tool/TestExportToolValidateOptions.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.manager.SupportedManagers;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/tool/TestImportTool.java b/src/test/org/apache/sqoop/tool/TestImportTool.java
index 6335fb1..1c0cf4d 100644
--- a/src/test/org/apache/sqoop/tool/TestImportTool.java
+++ b/src/test/org/apache/sqoop/tool/TestImportTool.java
@@ -32,8 +32,8 @@
 
 import java.sql.Connection;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.hive.HiveImport;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.hive.HiveImport;
 import org.apache.avro.Schema;
 import org.apache.sqoop.SqoopOptions;
 import org.apache.sqoop.avro.AvroSchemaMismatchException;
@@ -77,7 +77,7 @@
 
     ImportTool importTool = spy(new ImportTool("import", mock(CodeGenTool.class), false));
 
-    doReturn(true).when(importTool).init(any(com.cloudera.sqoop.SqoopOptions.class));
+    doReturn(true).when(importTool).init(any(SqoopOptions.class));
 
     Schema writtenWithSchema = mock(Schema.class);
     when(writtenWithSchema.toString()).thenReturn(writtenWithSchemaString);
@@ -85,9 +85,9 @@
     when(actualSchema.toString()).thenReturn(actualSchemaString);
 
     AvroSchemaMismatchException expectedException = new AvroSchemaMismatchException(errorMessage, writtenWithSchema, actualSchema);
-    doThrow(expectedException).when(importTool).importTable(any(com.cloudera.sqoop.SqoopOptions.class), anyString(), any(HiveImport.class));
+    doThrow(expectedException).when(importTool).importTable(any(SqoopOptions.class), anyString(), any(HiveImport.class));
 
-    com.cloudera.sqoop.SqoopOptions sqoopOptions = mock(com.cloudera.sqoop.SqoopOptions.class);
+    SqoopOptions sqoopOptions = mock(SqoopOptions.class);
     when(sqoopOptions.doHiveImport()).thenReturn(true);
 
     logMessage.expectError(expectedException.getMessage());
@@ -100,7 +100,7 @@
   @Test (expected = InvalidOptionsException.class)
   public void testExternalTableNoHiveImportThrowsException() throws InvalidOptionsException {
     String hdfsTableDir = "/data/movielens/genre";
-    com.cloudera.sqoop.SqoopOptions options = new com.cloudera.sqoop.SqoopOptions("jdbc:postgresql://localhost/movielens", "genres");
+    SqoopOptions options = new SqoopOptions("jdbc:postgresql://localhost/movielens", "genres");
     options.setHiveExternalTableDir(hdfsTableDir);
     ImportTool tool = new ImportTool("Import Tool", false);
     tool.validateHiveOptions(options);
diff --git a/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java b/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
index d51e33e..0b0c6c3 100644
--- a/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
+++ b/src/test/org/apache/sqoop/tool/TestMainframeImportTool.java
@@ -24,17 +24,16 @@
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.sqoop.Sqoop;
 import org.apache.sqoop.cli.RelatedOptions;
 import org.apache.sqoop.mapreduce.mainframe.MainframeConfiguration;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
diff --git a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java b/src/test/org/apache/sqoop/tool/TestToolPlugin.java
similarity index 94%
rename from src/test/com/cloudera/sqoop/tool/TestToolPlugin.java
rename to src/test/org/apache/sqoop/tool/TestToolPlugin.java
index da1ef65..19dea22 100644
--- a/src/test/com/cloudera/sqoop/tool/TestToolPlugin.java
+++ b/src/test/org/apache/sqoop/tool/TestToolPlugin.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package com.cloudera.sqoop.tool;
+package org.apache.sqoop.tool;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -28,12 +28,12 @@
 
 import org.apache.hadoop.util.StringUtils;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
 
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
 
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/src/test/org/apache/sqoop/tool/TestValidateImportOptions.java b/src/test/org/apache/sqoop/tool/TestValidateImportOptions.java
index d4084ed..9b61bd5 100644
--- a/src/test/org/apache/sqoop/tool/TestValidateImportOptions.java
+++ b/src/test/org/apache/sqoop/tool/TestValidateImportOptions.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.manager.SupportedManagers;
 import org.apache.commons.lang.RandomStringUtils;
diff --git a/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java b/src/test/org/apache/sqoop/util/TestOptionsFileExpansion.java
similarity index 98%
rename from src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java
rename to src/test/org/apache/sqoop/util/TestOptionsFileExpansion.java
index d403f3b..3fc9e60 100644
--- a/src/test/com/cloudera/sqoop/util/TestOptionsFileExpansion.java
+++ b/src/test/org/apache/sqoop/util/TestOptionsFileExpansion.java
@@ -15,16 +15,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.util;
+package org.apache.sqoop.util;
 
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 
+import org.apache.sqoop.util.OptionsFileUtil;
 import org.junit.Assert;
 
-import com.cloudera.sqoop.Sqoop;
+import org.apache.sqoop.Sqoop;
 import org.junit.Test;
 
 /**
diff --git a/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java b/src/test/org/apache/sqoop/util/TestSubstitutionUtils.java
similarity index 97%
rename from src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java
rename to src/test/org/apache/sqoop/util/TestSubstitutionUtils.java
index b6b072f..a2ac341 100644
--- a/src/test/com/cloudera/sqoop/util/TestSubstitutionUtils.java
+++ b/src/test/org/apache/sqoop/util/TestSubstitutionUtils.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package com.cloudera.sqoop.util;
+package org.apache.sqoop.util;
 
 import org.apache.sqoop.util.SubstitutionUtils;
 import org.junit.Test;
diff --git a/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java b/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
index f5808b2..ee04563 100644
--- a/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
+++ b/src/test/org/apache/sqoop/validation/AbortOnFailureHandlerTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.validation;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
 
diff --git a/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java b/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
index 9ba62d4..8011622 100644
--- a/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
+++ b/src/test/org/apache/sqoop/validation/RowCountValidatorImportTest.java
@@ -18,8 +18,8 @@
 
 package org.apache.sqoop.validation;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.ImportJobTestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.tool.ImportTool;
 import org.junit.Test;
@@ -129,8 +129,7 @@
     } catch (Exception e) {
       System.out.println("e.getMessage() = " + e.getMessage());
       System.out.println("e.getClass() = " + e.getClass());
-      assertEquals(
-        com.cloudera.sqoop.SqoopOptions.InvalidOptionsException.class,
+      assertEquals(SqoopOptions.InvalidOptionsException.class,
         e.getClass());
     } finally {
       dropTableIfExists(getTableName());