SQOOP-1255: Sqoop2: Tool: Dump repository content to a repository independent format
diff --git a/docs/src/site/sphinx/Tools.rst b/docs/src/site/sphinx/Tools.rst
index ad72cd1..84cbd5f 100644
--- a/docs/src/site/sphinx/Tools.rst
+++ b/docs/src/site/sphinx/Tools.rst
@@ -34,7 +34,7 @@
 
   sqoop2-tool verify
 
-.. note:: Running tools while the Sqoop Server is also running is not recommended as it might lead to a data corruption and service disruption.
+.. note:: Stop the Sqoop Server before running Sqoop tools. Running tools while the Sqoop Server is running can lead to data corruption and service disruption.
 
 Verify
 ======
@@ -60,7 +60,8 @@
 Upgrade
 =======
 
-Upgrades all versionable components inside Sqoop2. This includes structural changes inside the repository and stored metadata. Running this tool is idempotent.
+Upgrades all versionable components inside Sqoop2. This includes structural changes inside the repository and stored metadata.
+Running this tool on a Sqoop deployment that has already been upgraded has no effect.
 
 To run the ``upgrade`` tool::
 
@@ -75,3 +76,54 @@
   Tool class org.apache.sqoop.tools.tool.UpgradeTool has failed.
 
 Further details why the upgrade process has failed will be available in the Sqoop server log - same file as the Sqoop Server logs into.
+
+RepositoryDump
+==============
+
+Writes the user-created contents of the Sqoop repository to a file in JSON format. This includes connections, jobs and submissions.
+
+To run the ``repositorydump`` tool::
+
+  sqoop2-tool repositorydump -o repository.json
+
+Optionally, the administrator can also include sensitive information, such as database connection passwords, in the file::
+
+  sqoop2-tool repositorydump -o repository.json --include-sensitive
+
+Upon successful execution, you should see the following message::
+
+  Tool class org.apache.sqoop.tools.tool.RepositoryDumpTool has finished correctly.
+
+If the repository dump fails, you will see the following message instead::
+
+  Tool class org.apache.sqoop.tools.tool.RepositoryDumpTool has failed.
+
+Further details on why the repository dump has failed will be available in the Sqoop server log - the same file that the Sqoop Server logs into.
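+
+The exact contents of the file depend on the repository, but its overall structure looks roughly like the following (a simplified sketch; each ``all`` array holds the individual objects)::
+
+  {
+    "metadata": {
+      "version": "...",
+      "revision": "...",
+      "compile-date": "...",
+      "compile-user": "...",
+      "include-sensitive": false
+    },
+    "connections": { "all": [ ... ] },
+    "jobs": { "all": [ ... ] },
+    "submissions": { "all": [ ... ] }
+  }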
+
+RepositoryLoad
+==============
+
+Reads a JSON formatted file created by the ``repositorydump`` tool and loads it into the current Sqoop repository.
+
+To run the ``repositoryload`` tool::
+
+  sqoop2-tool repositoryload -i repository.json
+
+Upon successful execution, you should see the following message::
+
+  Tool class org.apache.sqoop.tools.tool.RepositoryLoadTool has finished correctly.
+
+If the repository load fails, you will see the following message instead::
+
+  Tool class org.apache.sqoop.tools.tool.RepositoryLoadTool has failed.
+
+You may also see an exception instead. Further details on why the repository load has failed will be available in the Sqoop server log - the same file that the Sqoop Server logs into.
+
+.. note:: If the repository dump was created without passwords (the default), the loaded connections will not contain passwords and jobs will fail to execute. In that case you will need to manually update the connections and set the passwords.
+.. note:: The ``repositoryload`` tool always creates new connections, jobs and submissions from the file, even when identical objects already exist in the repository.
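+
+A typical use of the two tools together is moving repository content from one Sqoop installation to another, for example::
+
+  # On the source installation (Sqoop Server stopped)
+  sqoop2-tool repositorydump -o repository.json
+
+  # On the target installation (Sqoop Server stopped), after copying the file over
+  sqoop2-tool repositoryload -i repository.json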
+
diff --git a/pom.xml b/pom.xml
index 61de719..27af0c7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -88,6 +88,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <maven.compile.source>1.6</maven.compile.source>
     <maven.compile.target>1.6</maven.compile.target>
+    <commons-cli.version>1.2</commons-cli.version>
     <commons-dbcp.version>1.4</commons-dbcp.version>
     <commons-lang.version>2.5</commons-lang.version>
     <commons-io.version>2.4</commons-io.version>
@@ -344,6 +345,16 @@
         <version>${guava.version}</version>
       </dependency>
       <dependency>
+        <groupId>commons-cli</groupId>
+        <artifactId>commons-cli</artifactId>
+        <version>${commons-cli.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>commons-io</groupId>
+        <artifactId>commons-io</artifactId>
+        <version>${commons-io.version}</version>
+      </dependency>
+      <dependency>
         <groupId>javax.servlet</groupId>
         <artifactId>servlet-api</artifactId>
         <version>${servlet.version}</version>
diff --git a/tools/pom.xml b/tools/pom.xml
index 31eda1c..01e1a5f 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -34,12 +34,18 @@
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
     </dependency>
-
     <dependency>
       <groupId>org.apache.sqoop</groupId>
       <artifactId>sqoop-core</artifactId>
     </dependency>
-
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/tools/src/main/java/org/apache/sqoop/tools/tool/BuiltinTools.java b/tools/src/main/java/org/apache/sqoop/tools/tool/BuiltinTools.java
index b24cb35..13a2c5f 100644
--- a/tools/src/main/java/org/apache/sqoop/tools/tool/BuiltinTools.java
+++ b/tools/src/main/java/org/apache/sqoop/tools/tool/BuiltinTools.java
@@ -36,6 +36,8 @@
     tools = new HashMap<String, Class<? extends Tool>>();
     tools.put("upgrade", UpgradeTool.class);
     tools.put("verify", VerifyTool.class);
+    tools.put("repositorydump", RepositoryDumpTool.class);
+    tools.put("repositoryload", RepositoryLoadTool.class);
   }
 
   /**
diff --git a/tools/src/main/java/org/apache/sqoop/tools/tool/JSONConstants.java b/tools/src/main/java/org/apache/sqoop/tools/tool/JSONConstants.java
new file mode 100644
index 0000000..288cba3
--- /dev/null
+++ b/tools/src/main/java/org/apache/sqoop/tools/tool/JSONConstants.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.tools.tool;
+
+
+public final class JSONConstants {
+
+  private JSONConstants() {
+    // restrict instantiation
+  }
+
+  public static final String CONNECTOR_ID = "connector-id";
+  public static final String CONNECTOR_NAME = "connector-name";
+  public static final String ALL = "all";
+  public static final String NAME = "name";
+  public static final String CONNECTION_ID = "connection-id";
+  public static final String JOB_ID = "job-id";
+  public static final String CONNECTIONS = "connections";
+  public static final String JOBS = "jobs";
+  public static final String SUBMISSIONS = "submissions";
+  public static final String METADATA = "metadata";
+  public static final String VERSION = "version";
+  public static final String REVISION = "revision";
+  public static final String COMPILE_DATE = "compile-date";
+  public static final String COMPILE_USER = "compile-user";
+  public static final String INCLUDE_SENSITIVE = "include-sensitive";
+}
diff --git a/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryDumpTool.java b/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryDumpTool.java
new file mode 100644
index 0000000..d41b0d2
--- /dev/null
+++ b/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryDumpTool.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.tools.tool;
+
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.ParseException;
+import org.apache.log4j.Logger;
+import org.apache.sqoop.connector.ConnectorManager;
+import org.apache.sqoop.json.ConnectionBean;
+import org.apache.sqoop.json.JobBean;
+import org.apache.sqoop.json.SubmissionBean;
+import org.apache.sqoop.repository.Repository;
+import org.apache.sqoop.repository.RepositoryManager;
+import org.apache.sqoop.tools.ConfiguredTool;
+import org.apache.sqoop.common.VersionInfo;
+import static org.apache.sqoop.json.util.FormSerialization.ALL;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.io.BufferedWriter;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * Write user-created content of Sqoop repository to JSON formatted file
+ */
+public class RepositoryDumpTool extends ConfiguredTool {
+  public static final Logger LOG = Logger.getLogger(RepositoryDumpTool.class);
+
+  @Override
+  public boolean runToolWithConfiguration(String[] arguments) {
+
+    boolean skipSensitive = true;
+
+    Options options = new Options();
+    options.addOption(OptionBuilder.withLongOpt("include-sensitive")
+            .withDescription("Dump all data including sensitive information such as passwords. Passwords will be dumped in clear text")
+            .create());
+    options.addOption(OptionBuilder.isRequired()
+            .hasArg()
+            .withArgName("filename")
+            .withLongOpt("output")
+            .create('o'));
+
+    CommandLineParser parser = new GnuParser();
+
+    try {
+      CommandLine line = parser.parse(options, arguments);
+      String outputFileName = line.getOptionValue('o');
+
+      if (line.hasOption("include-sensitive")) {
+        skipSensitive = false;
+      }
+
+      BufferedWriter output = new BufferedWriter(new FileWriter(outputFileName));
+      LOG.info("Writing JSON repository dump to file " + outputFileName);
+      try {
+        dump(skipSensitive).writeJSONString(output);
+        output.flush();
+      } finally {
+        // Release the file handle even if writing the dump fails
+        output.close();
+      }
+
+    } catch (ParseException e) {
+      LOG.error("Error parsing command line arguments:", e);
+      System.out.println("Error parsing command line arguments. Please check Server logs for details.");
+      return false;
+    } catch (IOException e) {
+      LOG.error("Can't dump Sqoop repository to file:", e);
+      System.out.println("Writing repository dump to file failed. Please check Server logs for details.");
+      return false;
+    }
+    return true;
+
+  }
+
+  private JSONObject dump(boolean skipSensitive) {
+
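+    // Initialize the repository and connector manager so that repository
+    // content and connector metadata can be read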
+    RepositoryManager.getInstance().initialize(true);
+    ConnectorManager.getInstance().initialize();
+
+    Repository repository = RepositoryManager.getInstance().getRepository();
+
+
+    JSONObject result = new JSONObject();
+
+    LOG.info("Dumping Connections with skipSensitive=" + String.valueOf(skipSensitive));
+    ConnectionBean connections = new ConnectionBean(repository.findConnections());
+    result.put(JSONConstants.CONNECTIONS, addConnectorName(connections.extract(skipSensitive)));
+
+    LOG.info("Dumping Jobs with skipSensitive=" + String.valueOf(skipSensitive));
+    JobBean jobs = new JobBean(repository.findJobs());
+    result.put(JSONConstants.JOBS, addConnectorName(jobs.extract(skipSensitive)));
+
+    LOG.info("Dumping Submissions with skipSensitive=" + String.valueOf(skipSensitive));
+    SubmissionBean submissions = new SubmissionBean(repository.findSubmissions());
+    result.put(JSONConstants.SUBMISSIONS, submissions.extract(skipSensitive));
+
+    result.put(JSONConstants.METADATA, repoMetadata(skipSensitive));
+
+    return result;
+  }
+
+  private JSONObject repoMetadata(boolean skipSensitive) {
+    JSONObject metadata = new JSONObject();
+    metadata.put(JSONConstants.VERSION, VersionInfo.getVersion());
+    metadata.put(JSONConstants.REVISION, VersionInfo.getRevision());
+    metadata.put(JSONConstants.COMPILE_DATE, VersionInfo.getDate());
+    metadata.put(JSONConstants.COMPILE_USER, VersionInfo.getUser());
+    metadata.put(JSONConstants.INCLUDE_SENSITIVE, !skipSensitive);
+
+    return metadata;
+  }
+
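+  /**
+   * Add a connector-name entry next to each object's connector-id so that
+   * the load tool can later resolve connectors by name, since connector IDs
+   * may differ between repositories.
+   */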
+  private JSONObject addConnectorName(JSONObject json) {
+    ConnectorManager connectorManager = ConnectorManager.getInstance();
+
+    JSONArray results = (JSONArray) json.get(ALL);
+
+    Iterator<JSONObject> iterator = results.iterator();
+
+    while (iterator.hasNext()) {
+      JSONObject result = iterator.next();
+      Long connectorId = (Long) result.get(JSONConstants.CONNECTOR_ID);
+      result.put(JSONConstants.CONNECTOR_NAME, connectorManager.getConnectorMetadata(connectorId).getUniqueName());
+    }
+
+    return json;
+  }
+}
diff --git a/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryLoadTool.java b/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryLoadTool.java
new file mode 100644
index 0000000..c6124da
--- /dev/null
+++ b/tools/src/main/java/org/apache/sqoop/tools/tool/RepositoryLoadTool.java
@@ -0,0 +1,408 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.tools.tool;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.Charsets;
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.VersionInfo;
+import org.apache.sqoop.connector.ConnectorManager;
+import org.apache.sqoop.connector.spi.MetadataUpgrader;
+import org.apache.sqoop.connector.spi.SqoopConnector;
+import org.apache.sqoop.framework.FrameworkManager;
+import org.apache.sqoop.json.ConnectionBean;
+import org.apache.sqoop.json.JobBean;
+import org.apache.sqoop.json.SubmissionBean;
+import org.apache.sqoop.model.FormUtils;
+import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MConnectionForms;
+import org.apache.sqoop.model.MConnector;
+import org.apache.sqoop.model.MForm;
+import org.apache.sqoop.model.MFramework;
+import org.apache.sqoop.model.MJob;
+import org.apache.sqoop.model.MJobForms;
+import org.apache.sqoop.model.MPersistableEntity;
+import org.apache.sqoop.model.MSubmission;
+import org.apache.sqoop.repository.Repository;
+import org.apache.sqoop.repository.RepositoryManager;
+import org.apache.sqoop.tools.ConfiguredTool;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.sqoop.utils.ClassUtils;
+import org.apache.sqoop.validation.Status;
+import org.apache.sqoop.validation.Validation;
+import org.apache.sqoop.validation.Validator;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+/**
+ * Load user-created content of Sqoop repository from a JSON formatted file
+ * The loaded connector IDs will be modified to match existing connectors
+ */
+public class RepositoryLoadTool extends ConfiguredTool {
+
+  public static final Logger LOG = Logger.getLogger(RepositoryLoadTool.class);
+
+
+
+  @Override
+  public boolean runToolWithConfiguration(String[] arguments) {
+
+
+    Options options = new Options();
+    options.addOption(OptionBuilder.isRequired()
+            .hasArg()
+            .withArgName("filename")
+            .withLongOpt("input")
+            .create('i'));
+
+    CommandLineParser parser = new GnuParser();
+
+    try {
+      CommandLine line = parser.parse(options, arguments);
+      String inputFileName = line.getOptionValue('i');
+
+      LOG.info("Reading JSON from file " + inputFileName);
+      InputStream input = new FileInputStream(inputFileName);
+      String jsonTxt;
+      try {
+        jsonTxt = IOUtils.toString(input, Charsets.UTF_8);
+      } finally {
+        // The whole file is read into memory, so the stream can be closed before parsing
+        input.close();
+      }
+      JSONObject json = (JSONObject) JSONValue.parse(jsonTxt);
+      return load(json);
+
+    } catch (FileNotFoundException e) {
+      LOG.error("Repository dump file not found:", e);
+      System.out.println("Input file not found. Please check Server logs for details.");
+      return false;
+    } catch (IOException e) {
+      LOG.error("Unable to read repository dump file:", e);
+      System.out.println("Unable to read input file. Please check Server logs for details.");
+      return false;
+    } catch (ParseException e) {
+      LOG.error("Error parsing command line arguments:", e);
+      System.out.println("Error parsing command line arguments. Please check Server logs for details.");
+      return false;
+    }
+  }
+
+
+  private boolean load(JSONObject repo) {
+
+   // Validate that loading JSON into repository is supported
+   JSONObject metadata = (JSONObject) repo.get(JSONConstants.METADATA);
+
+   if (metadata == null) {
+     LOG.error("Malformed JSON. Key "+ JSONConstants.METADATA + " not found.");
+     return false;
+   }
+
+   if (!validateMetadata(metadata)){
+     LOG.error("Metadata of repository dump file failed validation (see error above for cause). Aborting repository load.");
+     return false;
+   }
+
+   // initialize repository as mutable
+   RepositoryManager.getInstance().initialize(false);
+   Repository repository = RepositoryManager.getInstance().getRepository();
+
+   ConnectorManager.getInstance().initialize();
+   FrameworkManager.getInstance().initialize();
+
+   LOG.info("Loading Connections");
+
+   JSONObject jsonConns = (JSONObject) repo.get(JSONConstants.CONNECTIONS);
+
+   if (jsonConns == null) {
+     LOG.error("Malformed JSON file. Key "+ JSONConstants.CONNECTIONS + " not found.");
+     return false;
+   }
+
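+   // Connections reference connectors by ID, and connector IDs can differ
+   // between repositories, so remap them by connector name before restoring.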
+   ConnectionBean connectionBean = new ConnectionBean();
+   connectionBean.restore(updateConnectorIDUsingName(jsonConns));
+
+   HashMap<Long,Long> connectionIds = new HashMap<Long, Long>();
+
+   for (MConnection connection : connectionBean.getConnections()) {
+     long oldId = connection.getPersistenceId();
+     long newId = loadConnection(connection);
+     if (newId == connection.PERSISTANCE_ID_DEFAULT) {
+       LOG.error("Loading connection " + connection.getName() + " with previous ID " + oldId + " failed. Aborting repository load.");
+       return false;
+     }
+     connectionIds.put(oldId,newId);
+   }
+   LOG.info("Loaded " + connectionIds.size() + " connections");
+
+   LOG.info("Loading Jobs");
+   JSONObject jsonJobs = (JSONObject) repo.get(JSONConstants.JOBS);
+
+   if (jsonJobs == null) {
+     LOG.error("Malformed JSON file. Key "+ JSONConstants.JOBS + " not found.");
+     return false;
+   }
+
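+   // Jobs reference both connectors and connections by ID: remap connector
+   // IDs by name and connection IDs through the old-to-new map built above.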
+   JobBean jobBean = new JobBean();
+   jobBean.restore(updateIdUsingMap(updateConnectorIDUsingName(jsonJobs), connectionIds,JSONConstants.CONNECTION_ID));
+
+   HashMap<Long,Long> jobIds = new HashMap<Long, Long>();
+   for (MJob job: jobBean.getJobs()) {
+     long oldId = job.getPersistenceId();
+     long newId = loadJob(job);
+
+     if (newId == job.PERSISTANCE_ID_DEFAULT) {
+       LOG.error("Loading job " + job.getName() + " failed. Aborting repository load.");
+       return false;
+     }
+     jobIds.put(oldId,newId);
+
+   }
+   LOG.info("Loaded " + jobIds.size() + " jobs");
+
+   LOG.info("Loading Submissions");
+   JSONObject jsonSubmissions = (JSONObject) repo.get(JSONConstants.SUBMISSIONS);
+
+   if (jsonSubmissions == null) {
+     LOG.error("Malformed JSON file. Key " + JSONConstants.SUBMISSIONS + " not found.");
+     return false;
+   }
+
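+   // Submissions reference jobs by ID: remap them through the old-to-new job ID map.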
+   SubmissionBean submissionBean = new SubmissionBean();
+   submissionBean.restore(updateIdUsingMap(jsonSubmissions,jobIds,JSONConstants.JOB_ID));
+   int submissionCount = 0;
+   for (MSubmission submission: submissionBean.getSubmissions()) {
+     resetPersistenceId(submission);
+     repository.createSubmission(submission);
+     submissionCount++;
+   }
+   LOG.info("Loaded " + submissionCount + " submissions.");
+   LOG.info("Repository load completed successfully.");
+   return true;
+  }
+
+  private void resetPersistenceId(MPersistableEntity ent) {
+    ent.setPersistenceId(ent.PERSISTANCE_ID_DEFAULT);
+  }
+
+
+
+  /**
+   * Even though the metadata contains version, revision, compile-date and compile-user
+   * We are only validating that version match for now.
+   * More interesting logic can be added later
+   */
+  private boolean validateMetadata(JSONObject metadata) {
+    String jsonVersion = (String) metadata.get(JSONConstants.VERSION);
+    Boolean includeSensitive = (Boolean) metadata.get(JSONConstants.INCLUDE_SENSITIVE);
+    String repoVersion = VersionInfo.getVersion();
+
+    if (!jsonVersion.equals(repoVersion)) {
+      LOG.error("Repository version in file (" + jsonVersion + ") does not match this version of Sqoop (" + repoVersion + ")");
+      return false;
+    }
+
+    if (includeSensitive == null || !includeSensitive.booleanValue()) {
+      LOG.warn("Loading repository which was dumped without --include-sensitive=true. " +
+              "This means some sensitive information such as passwords is not included in the dump file and will need to be manually added later.");
+    }
+
+    return true;
+  }
+
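+  /**
+   * Upgrade the connection forms to the current repository metadata, validate
+   * the resulting configuration and persist the connection. Returns the new
+   * persistence ID, or the default ID if the connection could not be created.
+   */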
+  private long loadConnection(MConnection connection) {
+
+    //starting by pretending we have a brand new connection
+    resetPersistenceId(connection);
+
+    MetadataUpgrader upgrader = FrameworkManager.getInstance().getMetadataUpgrader();
+    MFramework framework = FrameworkManager.getInstance().getFramework();
+    Repository repository = RepositoryManager.getInstance().getRepository();
+
+    List<MForm> frameworkForms = framework.getConnectionForms().clone(false).getForms();
+    MConnectionForms newConnectionFrameworkForms = new MConnectionForms(frameworkForms);
+
+    MConnector mConnector = ConnectorManager.getInstance().getConnectorMetadata(connection.getConnectorId());
+    List<MForm> connectorForms = mConnector.getConnectionForms().clone(false).getForms();
+    MConnectionForms newConnectionConnectorForms = new MConnectionForms(connectorForms);
+
+    // upgrading the forms to make sure they match the current repository
+    upgrader.upgrade(connection.getFrameworkPart(), newConnectionFrameworkForms);
+    upgrader.upgrade(connection.getConnectorPart(), newConnectionConnectorForms);
+    MConnection newConnection = new MConnection(connection, newConnectionConnectorForms, newConnectionFrameworkForms);
+
+    // Transform form structures to objects for validations
+    SqoopConnector connector =
+            ConnectorManager.getInstance().getConnector(connection.getConnectorId());
+
+    Object connectorConfig = ClassUtils.instantiate(
+            connector.getConnectionConfigurationClass());
+    Object frameworkConfig = ClassUtils.instantiate(
+            FrameworkManager.getInstance().getConnectionConfigurationClass());
+
+    FormUtils.fromForms(
+            connection.getConnectorPart().getForms(), connectorConfig);
+    FormUtils.fromForms(
+            connection.getFrameworkPart().getForms(), frameworkConfig);
+
+    Validator connectorValidator = connector.getValidator();
+    Validator frameworkValidator = FrameworkManager.getInstance().getValidator();
+
+    Validation connectorValidation =
+            connectorValidator.validateConnection(connectorConfig);
+    Validation frameworkValidation =
+            frameworkValidator.validateConnection(frameworkConfig);
+
+    Status finalStatus = Status.getWorstStatus(connectorValidation.getStatus(),
+            frameworkValidation.getStatus());
+
+    if (finalStatus.canProceed()) {
+      repository.createConnection(newConnection);
+
+    } else {
+      LOG.error("Failed to load connection:" + connection.getName());
+      LOG.error("Status of connector forms:" + connectorValidation.getStatus().toString());
+      LOG.error("Status of framework forms:" + frameworkValidation.getStatus().toString());
+    }
+    return newConnection.getPersistenceId();
+  }
+
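+  /**
+   * Upgrade the job forms to the current repository metadata, validate the
+   * resulting configuration and persist the job. Returns the new persistence
+   * ID, or the default ID if the job could not be created.
+   */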
+  private long loadJob(MJob job) {
+    //starting by pretending we have a brand new job
+    resetPersistenceId(job);
+
+
+    MetadataUpgrader upgrader = FrameworkManager.getInstance().getMetadataUpgrader();
+    MFramework framework = FrameworkManager.getInstance().getFramework();
+    Repository repository = RepositoryManager.getInstance().getRepository();
+
+    MJob.Type jobType = job.getType();
+    List<MForm> frameworkForms = framework.getJobForms(job.getType()).clone(false).getForms();
+    MJobForms newJobFrameworkForms = new MJobForms(jobType,frameworkForms);
+
+    MConnector mConnector = ConnectorManager.getInstance().getConnectorMetadata(job.getConnectorId());
+    List<MForm> connectorForms = mConnector.getJobForms(jobType).clone(false).getForms();
+    MJobForms newJobConnectorForms = new MJobForms(jobType,connectorForms);
+
+    // upgrading the forms to make sure they match the current repository
+    upgrader.upgrade(job.getFrameworkPart(), newJobFrameworkForms);
+    upgrader.upgrade(job.getConnectorPart(), newJobConnectorForms);
+    MJob newJob = new MJob(job, newJobConnectorForms, newJobFrameworkForms);
+
+    // Transform form structures to objects for validations
+    SqoopConnector connector =
+            ConnectorManager.getInstance().getConnector(job.getConnectorId());
+
+    Object connectorConfig = ClassUtils.instantiate(
+            connector.getJobConfigurationClass(jobType));
+    Object frameworkConfig = ClassUtils.instantiate(
+            FrameworkManager.getInstance().getJobConfigurationClass(jobType));
+
+    FormUtils.fromForms(
+            job.getConnectorPart().getForms(), connectorConfig);
+    FormUtils.fromForms(
+            job.getFrameworkPart().getForms(), frameworkConfig);
+
+    Validator connectorValidator = connector.getValidator();
+    Validator frameworkValidator = FrameworkManager.getInstance().getValidator();
+
+    Validation connectorValidation =
+            connectorValidator.validateJob(jobType,connectorConfig);
+    Validation frameworkValidation =
+            frameworkValidator.validateJob(jobType,frameworkConfig);
+
+    Status finalStatus = Status.getWorstStatus(connectorValidation.getStatus(),
+            frameworkValidation.getStatus());
+
+    if (finalStatus.canProceed()) {
+      repository.createJob(newJob);
+
+    } else {
+      LOG.error("Failed to load job:" + job.getName());
+      LOG.error("Status of connector forms:" + connectorValidation.getStatus().toString());
+      LOG.error("Status of framework forms:" + frameworkValidation.getStatus().toString());
+
+    }
+    return newJob.getPersistenceId();
+
+
+  }
+
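+  /**
+   * Connector IDs in the dump file may not match this repository. Look up
+   * each connector by its unique name and rewrite the connector-id field to
+   * the ID registered in the current repository.
+   */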
+  private JSONObject updateConnectorIDUsingName(JSONObject json) {
+    JSONArray array = (JSONArray) json.get(JSONConstants.ALL);
+
+    Repository repository = RepositoryManager.getInstance().getRepository();
+
+    List<MConnector> connectors = repository.findConnectors();
+    Map<String, Long> connectorMap = new HashMap<String, Long>();
+
+    for (MConnector connector : connectors) {
+      connectorMap.put(connector.getUniqueName(), connector.getPersistenceId());
+    }
+
+    for (Object obj : array) {
+      JSONObject object = (JSONObject) obj;
+      long connectorId = (Long) object.get(JSONConstants.CONNECTOR_ID);
+      String connectorName = (String) object.get(JSONConstants.CONNECTOR_NAME);
+      long currentConnectorId = connectorMap.get(connectorName);
+      String connectionName = (String) object.get(JSONConstants.NAME);
+
+
+      // If a given connector now has a different ID, we need to update the ID
+      if (connectorId != currentConnectorId) {
+        LOG.warn("Connection " + connectionName + " uses connector " + connectorName + ". " +
+                "Replacing previous ID " + connectorId + " with new ID " + currentConnectorId);
+
+        object.put(JSONConstants.CONNECTOR_ID, currentConnectorId);
+      }
+    }
+    return json;
+  }
+
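+  /**
+   * Replace the value of the given ID field in every element of the "all"
+   * array with the corresponding new ID from the supplied map.
+   */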
+  private JSONObject updateIdUsingMap(JSONObject json, HashMap<Long,Long> idMap, String fieldName) {
+    JSONArray array = (JSONArray) json.get(JSONConstants.ALL);
+
+    for (Object obj : array) {
+      JSONObject object = (JSONObject) obj;
+
+      object.put(fieldName, idMap.get(object.get(fieldName)));
+    }
+
+    return json;
+  }
+
+
+
+}