Create datasets.yaml in the Mesos job scheduler

The Mesos JobScheduler now writes a per-action datasets.yaml (via DataLoader.getDatasets), matching the YARN ApplicationMaster, where a "datesets" typo is also fixed. In addition: the Python runner provider creates its requirements file under dist/ and hands only the bare file name to executors, org.eclipse.jgit is pinned back to the 4.9.x line, staged artifacts are copied rather than moved in the Mesos scheduler, the stale common/build.gradle.orig merge backup is removed, and amaterasu.properties drops HDP/CDH-specific Spark options while adding mesos.libPath.
diff --git a/common/build.gradle.orig b/common/build.gradle.orig
deleted file mode 100644
index 455468b..0000000
--- a/common/build.gradle.orig
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-plugins {
-    id 'com.github.johnrengelman.shadow' version '1.2.4'
-    id "org.jetbrains.kotlin.jvm"
-    id 'scala'
-}
-
-sourceCompatibility = 1.8
-targetCompatibility = 1.8
-
-shadowJar {
-    zip64 true
-}
-
-repositories {
-    maven {
-        url "https://plugins.gradle.org/m2/"
-    }
-    mavenCentral()
-}
-
-junitPlatform {
-    filters {
-        engines {
-            include 'spek'
-        }
-    }
-}
-
-configurations {
-    provided
-    compile.extendsFrom provided
-}
-
-dependencies {
-    compile 'org.scala-lang:scala-library:2.11.8'
-    compile group: 'com.github.nscala-time', name: 'nscala-time_2.11', version: '2.2.0'
-    compile group: 'org.slf4j', name: 'slf4j-api', version: '1.7.9'
-    compile group: 'org.slf4j', name: 'slf4j-simple', version: '1.7.9'
-    compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: '2.9.4'
-    compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-kotlin', version: '2.9.8'
-    compile group: 'commons-validator', name: 'commons-validator', version: '1.6'
-    compile group: 'software.amazon.awssdk', name: 's3', version: '2.5.23'
-
-    compile "org.jetbrains.kotlin:kotlin-stdlib-jdk8"
-    compile "org.jetbrains.kotlin:kotlin-reflect"
-
-    // currently we have to use this specific mesos version to prevent from
-    // clashing with spark
-    compile('org.apache.mesos:mesos:1.7.0:shaded-protobuf') {
-        exclude group: 'com.google.protobuf', module: 'protobuf-java'
-    }
-
-    compile('com.jcabi:jcabi-aether:0.10.1') {
-        exclude group: 'org.jboss.netty'
-    }
-
-    compile('org.apache.activemq:activemq-client:5.15.2') {
-        exclude group: 'org.jboss.netty'
-    }
-
-    compile group: 'org.apache.maven', name: 'maven-core', version: '3.0.5'
-    compile group: 'net.liftweb', name: 'lift-json_2.11', version: '3.2.0'
-<<<<<<< HEAD
-    
-=======
-    compile group: 'net.liftweb', name: 'lift-json_2.11', version: '3.2.0'
-
->>>>>>> origin/master
-    provided group: 'org.apache.hadoop', name: 'hadoop-yarn-client', version: '2.8.4'
-    provided group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.8.4'
-    provided group: 'org.apache.hadoop', name: 'hadoop-yarn-api', version: '2.8.4'
-    provided group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.8.4'
-
-    testCompile "gradle.plugin.com.github.maiflai:gradle-scalatest:0.14"
-    testRuntime 'org.pegdown:pegdown:1.1.0'
-    testCompile 'junit:junit:4.11'
-    testCompile 'org.scalatest:scalatest_2.11:3.0.1'
-    testCompile 'org.scala-lang:scala-library:2.11.8'
-    testCompile 'org.jetbrains.spek:spek-api:1.1.5'
-    testCompile "org.jetbrains.kotlin:kotlin-test-junit:$kotlin_version"
-    testRuntime 'org.jetbrains.spek:spek-junit-platform-engine:1.1.5'
-
-    // spek requires kotlin-reflect, can be omitted if already in the classpath
-    testRuntimeOnly "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version"
-}
-
-task copyToHome() {
-}
-
-sourceSets {
-    test {
-        resources.srcDirs += [file('src/test/resources')]
-    }
-
-    // this is done so Scala will compile before Kotlin
-    main {
-        kotlin {
-            srcDirs = ['src/main/kotlin']
-        }
-        scala {
-            srcDirs = ['src/main/kotlin','src/main/java', 'src/main/scala']
-        }
-        java {
-            srcDirs = ['src/main/java']
-        }
-    }
-}
-
-compileKotlin{
-    kotlinOptions.jvmTarget = "1.8"
-}
-compileTestKotlin {
-    kotlinOptions.jvmTarget = "1.8"
-}
-
-compileScala {
-    dependsOn compileJava
-    classpath += files(compileJava.destinationDir) + files(compileKotlin.destinationDir)
-}
-
-compileJava {
-    dependsOn compileKotlin
-}
-
-
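The file removed above, common/build.gradle.orig, is a merge-tool backup rather than a live build script: it still carries unresolved conflict markers around the duplicated lift-json dependency, so deleting it is housekeeping only. For reference, the cross-language compile ordering it records (compileJava depends on compileKotlin, compileScala depends on compileJava and picks up both outputs) looks roughly like this in Gradle's Kotlin DSL; the translation below is a sketch, not the project's actual script:

    // build.gradle.kts -- sketch of the Kotlin -> Java -> Scala compile ordering
    import org.gradle.api.tasks.compile.JavaCompile
    import org.gradle.api.tasks.scala.ScalaCompile
    import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

    plugins {
        id("org.jetbrains.kotlin.jvm")
        id("scala")
    }

    // Java compiles after Kotlin so Java sources can see Kotlin classes.
    tasks.named<JavaCompile>("compileJava") {
        dependsOn(tasks.named("compileKotlin"))
    }

    // Scala compiles last, with the Java and Kotlin class output on its classpath.
    tasks.named<ScalaCompile>("compileScala") {
        dependsOn(tasks.named("compileJava"))
        classpath += files(
            tasks.named<JavaCompile>("compileJava").get().destinationDir,
            tasks.named<KotlinCompile>("compileKotlin").get().destinationDir
        )
    }
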
diff --git a/frameworks/python/dispatcher/src/main/kotlin/org/apache/amaterasu/frameworks/python/dispatcher/runners/providers/PythonRunnerProviderBase.kt b/frameworks/python/dispatcher/src/main/kotlin/org/apache/amaterasu/frameworks/python/dispatcher/runners/providers/PythonRunnerProviderBase.kt
index 59f3896..8636f22 100644
--- a/frameworks/python/dispatcher/src/main/kotlin/org/apache/amaterasu/frameworks/python/dispatcher/runners/providers/PythonRunnerProviderBase.kt
+++ b/frameworks/python/dispatcher/src/main/kotlin/org/apache/amaterasu/frameworks/python/dispatcher/runners/providers/PythonRunnerProviderBase.kt
@@ -45,7 +45,7 @@
     }
 
     override fun getActionDependencies(jobId: String, actionData: ActionData): Array<String> {
-        val reqFile = File(requirementsFileName)
+        val reqFile = File("dist/$requirementsFileName")
         if (reqFile.exists()) reqFile.delete()
         val dependencies = runnerResources + mandatoryPYPIPackages
 
@@ -57,9 +57,9 @@
         return try {
             val execData = DataLoader.getExecutorData(env, conf)
             val userRequirements = execData.pyDeps?.filePaths
-            arrayOf(reqFile.path) + userRequirements!!
+            arrayOf(reqFile.name) + userRequirements!!
         } catch (e: NullPointerException) {
-            arrayOf(reqFile.path)
+            arrayOf(reqFile.name)
         }
 
     }
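With this change the Python requirements file is created under dist/ (the directory the leader serves staged artifacts from, judging by the Mesos scheduler changes below), while only its bare file name is returned, since executors resolve the file inside their own sandbox. Condensed, the method after the patch reads roughly as follows; requirementsFileName, runnerResources, mandatoryPYPIPackages, env and conf are members of the surrounding class, and the line that writes the dependency list is elided in the hunk, so its exact form here is an assumption:

    override fun getActionDependencies(jobId: String, actionData: ActionData): Array<String> {
        // Recreate the requirements file inside dist/ on every invocation.
        val reqFile = File("dist/$requirementsFileName")
        if (reqFile.exists()) reqFile.delete()

        // Assumed: the combined package list is written one requirement per line.
        val dependencies = runnerResources + mandatoryPYPIPackages
        dependencies.forEach { reqFile.appendText("$it\n") }

        return try {
            val execData = DataLoader.getExecutorData(env, conf)
            val userRequirements = execData.pyDeps?.filePaths
            // Only the bare name is handed to the executor, not the leader-side path.
            arrayOf(reqFile.name) + userRequirements!!
        } catch (e: NullPointerException) {
            arrayOf(reqFile.name)
        }
    }
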
diff --git a/frameworks/python/pandas_runtime/dist/amaterasu_pandas-0.2.0-incubating-rc4.zip b/frameworks/python/pandas_runtime/dist/amaterasu_pandas-0.2.0-incubating-rc4.zip
index e795826..6a7200c 100644
--- a/frameworks/python/pandas_runtime/dist/amaterasu_pandas-0.2.0-incubating-rc4.zip
+++ b/frameworks/python/pandas_runtime/dist/amaterasu_pandas-0.2.0-incubating-rc4.zip
Binary files differ
diff --git a/frameworks/python/python_runtime/dist/amaterasu_python-0.2.0-incubating-rc4.zip b/frameworks/python/python_runtime/dist/amaterasu_python-0.2.0-incubating-rc4.zip
index 81e0f14..7f94f5b 100644
--- a/frameworks/python/python_runtime/dist/amaterasu_python-0.2.0-incubating-rc4.zip
+++ b/frameworks/python/python_runtime/dist/amaterasu_python-0.2.0-incubating-rc4.zip
Binary files differ
diff --git a/frameworks/spark/dispatcher/src/main/scala/org/apache/amaterasu/frameworks/spark/dispatcher/runners/providers/PySparkRunnerProvider.scala b/frameworks/spark/dispatcher/src/main/scala/org/apache/amaterasu/frameworks/spark/dispatcher/runners/providers/PySparkRunnerProvider.scala
index 8e50cb8..ee20009 100644
--- a/frameworks/spark/dispatcher/src/main/scala/org/apache/amaterasu/frameworks/spark/dispatcher/runners/providers/PySparkRunnerProvider.scala
+++ b/frameworks/spark/dispatcher/src/main/scala/org/apache/amaterasu/frameworks/spark/dispatcher/runners/providers/PySparkRunnerProvider.scala
@@ -7,6 +7,7 @@
 class PySparkRunnerProvider(val env: String, val conf: ClusterConfig) extends PythonRunnerProviderBase(env, conf) {
 
   override def getCommand(jobId: String, actionData: ActionData, env: String, executorId: String, callbackAddress: String): String = {
+
     val command = super.getCommand(jobId: String, actionData: ActionData, env: String, executorId: String, callbackAddress: String)
     log.info(s"===> Cluster manager: ${conf.mode}")
     command +
diff --git a/frameworks/spark/pyspark_runtime/dist/amaterasu_pyspark-0.2.0-incubating-rc4.zip b/frameworks/spark/pyspark_runtime/dist/amaterasu_pyspark-0.2.0-incubating-rc4.zip
index 1949dd5..fa4e0eb 100644
--- a/frameworks/spark/pyspark_runtime/dist/amaterasu_pyspark-0.2.0-incubating-rc4.zip
+++ b/frameworks/spark/pyspark_runtime/dist/amaterasu_pyspark-0.2.0-incubating-rc4.zip
Binary files differ
diff --git a/leader-common/build.gradle b/leader-common/build.gradle
index e887176..0adb406 100644
--- a/leader-common/build.gradle
+++ b/leader-common/build.gradle
@@ -79,8 +79,8 @@
     compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-kotlin', version: '2.9.8'
     
     compile group: 'org.reflections', name: 'reflections', version: '0.9.11'
-    //compile group: 'org.eclipse.jgit', name: 'org.eclipse.jgit', version: '4.2.0.201601211800-r'
-    compile group: 'org.eclipse.jgit', name: 'org.eclipse.jgit', version: '5.3.0.201903130848-r'
+    compile group: 'org.eclipse.jgit', name: 'org.eclipse.jgit', version: '4.9.10.201904181027-r'
+
     compile group: 'org.apache.activemq', name: 'activemq-broker', version: '5.15.3'
     runtime group: 'org.apache.activemq', name: 'activemq-kahadb-store', version: '5.15.3'
     compile group: 'com.importre', name: 'crayon', version: '0.1.0'
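org.eclipse.jgit is pinned back from the 5.3.x line to the newest 4.9.x release, presumably to avoid the API and dependency changes that arrived with JGit 5.x. The clone API (which leader-common presumably uses to fetch job definition repositories; that code is not part of this diff) is identical on both lines; a minimal Kotlin sketch with placeholder arguments:

    import org.eclipse.jgit.api.Git
    import java.io.File

    // Minimal JGit clone call; repository URL, branch and target directory are placeholders.
    fun cloneJobRepo(repoUrl: String, branch: String, target: File): Git =
        Git.cloneRepository()
            .setURI(repoUrl)
            .setBranch(branch)
            .setDirectory(target)
            .call()
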
diff --git a/leader-yarn/src/main/kotlin/org/apache/amaterasu/leader/yarn/ApplicationMaster.kt b/leader-yarn/src/main/kotlin/org/apache/amaterasu/leader/yarn/ApplicationMaster.kt
index aa1e0f3..861ca3f 100644
--- a/leader-yarn/src/main/kotlin/org/apache/amaterasu/leader/yarn/ApplicationMaster.kt
+++ b/leader-yarn/src/main/kotlin/org/apache/amaterasu/leader/yarn/ApplicationMaster.kt
@@ -324,8 +324,8 @@
         val dataStoresYaml = yamlMapper.writeValueAsString(dataStores)
         writeConfigFile(dataStoresYaml, jobManager.jobId, actionData.name, "datastores.yaml")
 
-        val datesets = DataLoader.getDatasets(env)
-        writeConfigFile(datesets, jobManager.jobId, actionData.name, "datasets.yaml")
+        val datasets = DataLoader.getDatasets(env)
+        writeConfigFile(datasets, jobManager.jobId, actionData.name, "datasets.yaml")
 
         writeConfigFile("jobId: ${jobManager.jobId}\nactionName: ${actionData.name}", jobManager.jobId, actionData.name, "runtime.yaml")
 
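Besides fixing the "datesets" typo, this is the YARN counterpart of the Mesos change below: every action now gets a datasets.yaml next to datastores.yaml and runtime.yaml before its container launches. After the patch the block reads as follows (condensed; yamlMapper, writeConfigFile, DataLoader, jobManager and actionData are the members visible in the hunk):

    // Per-action configuration files written before the container is launched.
    val dataStoresYaml = yamlMapper.writeValueAsString(dataStores)
    writeConfigFile(dataStoresYaml, jobManager.jobId, actionData.name, "datastores.yaml")

    // getDatasets(env) is written verbatim, so it presumably returns YAML already.
    val datasets = DataLoader.getDatasets(env)
    writeConfigFile(datasets, jobManager.jobId, actionData.name, "datasets.yaml")

    writeConfigFile(
        "jobId: ${jobManager.jobId}\nactionName: ${actionData.name}",
        jobManager.jobId, actionData.name, "runtime.yaml"
    )
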
diff --git a/leader/src/main/scala/org/apache/amaterasu/leader/mesos/schedulers/JobScheduler.scala b/leader/src/main/scala/org/apache/amaterasu/leader/mesos/schedulers/JobScheduler.scala
index f9b1060..19cb831 100755
--- a/leader/src/main/scala/org/apache/amaterasu/leader/mesos/schedulers/JobScheduler.scala
+++ b/leader/src/main/scala/org/apache/amaterasu/leader/mesos/schedulers/JobScheduler.scala
@@ -176,6 +176,8 @@
 
             writeConfigFile(s"jobId: ${jobManager.getJobId}\nactionName: ${actionData.getName}", jobManager.getJobId, actionData.getName, "runtime.yaml")
 
+            val datasets = DataLoader.getDatasets(env)
+            writeConfigFile(datasets, jobManager.getJobId, actionData.getName, "datasets.yaml")
             offersToTaskIds.put(offer.getId.getValue, taskId.getValue)
 
             // atomically adding a record for the slave, I'm storing all the actions
@@ -210,14 +212,14 @@
             val command = CommandInfo
               .newBuilder
               .setValue(commandStr)
-//              .addUris(URI.newBuilder
-//                .setValue(s"http://${sys.env("AMA_NODE")}:${config.webserver.Port}/executor-${config.version}-all.jar")
-//                .setExecutable(false)
-//                .setExtract(false)
-//                .build())
+            //              .addUris(URI.newBuilder
+            //                .setValue(s"http://${sys.env("AMA_NODE")}:${config.webserver.Port}/executor-${config.version}-all.jar")
+            //                .setExecutable(false)
+            //                .setExtract(false)
+            //                .build())
 
             // Getting framework (group) resources
-            log.info(s">>>> groupResources: ${frameworkProvider.getGroupResources}")
+            log.info(s"===> groupResources: ${frameworkProvider.getGroupResources}")
             frameworkProvider.getGroupResources.foreach(f => command.addUris(URI.newBuilder
               .setValue(s"http://${sys.env("AMA_NODE")}:${config.webserver.Port}/${f.getName}")
               .setExecutable(false)
@@ -260,7 +262,7 @@
               executable = relativePath.subpath(amaDist.toPath.getNameCount, relativePath.getNameCount)
             } else {
               val dest = new File(s"dist/${jobManager.getJobId}/${sourcePath.toString}")
-              FileUtils.moveFile(sourcePath, dest)
+              FileUtils.copyFile(sourcePath, dest)
               executable = Paths.get(jobManager.getJobId, sourcePath.toPath.toString)
             }
 
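This hunk carries the commit's headline change: the Mesos JobScheduler now writes the same per-action datasets.yaml as the YARN ApplicationMaster, again sourced from DataLoader.getDatasets(env). The reindented commented-out URI block and the "===>" log prefix are cosmetic. The switch from FileUtils.moveFile to FileUtils.copyFile is behavioural, though: moving deletes the source under the distribution directory, so staging the same artifact for a later action or offer would fail, while copying keeps it in place (this reading of the intent is an inference, not stated in the diff). In Commons IO terms:

    import org.apache.commons.io.FileUtils
    import java.io.File

    fun main() {
        // Placeholder paths for illustration.
        val source = File("ama/dist/example-artifact.jar")
        val dest = File("dist/job-0001/ama/dist/example-artifact.jar")

        // FileUtils.moveFile(source, dest) would delete the source, so staging the same
        // artifact again later would fail; copyFile leaves the source intact.
        FileUtils.copyFile(source, dest)
    }
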
diff --git a/leader/src/main/scripts/amaterasu.properties b/leader/src/main/scripts/amaterasu.properties
index ef5242a..1d24d58 100755
--- a/leader/src/main/scripts/amaterasu.properties
+++ b/leader/src/main/scripts/amaterasu.properties
@@ -23,9 +23,7 @@
 yarn.queue=default
 yarn.jarspath=hdfs:///apps/amaterasu
 spark.home=/usr/lib/spark
-#spark.home=/opt/cloudera/parcels/SPARK2-2.1.0.cloudera2-1.cdh5.7.0.p0.171658/lib/spark2
 yarn.hadoop.home.dir=/etc/hadoop
-spark.opts.spark.yarn.am.extraJavaOptions="-Dhdp.version=2.6.1.0-129"
-spark.opts.spark.driver.extraJavaOptions="-Dhdp.version=2.6.1.0-129"
 yarn.master.memoryMB=2048
 yarn.worker.memoryMB=2048
+mesos.libPath=/usr/lib/libmesos.so
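The properties cleanup drops the commented-out CDH spark.home and the HDP-specific -Dhdp.version Java options, and adds mesos.libPath pointing at the Mesos native JNI library on the host. How Amaterasu consumes the property is not part of this diff; a hypothetical sketch of the usual wiring into the Mesos Java bindings:

    import org.apache.mesos.MesosNativeLibrary
    import java.io.FileInputStream
    import java.util.Properties

    fun main() {
        // Hypothetical: read the path from the parsed amaterasu.properties.
        val props = Properties().apply { load(FileInputStream("amaterasu.properties")) }
        val libPath = props.getProperty("mesos.libPath", "/usr/lib/libmesos.so")

        // The Mesos Java bindings otherwise look for MESOS_NATIVE_JAVA_LIBRARY in the
        // environment; an explicit path can be passed to load() instead.
        MesosNativeLibrary.load(libPath)
    }
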
diff --git a/sdk_python/dist/amaterasu-sdk-0.2.0-incubating-rc4.zip b/sdk_python/dist/amaterasu-sdk-0.2.0-incubating-rc4.zip
index a96d996..b2d76c0 100644
--- a/sdk_python/dist/amaterasu-sdk-0.2.0-incubating-rc4.zip
+++ b/sdk_python/dist/amaterasu-sdk-0.2.0-incubating-rc4.zip
Binary files differ