Merge pull request #196 from ibm-et/SPARK_HOME_REQUIRED
Require SPARK_HOME to run and do not pack Spark dependencies
diff --git a/.gitignore b/.gitignore
index 310328c..12db42c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,7 @@
**/*ivy.xml
test-output/
out/
+dist/
.ensime
.ensime_cache/
diff --git a/.travis.yml b/.travis.yml
index d2e701a..afc2969 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,19 +1,21 @@
language: scala
scala:
- "2.10.4"
+
jdk:
- oraclejdk7
- openjdk7
-script:
- - "sbt clean test -Dakka.test.timefactor=3"
- - find $HOME/.sbt -name "*.lock" | xargs rm
- - find $HOME/.ivy2 -name "ivydata-*.properties" | xargs rm
+
+script:
+ - make test-travis
+
sudo: false
+
cache:
directories:
- $HOME/.ivy2/cache
- $HOME/.sbt/boot/
-branches:
- only:
- - master
+branches:
+ only:
+ - master
\ No newline at end of file
diff --git a/Makefile b/Makefile
index e41dbcd..b0cafd1 100644
--- a/Makefile
+++ b/Makefile
@@ -14,54 +14,48 @@
# limitations under the License.
#
-.PHONY: clean build build-image dev vagrantup
+.PHONY: clean build init dev test test-travis
-# Container Properties
-KERNEL_CONTAINER?=spark-kernel
-STDIN_PORT?=48000
-SHELL_PORT?=48001
-IOPUB_PORT?=48002
-CONTROL_PORT?=48003
-HB_PORT?=48004
-IP?=0.0.0.0
+VERSION?=0.1.5
+IS_SNAPSHOT?=true
+APACHE_SPARK_VERSION?=1.5.1
+
+ENV_OPTS=APACHE_SPARK_VERSION=$(APACHE_SPARK_VERSION) VERSION=$(VERSION) IS_SNAPSHOT=$(IS_SNAPSHOT)
+
+FULL_VERSION=$(shell echo $(VERSION)`[ "$(IS_SNAPSHOT)" == "true" ] && (echo '-SNAPSHOT')` )
+ASSEMBLY_JAR=$(shell echo kernel-assembly-$(FULL_VERSION).jar )
clean:
vagrant ssh -c "cd /src/spark-kernel/ && sbt clean"
+ @-rm -r dist
-kernel/target/pack/bin/sparkkernel: vagrantup ${shell find ./*/src/main/**/*}
- vagrant ssh -c "cd /src/spark-kernel/ && sbt compile && sbt pack"
- vagrant ssh -c "cd /src/spark-kernel/kernel/target/pack && make install"
-
-build-image: IMAGE_NAME?cloudet/spark-kernel
-build-image: CACHE?=""
-build-image:
- vagrant ssh -c "cd /src/spark-kernel && docker build $(CACHE) -t $(FULL_IMAGE) ."
-
-run-image: KERNEL_CONTAINER?=spark-kernel
-run-image: STDIN_PORT?=48000
-run-image: SHELL_PORT?=48001
-run-image: IOPUB_PORT?=48002
-run-image: CONTROL_PORT?=48003
-run-image: HB_PORT?=48004
-run-image: IP?=0.0.0.0
-run-image: build-image
- vagrant ssh -c "docker rm -f $(KERNEL_CONTAINER) || true"
- vagrant ssh -c "docker run -d \
- --name=$(KERNEL_CONTAINER) \
- -e "STDIN_PORT=$(STDIN_PORT)" \
- -e "SHELL_PORT=$(SHELL_PORT)" \
- -e "IOPUB_PORT=$(IOPUB_PORT)" \
- -e "CONTROL_PORT=$(CONTROL_PORT)" \
- -e "HB_PORT=$(HB_PORT)" -e "IP=$(IP)" \
- $(FULL_IMAGE)"
-
-vagrantup:
+init:
vagrant up
-build: kernel/target/pack/bin/sparkkernel
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/src/main/**/*}
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): ${shell find ./*/build.sbt}
+kernel/target/scala-2.10/$(ASSEMBLY_JAR): project/build.properties project/Build.scala project/Common.scala project/plugins.sbt
+ vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt kernel/assembly"
-dev: build
+build: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
+
+dev: dist
vagrant ssh -c "cd ~ && ipython notebook --ip=* --no-browser"
-test: build
- vagrant ssh -c "cd /src/spark-kernel/ && sbt test"
\ No newline at end of file
+test:
+ vagrant ssh -c "cd /src/spark-kernel/ && $(ENV_OPTS) sbt compile test"
+
+dist: COMMIT=$(shell git rev-parse --short=12 --verify HEAD)
+dist: VERSION_FILE=dist/spark-kernel/VERSION
+dist: kernel/target/scala-2.10/$(ASSEMBLY_JAR)
+ @mkdir -p dist/spark-kernel/bin dist/spark-kernel/lib
+ @cp -r etc/bin/* dist/spark-kernel/bin/.
+ @cp kernel/target/scala-2.10/$(ASSEMBLY_JAR) dist/spark-kernel/lib/.
+ @echo "VERSION: $(FULL_VERSION)" > $(VERSION_FILE)
+ @echo "COMMIT: $(COMMIT)" >> $(VERSION_FILE)
+ @cd dist; tar -cvzf spark-kernel-$(FULL_VERSION).tar.gz spark-kernel
+
+test-travis:
+ $(ENV_OPTS) sbt clean test -Dakka.test.timefactor=3
+ find $(HOME)/.sbt -name "*.lock" | xargs rm
+ find $(HOME)/.ivy2 -name "ivydata-*.properties" | xargs rm
\ No newline at end of file
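
For reference, the version and artifact naming the new Makefile computes can be written out as a minimal Scala sketch of the same logic, assuming the defaults above (VERSION=0.1.5, IS_SNAPSHOT=true):

```scala
// Sketch of the Makefile's FULL_VERSION / ASSEMBLY_JAR derivation (not part of the PR).
val version    = sys.env.getOrElse("VERSION", "0.1.5")
val isSnapshot = sys.env.getOrElse("IS_SNAPSHOT", "true").toBoolean

val fullVersion = if (isSnapshot) s"$version-SNAPSHOT" else version // "0.1.5-SNAPSHOT"
val assemblyJar = s"kernel-assembly-$fullVersion.jar"               // what `sbt kernel/assembly` emits
```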
diff --git a/README.md b/README.md
index c9367d2..2115304 100644
--- a/README.md
+++ b/README.md
@@ -23,24 +23,25 @@
Develop
=======
-[Vagrant][vagrant] is used to simplify the development experience. It is the only requirement to be able to build and test the Spark Kernel on your development machine.
+[Vagrant][vagrant] is used to simplify the development experience. It is the only requirement to be able to build, package, and test the Spark Kernel on your development machine.
-To interact with the Spark Kernel using Jupyter, run
+To build and interact with the Spark Kernel using Jupyter, run
```
make dev
```
This will start a Jupyter notebook server accessible at `http://192.168.44.44:8888`. From here you can create notebooks that use the Spark Kernel configured for local mode.
+Tests can be run with `make test`.
Build & Package
===============
To build and package up the Spark Kernel, run
```
-make build
+make dist
```
-The resulting package of the kernel will be located at `./kernel/target/pack`. It contains a `Makefile` that can be used to install the Spark Kernel by running `make install` within the directory. More details about building and packaging can be found [here][4].
+The resulting kernel package will be located at `./dist/spark-kernel-<VERSION>.tar.gz`. The uncompressed package is what Jupyter runs when doing `make dev`.
Version
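
For reference, based on the Makefile's `dist` target earlier in this diff, the unpacked package contains `bin/spark-kernel`, `lib/kernel-assembly-<VERSION>.jar`, and a `VERSION` file recording the version and commit. A small Scala sketch that sanity-checks such a tree (illustrative, not part of the PR):

```scala
// Sketch: sanity-checking an unpacked `make dist` tree (paths follow the
// Makefile's dist target; this helper is illustrative, not part of the PR).
import java.io.File

val root = new File("dist/spark-kernel")
Seq("bin/spark-kernel", "VERSION").foreach { p =>
  assert(new File(root, p).exists(), s"missing $p")
}
val lib = Option(new File(root, "lib").listFiles()).getOrElse(Array.empty[File])
assert(lib.exists(_.getName.startsWith("kernel-assembly-")), "missing kernel assembly jar")
```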
diff --git a/Vagrantfile b/Vagrantfile
index 9ff2207..09132da 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -111,6 +111,7 @@
wget http://apache.arvixe.com/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.3.tgz && \
tar xvzf spark-${SPARK_VERSION}-bin-hadoop2.3.tgz && \
ln -s spark-${SPARK_VERSION}-bin-hadoop2.3 spark && \
+ export SPARK_HOME=/opt/spark && \
set_flag SPARK
fi
@@ -122,13 +123,13 @@
"display_name": "Spark 1.5.1 (Scala 2.10.4)",
"language_info": { "name": "scala" },
"argv": [
- "/home/vagrant/local/bin/sparkkernel",
+ "/src/spark-kernel/dist/spark-kernel/bin/spark-kernel",
"--profile",
"{connection_file}"
],
"codemirror_mode": "scala",
"env": {
- "JVM_OPT": "-Xms1024M -Xmx4096M -Dlog4j.logLevel=trace",
+ "SPARK_OPTS": "--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=trace",
"MAX_INTERPRETER_THREADS": "16",
"SPARK_CONFIGURATION": "spark.cores.max=4",
"CAPTURE_STANDARD_OUT": "true",
@@ -194,5 +195,7 @@
config.vm.provider :virtualbox do |vb|
vb.customize ["modifyvm", :id, "--memory", "2048"]
vb.customize ["modifyvm", :id, "--cpus", "2"]
+ vb.name = "spark-kernel-vm"
+
end
end
diff --git a/etc/bin/spark-kernel b/etc/bin/spark-kernel
new file mode 100755
index 0000000..18cce1d
--- /dev/null
+++ b/etc/bin/spark-kernel
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Copyright 2015 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PROG_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+
+if [ -z "$SPARK_HOME" ]; then
+ echo "SPARK_HOME must be set to the location of a Spark distribution!"
+ exit 1
+fi
+
+echo "Starting Spark Kernel with SPARK_HOME=$SPARK_HOME"
+
+KERNEL_ASSEMBLY=`(cd ${PROG_HOME}/lib; ls -1 kernel-assembly-*.jar;)`
+
+# disable randomized hash for string in Python 3.3+
+export PYTHONHASHSEED=0
+
+exec "$SPARK_HOME"/bin/spark-submit \
+ ${SPARK_OPTS} \
+ --driver-class-path $PROG_HOME/lib/${KERNEL_ASSEMBLY} \
+ --class com.ibm.spark.SparkKernel $PROG_HOME/lib/${KERNEL_ASSEMBLY} "$@"
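
The script above is the heart of the change: it refuses to start unless `SPARK_HOME` is set and delegates to `spark-submit` rather than launching a bundled Spark. A hedged Scala sketch of the same command assembly (paths and error handling are illustrative; the real logic is the bash above):

```scala
// Sketch of what etc/bin/spark-kernel assembles (illustrative Scala).
import java.io.File

val sparkHome = sys.env.getOrElse("SPARK_HOME",
  sys.error("SPARK_HOME must be set to the location of a Spark distribution!"))
val progHome  = new File("dist/spark-kernel") // example install location
val kernelJar = Option(new File(progHome, "lib").listFiles())
  .getOrElse(Array.empty[File])
  .find(_.getName.matches("kernel-assembly-.*\\.jar"))
  .getOrElse(sys.error("no kernel assembly jar under lib/"))

val sparkOpts = sys.env.get("SPARK_OPTS").toSeq.flatMap(_.split("\\s+"))
val command = Seq(s"$sparkHome/bin/spark-submit") ++ sparkOpts ++ Seq(
  "--driver-class-path", kernelJar.getPath,
  "--class", "com.ibm.spark.SparkKernel", kernelJar.getPath)
```

Note that `${SPARK_OPTS}` is intentionally unquoted in the script, so multiple flags (such as the repeated `--driver-java-options` set in the Vagrantfile's kernel spec) expand into separate `spark-submit` arguments.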
diff --git a/kernel-api/build.sbt b/kernel-api/build.sbt
index ca2ba01..831bd44 100644
--- a/kernel-api/build.sbt
+++ b/kernel-api/build.sbt
@@ -1,4 +1,3 @@
-import xerial.sbt.Pack._
import Common._
/*
* Copyright 2015 IBM Corp.
@@ -15,8 +14,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-pack <<= pack dependsOn compile
-
//
// SCALA INTERPRETER DEPENDENCIES
//
@@ -36,43 +33,10 @@
)
//
-// SPARK DEPENDENCIES
-//
-// NOTE: Currently, version must match deployed Spark cluster version.
-//
-
-// TODO: Mark these as provided and bring them in via the kernel project
-// so users wanting to implement a magic do not bring in Spark itself
-libraryDependencies ++= Seq(
- "org.apache.spark" %% "spark-core" % sparkVersion.value excludeAll( // Apache v2
- ExclusionRule(organization = "org.apache.hadoop"),
-
- // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
- ExclusionRule(
- organization = "org.jboss.netty",
- name = "netty"
- )
- ),
- "org.apache.spark" %% "spark-streaming" % sparkVersion.value, // Apache v2
- "org.apache.spark" %% "spark-sql" % sparkVersion.value, // Apache v2
- "org.apache.spark" %% "spark-mllib" % sparkVersion.value, // Apache v2
- "org.apache.spark" %% "spark-graphx" % sparkVersion.value, // Apache v2
- "org.apache.spark" %% "spark-repl" % sparkVersion.value excludeAll // Apache v2
- ExclusionRule(organization = "org.apache.hadoop")
-)
-
-//
-// HADOOP DEPENDENCIES
-//
-libraryDependencies ++= Seq(
- "org.apache.hadoop" % "hadoop-client" % hadoopVersion.value excludeAll
- ExclusionRule(organization = "javax.servlet")
-)
-
-//
// EXECUTION DEPENDENCIES
//
libraryDependencies += "org.apache.commons" % "commons-exec" % "1.3"
+libraryDependencies += "com.google.guava" % "guava" % "14.0.1"
//
// CLI DEPENDENCIES
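
A plausible reading of the new guava line above (an assumption; the PR does not state it): once the Spark artifacts move to `provided` scope, their transitive guava no longer reaches kernel-api's compile classpath, so the version the code compiles against is pinned explicitly:

```scala
// Hypothesis (not stated in the PR): pin guava directly because Spark no longer
// supplies it transitively; 14.0.1 is the version Spark 1.5.x builds against.
libraryDependencies += "com.google.guava" % "guava" % "14.0.1"
```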
diff --git a/kernel/build.sbt b/kernel/build.sbt
index 2c0d371..7b01dd0 100644
--- a/kernel/build.sbt
+++ b/kernel/build.sbt
@@ -1,5 +1,4 @@
import Common._
-import xerial.sbt.Pack._
/*
* Copyright 2014 IBM Corp.
*
@@ -15,10 +14,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-pack <<= pack dependsOn (rebuildIvyXml dependsOn deliverLocal)
-
-packArchive <<= packArchive dependsOn (rebuildIvyXml dependsOn deliverLocal)
-
//
// TEST DEPENDENCIES
//
diff --git a/kernel/project/plugins.sbt b/kernel/project/plugins.sbt
index ef39ec4..542f406 100644
--- a/kernel/project/plugins.sbt
+++ b/kernel/project/plugins.sbt
@@ -17,7 +17,3 @@
logLevel := Level.Warn
resolvers += Classpaths.sbtPluginReleases
-
-// Provides ability to create a pack containing all jars and a script to run
-// them using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
diff --git a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
index b3871f5..a5acbc2 100644
--- a/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/com/ibm/spark/boot/CommandLineOptions.scala
@@ -42,10 +42,6 @@
parser.accepts("profile", "path to IPython JSON connection file")
.withRequiredArg().ofType(classOf[File])
- private val _master =
- parser.accepts("master", "location of master Spark node")
- .withRequiredArg().ofType(classOf[String])
-
private val _ip =
parser.accepts("ip", "ip used to bind sockets")
.withRequiredArg().ofType(classOf[String])
@@ -139,7 +135,6 @@
}
val commandLineConfig: Config = ConfigFactory.parseMap(Map(
- "spark.master" -> get(_master),
"stdin_port" -> get(_stdin_port),
"shell_port" -> get(_shell_port),
"iopub_port" -> get(_iopub_port),
diff --git a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
index ab6748a..703d677 100644
--- a/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
+++ b/kernel/src/test/scala/com/ibm/spark/boot/CommandLineOptionsSpec.scala
@@ -132,31 +132,6 @@
}
}
- describe("when received --master=<value>") {
- it("should error if value is not set") {
- intercept[OptionException] {
- new CommandLineOptions(Seq("--master"))
- }
- }
-
- describe("#toConfig") {
- it("should set master to specified value") {
- val expected = "test"
- val options = new CommandLineOptions(s"--master=${expected}" :: Nil)
- val config: Config = options.toConfig
-
- config.getString("spark.master") should be(expected)
- }
-
- it("should set master to local[*]") {
- val options = new CommandLineOptions(Nil)
- val config: Config = options.toConfig
-
- config.getString("spark.master") should be("local[*]")
- }
- }
- }
-
describe("when received --profile=<path>") {
it("should error if path is not set") {
intercept[OptionException] {
@@ -308,7 +283,7 @@
config.getString("ip") should be(expected)
}
- it("should set master to local[*]") {
+ it("should set ip to 127.0.0.1") {
val options = new CommandLineOptions(Nil)
val config: Config = options.toConfig
@@ -319,18 +294,15 @@
describe("when received options with surrounding whitespace") {
it("should trim whitespace") {
- val master = "test"
val url1 = "url1"
val url2 = "url2"
val options = new CommandLineOptions(Seq(
- s"--master=${master} ",
" --magic-url ", s" ${url1}\t",
"--magic-url", s" \t ${url2} \t"
))
val config: Config = options.toConfig
- config.getString("spark.master") should be(master)
config.getList("magic_urls").unwrapped.asScala should
be (Seq(url1, url2))
}
diff --git a/project/Build.scala b/project/Build.scala
index fc8dc19..9116a53 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -25,8 +25,8 @@
import sbtunidoc.Plugin.UnidocKeys._
import sbtunidoc.Plugin._
import scoverage.ScoverageSbtPlugin
-import xerial.sbt.Pack._
import com.typesafe.sbt.SbtGit.{GitKeys => git}
+import sbtassembly.AssemblyKeys._
object Build extends Build with Settings with SubProjects with TestTasks {
/**
@@ -96,9 +96,8 @@
lazy val kernel = addTestTasksToProject(Project(
id = "kernel",
base = file("kernel"),
- settings = fullSettings ++
- packSettings ++ Seq(
- packMain := Map("sparkkernel" -> "com.ibm.spark.SparkKernel")
+ settings = fullSettings ++ Seq(
+ test in assembly := {}
)
)) dependsOn(
macros % "test->test;compile->compile",
@@ -166,7 +165,7 @@
lazy val kernel_api = addTestTasksToProject(Project(
id = "kernel-api",
base = file("kernel-api"),
- settings = fullSettings ++ packSettings
+ settings = fullSettings
)) dependsOn(macros % "test->test;compile->compile")
/**
@@ -180,7 +179,7 @@
sourceGenerators in Compile <+= buildInfo,
buildInfoKeys ++= Seq[BuildInfoKey](
version, scalaVersion,
- "sparkVersion" -> Common.sparkVersion.value,
+ "sparkVersion" -> Common.sparkVersion,
"buildDate" -> {
val simpleDateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss")
val now = Calendar.getInstance.getTime
@@ -210,7 +209,7 @@
lazy val protocol = addTestTasksToProject(Project(
id = "protocol",
base = file("protocol"),
- settings = fullSettings ++ buildInfoSettings ++ buildSettings ++ packSettings
+ settings = fullSettings ++ buildInfoSettings ++ buildSettings
)) dependsOn(macros % "test->test;compile->compile")
/**
diff --git a/project/Common.scala b/project/Common.scala
index 73e5395..cef0d45 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -22,6 +22,7 @@
object Common {
// Parameters for publishing to artifact repositories
+ val versionNumber = Properties.envOrElse("VERSION", "0.0.0-dev")
val snapshot = Properties.envOrElse("IS_SNAPSHOT","true").toBoolean
val repoPort = Properties.envOrElse("REPO_PORT","")
val repoHost = Properties.envOrElse("REPO_HOST","")
@@ -30,7 +31,7 @@
val repoEndpoint = Properties.envOrElse("REPO_ENDPOINT", if(snapshot) "/nexus/content/repositories/snapshots/" else "/nexus/content/repositories/releases/")
val repoUrl = Properties.envOrElse("REPO_URL", s"http://${repoHost}:${repoPort}${repoEndpoint}")
- private val versionNumber = "0.1.5"
+
private val buildOrganization = "com.ibm.spark"
private val buildVersion =
if (snapshot) s"$versionNumber-SNAPSHOT"
@@ -38,8 +39,11 @@
private val buildScalaVersion = "2.10.4"
private val buildSbtVersion = "0.13.7"
+
+
// Global dependencies provided to all projects
- private val buildLibraryDependencies = Seq(
+ private var buildLibraryDependencies = Seq(
+
// Needed to force consistent typesafe config with play json and spark
"com.typesafe" % "config" % "1.2.1",
"org.slf4j" % "slf4j-log4j12" % "1.7.5" % "test",
@@ -49,12 +53,30 @@
"org.mockito" % "mockito-all" % "1.9.5" % "test" // MIT
)
- lazy val sparkVersion = settingKey[String]("The Apache Spark version to use")
-
- lazy val hadoopVersion = settingKey[String]("The Apache Hadoop version to use")
-
// The prefix used for our custom artifact names
private val artifactPrefix = "ibm-spark"
+ lazy val sparkVersion = {
+ val sparkEnvironmentVariable = "APACHE_SPARK_VERSION"
+ val defaultSparkVersion = "1.5.1"
+
+ val _sparkVersion = Properties.envOrNone(sparkEnvironmentVariable)
+
+ if (_sparkVersion.isEmpty) {
+ scala.Console.out.println(
+ s"""
+ |[INFO] Using default Apache Spark $defaultSparkVersion!
+ """.stripMargin.trim.replace('\n', ' '))
+ defaultSparkVersion
+ } else {
+ val version = _sparkVersion.get
+ scala.Console.out.println(
+ s"""
+ |[INFO] Using Apache Spark $version provided from
+ |$sparkEnvironmentVariable!
+ """.stripMargin.trim.replace('\n', ' '))
+ version
+ }
+ }
val settings: Seq[Def.Setting[_]] = Seq(
organization := buildOrganization,
@@ -63,50 +85,6 @@
sbtVersion := buildSbtVersion,
libraryDependencies ++= buildLibraryDependencies,
isSnapshot := snapshot,
- sparkVersion := {
- val sparkEnvironmentVariable = "APACHE_SPARK_VERSION"
- val defaultSparkVersion = "1.5.1"
-
- val _sparkVersion = Properties.envOrNone(sparkEnvironmentVariable)
-
- if (_sparkVersion.isEmpty) {
- scala.Console.out.println(
- s"""
- |[INFO] Using default Apache Spark $defaultSparkVersion!
- """.stripMargin.trim.replace('\n', ' '))
- defaultSparkVersion
- } else {
- val version = _sparkVersion.get
- scala.Console.out.println(
- s"""
- |[INFO] Using Apache Spark $version provided from
- |$sparkEnvironmentVariable!
- """.stripMargin.trim.replace('\n', ' '))
- version
- }
- },
- hadoopVersion := {
- val hadoopEnvironmentVariable = "APACHE_HADOOP_VERSION"
- val defaultHadoopVersion = "2.3.0"
-
- val _hadoopVersion = Properties.envOrNone(hadoopEnvironmentVariable)
-
- if (_hadoopVersion.isEmpty) {
- scala.Console.out.println(
- s"""
- |[INFO] Using default Apache Hadoop $defaultHadoopVersion!
- """.stripMargin.trim.replace('\n', ' '))
- defaultHadoopVersion
- } else {
- val version = _hadoopVersion.get
- scala.Console.out.println(
- s"""
- |[INFO] Using Apache Hadoop $version provided from
- |$hadoopEnvironmentVariable!
- """.stripMargin.trim.replace('\n', ' '))
- version
- }
- },
scalacOptions in (Compile, doc) ++= Seq(
// Ignore packages (for Scaladoc) not from our project
@@ -162,6 +140,22 @@
compile <<= (compile in Compile) dependsOn (rebuildIvyXml dependsOn deliverLocal)
) ++ rebuildIvyXmlSettings // Include our rebuild ivy xml settings
+
+ buildLibraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % "1.5.1" % "provided" excludeAll( // Apache v2
+
+ // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
+ ExclusionRule(
+ organization = "org.jboss.netty",
+ name = "netty"
+ )
+ ),
+ "org.apache.spark" %% "spark-streaming" % sparkVersion % "provided",
+ "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
+ "org.apache.spark" %% "spark-mllib" % sparkVersion % "provided",
+ "org.apache.spark" %% "spark-graphx" % sparkVersion % "provided",
+ "org.apache.spark" %% "spark-repl" % sparkVersion % "provided"
+ )
+
// ==========================================================================
// = REBUILD IVY XML SETTINGS BELOW
// ==========================================================================
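
The `"provided"` scope in the dependency block above is what keeps Spark out of the assembly jar: sbt-assembly omits provided dependencies from the uber-jar, and `spark-submit` supplies them at launch. One runtime sanity check a deployment could add (a sketch, not part of the PR):

```scala
// Sketch: fail fast if the kernel was launched without Spark on the classpath
// (e.g. run directly with `java -jar` instead of through spark-submit).
try Class.forName("org.apache.spark.SparkContext")
catch {
  case _: ClassNotFoundException =>
    sys.error("Spark classes not found; launch via $SPARK_HOME/bin/spark-submit")
}
```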
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 4efa32c..7c01ef5 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -29,12 +29,8 @@
// `sbt dependencyTree`; there are other commands provided as well
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
-// Provides ability to create a pack containing all jars and a script to run them
-// using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
-
-// Provides the ability to package our project as a docker image
-addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "0.5.2")
+// Provides the ability to create an uber-jar
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.0")
// Provides a generated build info object to sync between build and application
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.3.2")
diff --git a/protocol/build.sbt b/protocol/build.sbt
index 36f655a..e705600 100644
--- a/protocol/build.sbt
+++ b/protocol/build.sbt
@@ -1,4 +1,3 @@
-import xerial.sbt.Pack._
/*
* Copyright 2014 IBM Corp.
*
@@ -14,15 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-pack <<= pack dependsOn compile
-
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
//
// JSON DEPENDENCIES
//
libraryDependencies ++= Seq(
- "com.typesafe.play" %% "play-json" % "2.3.6", // Apache v2
+ "com.typesafe.play" %% "play-json" % "2.3.6" excludeAll( // Apache v2
+ ExclusionRule(organization = "com.fasterxml.jackson.core")
+ ),
"org.slf4j" % "slf4j-api" % "1.7.5" // MIT
)
diff --git a/protocol/project/plugins.sbt b/protocol/project/plugins.sbt
index 79742c8..7d420a0 100644
--- a/protocol/project/plugins.sbt
+++ b/protocol/project/plugins.sbt
@@ -13,6 +13,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-// Provides ability to create a pack containing all jars and a script to run
-// them using `sbt pack` or `sbt pack-archive` to generate a *.tar.gz file
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.6.1")
\ No newline at end of file
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index d64d2b8..a01a88c 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -35,7 +35,6 @@
transport = "tcp"
signature_scheme = "hmac-sha256"
key = ""
-spark.master = "local[*]"
ivy_local = "/tmp/.ivy2"
ivy_local = ${?IVY_LOCAL}