[TOREE-487][TOREE-488] Remove PySpark and SparkR interpreters

Instead, please use a supported kernel such as IPython or IRkernel
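
Python and R users can keep their Spark workflows on those kernels. A
minimal, illustrative migration (exact commands depend on your
environment; only the --interpreters flag below is taken from this change):

    # Toree now ships only the remaining interpreters
    jupyter toree install --interpreters=Scala,SQL

    # Python: use the IPython kernel with pyspark available
    pip install ipykernel pyspark
    python -m ipykernel install --user

    # R: from an R session, install IRkernel
    #   install.packages("IRkernel"); IRkernel::installspec()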

Closes #166
diff --git a/Dockerfile b/Dockerfile
index fa18621..ca4d26b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -26,7 +26,7 @@
     npm install -g bower
 
 # for pyspark demos
-ENV APACHE_SPARK_VERSION 2.0.0
+ENV APACHE_SPARK_VERSION 2.2.0
 
 RUN apt-get -y update && \
     apt-get -y install software-properties-common
diff --git a/Makefile b/Makefile
index 6fec64f..4619ff3 100644
--- a/Makefile
+++ b/Makefile
@@ -173,7 +173,7 @@
 
 define JUPYTER_COMMAND
 pip install toree-$(BASE_VERSION).tar.gz
-jupyter toree install --interpreters=PySpark,SQL,Scala,SparkR
+jupyter toree install --interpreters=Scala,SQL
 cd /srv/toree/etc/examples/notebooks
 jupyter notebook --ip=* --no-browser
 endef
@@ -197,7 +197,7 @@
 		$(SYSTEM_TEST_IMAGE) \
 		bash -c "(cd /srv/system-test-resources && python -m http.server 8000 &) && \
 		rm -rf /home/jovyan/.local/share/jupyter/kernels/apache_toree_scala/ && \
-		pip install /srv/toree-pip/toree*.tar.gz && jupyter toree install --interpreters=PySpark,Scala,SparkR && \
+		pip install /srv/toree-pip/toree*.tar.gz && jupyter toree install --interpreters=Scala && \
 		pip install nose jupyter_kernel_test && python /srv/test_toree.py"
 
 
diff --git a/build.sbt b/build.sbt
index 1bafcf2..78669df 100644
--- a/build.sbt
+++ b/build.sbt
@@ -122,10 +122,10 @@
 lazy val root = (project in file("."))
   .settings(name := "toree")
   .aggregate(
-    macros,protocol,plugins,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,pysparkInterpreter,sparkrInterpreter,kernel
+    macros,protocol,plugins,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,kernel
   )
   .dependsOn(
-    macros,protocol,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,pysparkInterpreter,sparkrInterpreter,kernel
+    macros,protocol,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,kernel
   )
 
 /**
@@ -188,20 +188,6 @@
   .dependsOn(plugins, protocol, kernelApi, scalaInterpreter)
 
 /**
-* Project represents the Python interpreter used by the Spark Kernel.
-*/
-lazy val pysparkInterpreter = (project in file("pyspark-interpreter"))
-  .settings(name := "toree-pyspark-interpreter")
-  .dependsOn(plugins, protocol, kernelApi)
-
-/**
-* Project represents the R interpreter used by the Spark Kernel.
-*/
-lazy val sparkrInterpreter = (project in file("sparkr-interpreter"))
-  .settings(name := "toree-sparkr-interpreter")
-  .dependsOn(plugins, protocol, kernelApi)
-
-/**
 * Project representing the kernel code for the Spark Kernel backend.
 */
 lazy val kernel = (project in file("kernel"))
@@ -211,9 +197,7 @@
     protocol % "test->test;compile->compile",
     communication % "test->test;compile->compile",
     kernelApi % "test->test;compile->compile",
-    pysparkInterpreter % "test->test;compile->compile",
     scalaInterpreter % "test->test;compile->compile",
-    sparkrInterpreter % "test->test;compile->compile",
     sqlInterpreter % "test->test;compile->compile"
   )
 
diff --git a/etc/examples/notebooks/sqlcontext_sharing.ipynb b/etc/examples/notebooks/sqlcontext_sharing.ipynb
deleted file mode 100644
index 0b514d3..0000000
--- a/etc/examples/notebooks/sqlcontext_sharing.ipynb
+++ /dev/null
@@ -1,155 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# SQLContext Sharing <a name=\"top\"></a>"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This example shows how Toree enables sharing of the SQLContext across the variety of languages that it supports (Scala, Python, R, SQL). To demostrate, this notebook will load data using one language and read it from another. Refer to the [Spark documentation](http://spark.apache.org/docs/latest/sql-programming-guide.html) for details about the DataFrame and SQL APIs."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "<div class=\"alert alert-info\" role=\"alert\" style=\"margin-top: 10px\">\n",
-    "<p><strong>Note</strong><p>\n",
-    "\n",
-    "<p>Due to an issue installing R and running it using DockerMachine, we are not able to show an example with R.</p>\n",
-    "</div>"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "**Table of Contents**\n",
-    "\n",
-    "1. [Create a DataFrame in Scala](#create-in-scala)\n",
-    "2. [Read DataFrame in Python](#read-in-python)\n",
-    "3. [Create a DataFrame in Python](#create-in-python)\n",
-    "4. [Read DataFrame in Scala](#read-in-scala)\n",
-    "5. [Read DataFrame in SQL](#read-in-sql)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Create a DataFrame in Scala <a name=\"create-in-scala\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "val people = spark.read.json(\"people.json\")\n",
-    "people.createOrReplaceTempView(\"people\")\n",
-    "people.show()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Read DataFrame in Python <a name=\"read-in-python\"></a> <span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "%%PySpark\n",
-    "people = spark.table(\"people\")\n",
-    "people.show()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Create a DataFrame in Python <a name=\"create-in-python\"></a> <span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "%%PySpark\n",
-    "cars = spark.read.json(\"cars.json\")\n",
-    "cars.createOrReplaceTempView(\"cars\")\n",
-    "cars.show()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Read DataFrame in Scala <a name=\"read-in-scala\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "val cars = spark.table(\"cars\")\n",
-    "cars.show()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Read DataFrame in SQL <a name=\"read-in-sql\"></a><span style=\"float: right; font-size: 0.5em\"><a href=\"#top\">Top</a></span>"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "%%sql\n",
-    "select * from cars where manufacturer == 'Audi'"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Apache Toree - Scala",
-   "language": "scala",
-   "name": "apache_toree_scala"
-  },
-  "language_info": {
-   "file_extension": ".scala",
-   "name": "scala",
-   "version": "2.11.8"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/etc/pip_install/toree/toreeapp.py b/etc/pip_install/toree/toreeapp.py
index c4dc11d..b42e46e 100644
--- a/etc/pip_install/toree/toreeapp.py
+++ b/etc/pip_install/toree/toreeapp.py
@@ -27,10 +27,8 @@
 from jupyter_client.kernelspec import KernelSpec
 
 INTERPRETER_LANGUAGES = {
-    'PySpark' : 'python',
-    'SparkR' : 'r',
-    'SQL' : 'sql',
-    'Scala' : 'scala'
+    'Scala' : 'scala',
+    'SQL' : 'sql'
 }
 
 PYTHON_PATH = 'PYTHONPATH'
@@ -50,7 +48,7 @@
     jupyter toree install --spark_opts='--master=local[4]'
     jupyter toree install --kernel_name=toree_special
     jupyter toree install --toree_opts='--spark-context-initialization-mode none'
-    jupyter toree install --interpreters=PySpark,SQL
+    jupyter toree install --interpreters=SQL
     jupyter toree install --python=python
     '''
 
diff --git a/etc/tools/.rat-excludes b/etc/tools/.rat-excludes
index 1f22c9d..0f33b21 100644
--- a/etc/tools/.rat-excludes
+++ b/etc/tools/.rat-excludes
@@ -18,9 +18,7 @@
 
 # Files from sparkr which should not be changed
 .lintr
-package-sparkR.sh
 .*md
-.*sparkr-interpreter/src/main/.*
 NAMESPACE
 DESCRIPTION
 .*gitignore
diff --git a/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala b/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
index 39376ae..59c0437 100644
--- a/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
+++ b/kernel/src/main/scala/org/apache/toree/boot/CommandLineOptions.scala
@@ -204,8 +204,6 @@
   private def interpreterPlugins: Option[java.util.List[String]] = {
     //val defaults = getAll(_default_interpreter_plugin).getOrElse(List())
     //val defaults = List[String](
-    //  "PySpark:org.apache.toree.kernel.interpreter.pyspark.PySparkInterpreter",
-    //  "SparkR:org.apache.toree.kernel.interpreter.sparkr.SparkRInterpreter",
     //  "SQL:org.apache.toree.kernel.interpreter.sql.SqlInterpreter"
     //)
 
diff --git a/pyspark-interpreter/build.sbt b/pyspark-interpreter/build.sbt
deleted file mode 100644
index ac89909..0000000
--- a/pyspark-interpreter/build.sbt
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-libraryDependencies ++= Dependencies.sparkAll.value
diff --git a/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py b/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
deleted file mode 100644
index 04d1666..0000000
--- a/pyspark-interpreter/src/main/resources/PySpark/pyspark_runner.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import sys
-import getopt
-import traceback
-import re
-import ast
-
-print("PYTHON::: Starting imports")
-from py4j.java_gateway import java_import, JavaGateway, GatewayClient
-print("PYTHON::: Py4J imported")
-from py4j.protocol import Py4JJavaError
-from pyspark.conf import SparkConf
-from pyspark.context import SparkContext
-from pyspark.rdd import RDD
-from pyspark.files import SparkFiles
-from pyspark.storagelevel import StorageLevel
-from pyspark.accumulators import Accumulator, AccumulatorParam
-from pyspark.broadcast import Broadcast
-from pyspark.serializers import MarshalSerializer, PickleSerializer
-
-from time import sleep
-
-# for back compatibility
-from pyspark.sql import SparkSession, DataFrame, Row
-
-client = GatewayClient(port=int(sys.argv[1]))
-sparkVersion = sys.argv[2]
-
-print("PYTHON:: Starting gateway")
-if re.match("^1\.[456]\..*$", sparkVersion) or re.match("^2\..*$", sparkVersion):
-    gateway = JavaGateway(client, auto_convert=True)
-else:
-    gateway = JavaGateway(client)
-print("PYTHON:: Gateway started")
-
-java_import(gateway.jvm, "org.apache.spark.SparkEnv")
-java_import(gateway.jvm, "org.apache.spark.SparkConf")
-java_import(gateway.jvm, "org.apache.spark.api.java.*")
-java_import(gateway.jvm, "org.apache.spark.api.python.*")
-java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
-
-bridge = gateway.entry_point
-state = bridge.state()
-state.markReady()
-
-if sparkVersion.startswith("1.2"):
-    java_import(gateway.jvm, "org.apache.spark.sql.SparkSession")
-    java_import(gateway.jvm, "org.apache.spark.sql.hive.HiveContext")
-    java_import(gateway.jvm, "org.apache.spark.sql.hive.LocalHiveContext")
-    java_import(gateway.jvm, "org.apache.spark.sql.hive.TestHiveContext")
-elif sparkVersion.startswith("1.3"):
-    java_import(gateway.jvm, "org.apache.spark.sql.*")
-    java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
-elif re.match("^1\.[456]\..*$", sparkVersion):
-    java_import(gateway.jvm, "org.apache.spark.sql.*")
-    java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
-elif re.match("^2\..*$", sparkVersion):
-    java_import(gateway.jvm, "org.apache.spark.sql.*")
-
-java_import(gateway.jvm, "scala.Tuple2")
-
-conf = None
-sc = None
-spark = None
-code_info = None
-
-class Logger(object):
-    def __init__(self):
-        self.out = ""
-
-    def write(self, message):
-        state.sendOutput(code_info.codeId(), message)
-        self.out = self.out + message
-
-    def get(self):
-        return self.out
-
-    def reset(self):
-        self.out = ""
-        
-    def flush(self):
-    	pass
-
-output = Logger()
-sys.stdout = output
-sys.stderr = output
-
-
-class Kernel(object):
-    def __init__(self, jkernel):
-        self._jvm_kernel = jkernel
-
-    def __getattr__(self, name):
-        return self._jvm_kernel.__getattribute__(name)
-
-    def __dir__(self):
-        parent = super().__dir__()
-        return parent + [x for x in self._jvm_kernel.__dir__() if x not in parent]
-
-    def createSparkContext(self, config):
-        global conf, sc, sqlContext
-
-        jconf = gateway.jvm.org.apache.spark.SparkConf(False)
-        for key,value in config.getAll():
-            jconf.set(key, value)
-        self._jvm_kernel.createSparkContext(jconf)
-        conf = None
-        sc = None
-        sqlContext = None
-
-        self.refreshContext()
-
-    def refreshContext(self):
-        global conf, sc, spark
-
-        # This is magic. Please look away. I was never here (prevents multiple gateways being instantiated)
-        with SparkContext._lock:
-            if not SparkContext._gateway:
-                SparkContext._gateway = gateway
-                SparkContext._jvm = gateway.jvm
-
-        if sc is None:
-            jsc = self._jvm_kernel.javaSparkContext()
-            if jsc is not None:
-                jconf = self._jvm_kernel.sparkConf()
-                conf = SparkConf(_jvm=gateway.jvm, _jconf=jconf)
-                sc = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
-
-        if spark is None:
-            jspark = self._jvm_kernel.sparkSession()
-            if jspark is not None and sc is not None:
-                spark = SparkSession(sc, jsparkSession=jspark)
-
-kernel = Kernel(bridge.kernel())
-
-while True:
-    try:
-        next_code_info = state.nextCode()
-
-        # If code is not available, try again later
-        if next_code_info is None:
-            sleep(1)
-            continue
-            
-        code_info = next_code_info
-
-        code_lines = code_info.code().split("\n")
-        final_code = None
-
-        for s in code_lines:
-            if s is None or len(s.strip()) == 0:
-                continue
-
-            # skip comment
-            if s.strip().startswith("#"):
-                continue
-
-            if final_code:
-                final_code += "\n" + s
-            else:
-                final_code = s
-
-        # Ensure the appropriate variables are set in the module namespace
-        kernel.refreshContext()
-
-        if final_code:
-            '''Parse the final_code to an AST parse tree.  If the last node is an expression (where an expression
-            can be a print function or an operation like 1+1) turn it into an assignment where temp_val = last expression.
-            The modified parse tree will get executed.  If the variable temp_val introduced is not none then we have the
-            result of the last expression and should return it as an execute result.  The sys.stdout sendOutput logic
-            gets triggered on each logger message to support long running code blocks instead of bulk'''
-            ast_parsed = ast.parse(final_code)
-            the_last_expression_to_assign_temp_value = None
-            if isinstance(ast_parsed.body[-1], ast.Expr):
-                new_node = (ast.Assign(targets=[ast.Name(id='the_last_expression_to_assign_temp_value', ctx=ast.Store())], value=ast_parsed.body[-1].value))
-                ast_parsed.body[-1] = ast.fix_missing_locations(new_node)
-            compiled_code = compile(ast_parsed, "<string>", "exec")
-            eval(compiled_code)
-            if the_last_expression_to_assign_temp_value is not None:
-                state.markSuccess(code_info.codeId(), str(the_last_expression_to_assign_temp_value))
-            else:
-                state.markSuccess(code_info.codeId(), "")
-            del the_last_expression_to_assign_temp_value
-
-    except Py4JJavaError:
-        excInnerError = traceback.format_exc() # format_tb() does not return the inner exception
-        innerErrorStart = excInnerError.find("Py4JJavaError:")
-        if innerErrorStart > -1:
-            excInnerError = excInnerError[innerErrorStart:]
-        state.markFailure(code_info.codeId(), excInnerError + str(sys.exc_info()))
-    except:
-        state.markFailure(code_info.codeId(), traceback.format_exc())
-
-    output.reset()
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
deleted file mode 100644
index ae6e15a..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkBridge.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.{BrokerBridge, BrokerState}
-import org.apache.toree.kernel.api.KernelLike
-
-/**
- * Represents constants for the PySpark bridge.
- */
-object PySparkBridge {
-  /** Represents the maximum amount of code that can be queued for Python. */
-  val MaxQueuedCode = 500
-
-  /**
-   * Creates a new PySparkBridge instance.
-   *
-   * @param brokerState The container of broker state to expose
-   * @param kernel The kernel API to expose through the bridge
-   *
-   * @return The new PySpark bridge
-   */
-  def apply(
-    brokerState: BrokerState,
-    kernel: KernelLike
-  ): PySparkBridge = {
-    new PySparkBridge(
-      _brokerState = brokerState,
-      _kernel = kernel
-    )
-  }
-}
-
-/**
- * Represents the API available to PySpark to act as the bridge for data
- * between the JVM and Python.
- *
- * @param _brokerState The container of broker state to expose
- * @param _kernel The kernel API to expose through the bridge
- */
-class PySparkBridge private (
-  private val _brokerState: BrokerState,
-  private val _kernel: KernelLike
-) extends BrokerBridge(_brokerState, _kernel) {
-  override val brokerName: String = "PySpark"
-}
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkException.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkException.scala
deleted file mode 100644
index 377ff3f..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkException.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerException
-
-/**
- * Represents a generic PySpark exception.
- *
- * @param message The message to associate with the exception
- */
-class PySparkException(message: String) extends BrokerException(message)
-
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
deleted file mode 100644
index 3ff3a52..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkInterpreter.scala
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import java.net.URL
-
-import org.apache.toree.interpreter.Results.Result
-import org.apache.toree.interpreter._
-import org.apache.toree.kernel.api.KernelLike
-import org.slf4j.LoggerFactory
-import py4j.GatewayServer
-
-import scala.concurrent.Await
-import scala.concurrent.duration._
-import scala.tools.nsc.interpreter.{InputStream, OutputStream}
-
-/**
- * Represents an interpreter interface to PySpark. Requires a properly-set
- * SPARK_HOME, PYTHONPATH pointing to Spark's Python source, and py4j installed
- * where it is accessible to the Spark Kernel.  Optionally specify PYTHON_EXEC
- * to override the default python executable "python'
- *
- */
-class PySparkInterpreter(
-) extends Interpreter {
-  /** Maximum time to wait for the python kernel to be readu */
-  private val WAIT_DURATION: Long = java.util.concurrent.TimeUnit.SECONDS.toMillis(50)
-
-  private val PythonExecEnv = "PYTHON_EXEC"
-  private lazy val pythonExecutable = Option(System.getenv(PythonExecEnv)).getOrElse("python")
-  private val logger = LoggerFactory.getLogger(this.getClass)
-  private var _kernel:KernelLike = _
-
-  // TODO: Replace hard-coded maximum queue count
-  /** Represents the state used by this interpreter's Python instance. */
-  private lazy val pySparkState = new PySparkState(500)
-
-  /** Represents the bridge used by this interpreter's Python interface. */
-  private lazy val pySparkBridge = PySparkBridge(
-    pySparkState,
-    _kernel
-  )
-
-
-  /** Represents the interface for Python to talk to JVM Spark components. */
-  private lazy val gatewayServer = new GatewayServer(pySparkBridge, 0)
-
-  /** Represents the process handler used for the PySpark process. */
-  private lazy val pySparkProcessHandler: PySparkProcessHandler =
-    new PySparkProcessHandler(
-      pySparkBridge,
-      restartOnFailure = true,
-      restartOnCompletion = true
-    )
-
-  private lazy val pySparkService = new PySparkService(
-    pythonExecutable,
-    gatewayServer,
-    pySparkBridge,
-    pySparkProcessHandler
-  )
-  private lazy val pySparkTransformer = new PySparkTransformer
-
-  /**
-   * Initializes the interpreter.
-    *
-    * @param kernel The kernel
-   * @return The newly initialized interpreter
-   */
-  override def init(kernel: KernelLike): Interpreter = {
-    _kernel = kernel
-    this
-  }
-
-  /**
-   * Executes the provided code with the option to silence output.
-    *
-    * @param code The code to execute
-   * @param silent Whether or not to execute the code silently (no output)
-   * @return The success/failure of the interpretation and the output from the
-   *         execution or the failure
-   */
-  override def interpret(code: String, silent: Boolean, output: Option[OutputStream]):
-    (Result, Either[ExecuteOutput, ExecuteFailure]) = {
-    if (!pySparkService.isRunning) pySparkService.start()
-
-    val futureResult = pySparkTransformer.transformToInterpreterResult(
-      pySparkService.submitCode(code, output)
-    )
-
-    Await.result(futureResult, Duration.Inf)
-  }
-
-  /**
-   * Starts the interpreter, initializing any internal state.
-    *
-    * @return A reference to the interpreter
-   */
-  override def start(): Interpreter = {
-    pySparkService.start()
-
-    this
-  }
-
-  /**
-   * Stops the interpreter, removing any previous internal state.
-    *
-    * @return A reference to the interpreter
-   */
-  override def stop(): Interpreter = {
-    pySparkService.stop()
-
-    this
-  }
-
-  /**
-   * Returns the class loader used by this interpreter.
-   *
-   * @return The runtime class loader used by this interpreter
-   */
-  override def classLoader: ClassLoader = this.getClass.getClassLoader
-
-  // Unsupported (but can be invoked)
-  override def lastExecutionVariableName: Option[String] = None
-
-  // Unsupported (but can be invoked)
-  override def read(variableName: String): Option[AnyRef] = None
-
-  // Unsupported
-  override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
-
-  // Unsupported
-  override def interrupt(): Interpreter = ???
-
-  // Unsupported
-  override def bind(variableName: String, typeName: String, value: Any, modifiers: List[String]): Unit = ???
-
-  // Unsupported
-  override def addJars(jars: URL*): Unit = ???
-
-  // Unsupported
-  override def doQuietly[T](body: => T): T = ???
-
-  override def languageInfo: LanguageInfo = {
-    import scala.sys.process._
-
-    // Issue a subprocess call to grab the python version.  This is better than polling a child process.
-    val version = Seq(
-      pythonExecutable,
-      "-c",
-      "import sys; print('{s.major}.{s.minor}.{s.micro}'.format(s=sys.version_info))").!!
-
-    LanguageInfo(
-      "python",
-      version = version,
-      fileExtension = Some(".py"),
-      pygmentsLexer = Some("python"),
-      mimeType = Some("text/x-ipython"),
-      codemirrorMode = Some("text/x-ipython"))
-  }
-}
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcess.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcess.scala
deleted file mode 100644
index a97a243..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcess.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import java.io.{FileOutputStream, File}
-
-import org.apache.toree.interpreter.broker.BrokerProcess
-import org.apache.commons.exec.environment.EnvironmentUtils
-import org.apache.commons.exec._
-import org.apache.commons.io.IOUtils
-import org.apache.spark.SparkContext
-import org.slf4j.LoggerFactory
-import sys.process._
-
-/**
- * Represents the Python process used to evaluate PySpark code.
- *
- * @param pythonProcessName name of python process
- * @param pySparkBridge The bridge to use to retrieve kernel output streams
- *                      and the Spark version to be verified
- * @param pySparkProcessHandler The handler to use when the process fails or
- *                              completes
- * @param port The port to provide to the PySpark process to use to connect
- *             back to the JVM
- * @param sparkVersion The version of Spark that the process will be using
- */
-class PySparkProcess(
-  private val pythonProcessName: String,
-  private val pySparkBridge: PySparkBridge,
-  private val pySparkProcessHandler: PySparkProcessHandler,
-  private val port: Int,
-  private val sparkVersion: String
-) extends BrokerProcess(
-  processName = pythonProcessName,
-  entryResource = "PySpark/pyspark_runner.py",
-  otherResources = Nil,
-  brokerBridge = pySparkBridge,
-  brokerProcessHandler = pySparkProcessHandler,
-  arguments = Seq(port.toString, sparkVersion)
-) {
-
-  override val brokerName: String = "PySpark"
-  private val logger = LoggerFactory.getLogger(this.getClass)
-
-  private val sparkHome = Option(System.getenv("SPARK_HOME"))
-    .orElse(Option(System.getProperty("spark.home")))
-  private val pythonPath = Option(System.getenv("PYTHONPATH"))
-
-  assert(sparkHome.nonEmpty, "PySpark process requires Spark Home to be set!")
-  if (pythonPath.isEmpty) logger.warn("PYTHONPATH not provided for PySpark!")
-
-  /**
-   * Creates a new process environment to be used for environment variable
-   * retrieval by the new process.
-   *
-   * @return The map of environment variables and their respective values
-   */
-  override protected def newProcessEnvironment(): Map[String, String] = {
-    val baseEnvironment = super.newProcessEnvironment()
-
-    import java.io.File.pathSeparator
-
-    val baseSparkHome = sparkHome.get
-    val basePythonPath = pythonPath.getOrElse("")
-    val updatedPythonPath =
-      (basePythonPath.split(pathSeparator) :+ s"$baseSparkHome/python/")
-        .map(_.trim)
-        .filter(_.nonEmpty)
-        .map(new File(_))
-        .distinct
-        .mkString(pathSeparator)
-
-    // Note: Adding the new map values should override the old ones
-    baseEnvironment ++ Map(
-      "SPARK_HOME" -> baseSparkHome,
-      "PYTHONPATH" -> updatedPythonPath
-    )
-  }
-
-  override protected def copyResourceToTmp(resource: String): String = {
-    val destination = super.copyResourceToTmp(resource)
-    if (System.getProperty("os.name").equals("z/OS")){
-        tagPySparkResource(destination)
-    }
-    destination 
-  }
-
-  private def tagPySparkResource(destPath: String): Unit = {
-      val exitCode = Seq("chtag", "-t", "-c", "ISO8859-1", destPath).!
-      if (exitCode != 0) logger.warn("PySpark resource was not tagged correctly.")
-  }
-}
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcessHandler.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcessHandler.scala
deleted file mode 100644
index 95a7828..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkProcessHandler.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerProcessHandler
-
-/**
- * Represents the handler for events triggered by the PySpark process.
- *
- * @param pySparkBridge The bridge to reset when the process fails or completes
- * @param restartOnFailure If true, restarts the process if it fails
- * @param restartOnCompletion If true, restarts the process if it completes
- */
-class PySparkProcessHandler(
-  private val pySparkBridge: PySparkBridge,
-  private val restartOnFailure: Boolean,
-  private val restartOnCompletion: Boolean
-  ) extends BrokerProcessHandler(
-  pySparkBridge,
-  restartOnFailure,
-  restartOnCompletion
-) {
-  override val brokerName: String = "PySpark"
-}
-
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkService.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkService.scala
deleted file mode 100644
index 5a82f19..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkService.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerService
-import org.apache.toree.kernel.interpreter.pyspark.PySparkTypes._
-import org.slf4j.LoggerFactory
-import py4j.GatewayServer
-
-import scala.concurrent.Future
-import scala.tools.nsc.interpreter.OutputStream
-
-/**
- * Represents the service that provides the high-level interface between the
- * JVM and Python.
- *
- * @param pythonProcessName name of python process
- * @param gatewayServer The backend to start to communicate between the JVM and
- *                      Python
- * @param pySparkBridge The bridge to use for communication between the JVM and
- *                      Python
- * @param pySparkProcessHandler The handler used for events that occur with
- *                              the PySpark process
- */
-class PySparkService(
-  private val pythonProcessName: String,
-  private val gatewayServer: GatewayServer,
-  private val pySparkBridge: PySparkBridge,
-  private val pySparkProcessHandler: PySparkProcessHandler
-) extends BrokerService {
-  private val logger = LoggerFactory.getLogger(this.getClass)
-  @volatile private var _isRunning: Boolean = false
-  override def isRunning: Boolean = _isRunning
-
-
-  /** Represents the process used to execute Python code via the bridge. */
-  private lazy val pySparkProcess = {
-    val p = new PySparkProcess(
-      pythonProcessName,
-      pySparkBridge,
-      pySparkProcessHandler,
-      gatewayServer.getListeningPort,
-      org.apache.spark.SPARK_VERSION
-    )
-
-    // Update handlers to correctly reset and restart the process
-    pySparkProcessHandler.setResetMethod(message => {
-      p.stop()
-      pySparkBridge.state.reset(message)
-    })
-    pySparkProcessHandler.setRestartMethod(() => p.start())
-
-    p
-  }
-
-  /** Starts the PySpark service. */
-  def start(): Unit = {
-    // Start without forking the gateway server (needs to have access to
-    // SparkContext in current JVM)
-    logger.debug("Starting gateway server")
-    gatewayServer.start()
-
-    val port = gatewayServer.getListeningPort
-    logger.debug(s"Gateway server running on port $port")
-
-    // Start the Python process used to execute code
-    logger.debug("Launching process to execute Python code")
-    pySparkProcess.start()
-
-    _isRunning = true
-  }
-
-  /**
-   * Submits code to the PySpark service to be executed and return a result.
-   *
-   * @param code The code to execute
-   *
-   * @return The result as a future to eventually return
-   */
-  def submitCode(code: Code, kernelOutputStream: Option[OutputStream]): Future[CodeResults] = {
-    pySparkBridge.state.pushCode(code, kernelOutputStream)
-  }
-
-  /** Stops the running PySpark service. */
-  def stop(): Unit = {
-    // Stop the Python process used to execute code
-    pySparkProcess.stop()
-
-    // Stop the server used as an entrypoint for Python
-    gatewayServer.shutdown()
-
-    _isRunning = false
-  }
-}
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkState.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkState.scala
deleted file mode 100644
index 495380a..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkState.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerState
-
-/**
- * Represents the state structure of PySpark.
- *
- * @param maxQueuedCode The maximum amount of code to support being queued
- *                      at the same time for PySpark execution
- */
-class PySparkState(private val maxQueuedCode: Int)
-  extends BrokerState(maxQueuedCode)
\ No newline at end of file
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTransformer.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTransformer.scala
deleted file mode 100644
index 70905e4..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTransformer.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerTransformer
-
-/**
- * Represents a utility that can transform raw PySpark information to
- * kernel information.
- */
-class PySparkTransformer extends BrokerTransformer
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTypes.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTypes.scala
deleted file mode 100644
index 7ba9496..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/PySparkTypes.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.pyspark
-
-import org.apache.toree.interpreter.broker.BrokerTypesProvider
-
-/**
- * Represents all types associated with the PySpark interface.
- */
-object PySparkTypes extends BrokerTypesProvider
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/package.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/package.scala
deleted file mode 100644
index 2ffb725..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/pyspark/package.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter
-
-import org.apache.toree.interpreter.broker.{BrokerCode, BrokerPromise}
-
-/**
- * Contains aliases to broker types.
- */
-package object pyspark {
-  /**
-   * Represents a promise made regarding the completion of PySpark code
-   * execution.
-   */
-  type PySparkPromise = BrokerPromise
-
-  /**
-   * Represents a block of PyPython code to be evaluated.
-   */
-  type PySparkCode = BrokerCode
-}
diff --git a/pyspark-interpreter/src/main/scala/org/apache/toree/magic/builtin/PySpark.scala b/pyspark-interpreter/src/main/scala/org/apache/toree/magic/builtin/PySpark.scala
deleted file mode 100644
index c6abb53..0000000
--- a/pyspark-interpreter/src/main/scala/org/apache/toree/magic/builtin/PySpark.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.magic.builtin
-
-import org.apache.toree.interpreter.{ExecuteError, ExecuteAborted}
-import org.apache.toree.kernel.interpreter.pyspark.{PySparkInterpreter, PySparkException}
-import org.apache.toree.magic.{MagicOutput, CellMagic}
-import org.apache.toree.magic.dependencies.IncludeKernel
-import org.apache.toree.plugins.annotations.Event
-
-/**
- * Represents the magic interface to use the PySpark interpreter.
- */
-class PySpark extends CellMagic with IncludeKernel {
-  @Event(name = "pyspark")
-  override def execute(code: String): MagicOutput = {
-    val pySpark = kernel.interpreter("PySpark")
-
-    if (pySpark.isEmpty || pySpark.get == null)
-      throw new PySparkException("PySpark is not available!")
-
-    pySpark.get match {
-      case pySparkInterpreter: PySparkInterpreter =>
-        val (_, output) = pySparkInterpreter.interpret(code)
-        output match {
-          case Left(executeOutput) =>
-            MagicOutput(executeOutput.toSeq:_*)
-          case Right(executeFailure) => executeFailure match {
-            case executeAborted: ExecuteAborted =>
-              throw new PySparkException("PySpark code was aborted!")
-            case executeError: ExecuteError =>
-              throw new PySparkException(executeError.value)
-          }
-        }
-      case otherInterpreter =>
-        val className = otherInterpreter.getClass.getName
-        throw new PySparkException(s"Invalid PySpark interpreter: $className")
-    }
-  }
-}
-
diff --git a/resources/compile/reference.conf b/resources/compile/reference.conf
index 1c98865..25e04fb 100644
--- a/resources/compile/reference.conf
+++ b/resources/compile/reference.conf
@@ -63,8 +63,6 @@
 
 default_interpreter_plugin = [
   "Scala:org.apache.toree.kernel.interpreter.scala.ScalaInterpreter",
-  "PySpark:org.apache.toree.kernel.interpreter.pyspark.PySparkInterpreter",
-  "SparkR:org.apache.toree.kernel.interpreter.sparkr.SparkRInterpreter",
   "SQL:org.apache.toree.kernel.interpreter.sql.SqlInterpreter"
 ]
 
diff --git a/resources/test/reference.conf b/resources/test/reference.conf
index 66741d4..9817d7f 100644
--- a/resources/test/reference.conf
+++ b/resources/test/reference.conf
@@ -62,8 +62,6 @@
 
 default_interpreter_plugin = [
   "Scala:org.apache.toree.kernel.interpreter.scala.ScalaInterpreter",
-  "PySpark:org.apache.toree.kernel.interpreter.pyspark.PySparkInterpreter",
-  "SparkR:org.apache.toree.kernel.interpreter.sparkr.SparkRInterpreter",
   "SQL:org.apache.toree.kernel.interpreter.sql.SqlInterpreter"
 ]
 
diff --git a/sparkr-interpreter/build.sbt b/sparkr-interpreter/build.sbt
deleted file mode 100644
index ac89909..0000000
--- a/sparkr-interpreter/build.sbt
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-libraryDependencies ++= Dependencies.sparkAll.value
diff --git a/sparkr-interpreter/src/main/resources/README.md b/sparkr-interpreter/src/main/resources/README.md
deleted file mode 100644
index 9194c6e..0000000
--- a/sparkr-interpreter/src/main/resources/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
--->
-
-Spark Kernel adaptation of SparkR
-=================================
-
-Presently, the following APIs are made private in SparkR that are used by the
-kernel to provide a form of communicate suitable for use as an interpreter:
-
-1. SparkR only has an `init()` method that connects to the backend service for
-   R _and_ creates a SparkContext instance. That I am aware, there is no other
-   way to currently use SparkR. Because of this, a new method labelled
-   `sparkR.connect()` is used that retrieves the existing port under the
-   environment variable _EXISTING\_SPARKR\_BACKEND\_PORT_. This method is
-   located in `sparkR.R` and is exported via the following:
-   
-        export("sparkR.connect")
-
-2. SparkR low-level methods to communicate with the backend were marked private,
-   but are used to communicate with our own bridge. If you need to use these invoke them with
-   
-        SparkR:::isInstanceOf
-        SparkR:::callJMethod
-        SparkR:::callJStatic
-        SparkR:::newJObject
-        SparkR:::removeJObject
-        SparkR:::isRemoveMethod
-        SparkR:::invokeJava
-
-3. `org.apache.spark.api.r.RBackend` is marked as limited access to the
-   package scope of `org.apache.spark.api.r`
-   
-       - To circumvent, use a reflective wrapping under 
-         `org.apache.toree.kernel.interpreter.r.ReflectiveRBackend`
diff --git a/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner.R b/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner.R
deleted file mode 100644
index a4fe3c7..0000000
--- a/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner.R
+++ /dev/null
@@ -1,156 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-#
-
-# Initialize our global environment
-.runnerEnv <- new.env()
-
-# Set our script to have its working directory where it currently resides
-# http://stackoverflow.com/questions/1815606/rscript-determine-path-of-the-executing-script
-initial.options <- commandArgs(trailingOnly = FALSE)
-file.arg.name <- "--file="
-script.name <- sub(
-  file.arg.name,
-  "",
-  initial.options[grep(file.arg.name, initial.options)]
-)
-script.basename <- dirname(script.name)
-setwd(script.basename)
-
-# TODO: Use this library instead of the forked SparkR once they either
-#       a) allow us to connect and use an existing Spark Context
-#       b) allow us to have access to the .sparkREnv to do our own work
-#
-#       and provide access in some form to the methods used to access the JVM
-# Add the SparkR library to our list
-.libPaths(c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib"), .libPaths()))
-library(SparkR)
-
-# Bring in other dependencies not exposed in standard SparkR
-source("sparkr_runner_utils.R")
-.sparkREnv <- SparkR:::.sparkREnv
-rm(".sparkRcon", envir = .sparkREnv)
-
-sparkR.connect <- function() {
-  if (SparkR:::connExists(.sparkREnv)) {
-    print("Connection to SparkR backend has already been established!")
-    return()
-  }
-
-  # Only allow connecting to an existing backend
-  existingPort <- Sys.getenv("EXISTING_SPARKR_BACKEND_PORT", "")
-  if (existingPort != "") {
-    backendPort <- existingPort
-  } else {
-    stop("No existing backend port found!")
-  }
-  print(c("ExistingPort:", existingPort))
-
-  # Connect to the backend service
-  connectionTimeout <- as.numeric(Sys.getenv("SPARKR_BACKEND_CONNECTION_TIMEOUT", "6000"))
-  .sparkREnv$backendPort <- backendPort
-  tryCatch({
-    SparkR:::connectBackend("localhost", backendPort, connectionTimeout)
-  }, error = function(err) {
-    stop("Failed to connect JVM: ", err)
-  })
-
-  # Set the start time to identify jobjs
-  # Seconds resolution is good enough for this purpose, so use ints
-  assign(".scStartTime", as.integer(Sys.time()), envir = .sparkREnv)
-
-  # Register a finalizer to sleep 1 seconds on R exit to make RStudio happy
-  reg.finalizer(.sparkREnv, function(x) { Sys.sleep(1) }, onexit = TRUE)
-}
-
-# Connect to the backend
-sparkR.connect()
-
-# Retrieve the bridge used to perform actions on the JVM
-bridge <- SparkR:::callJStatic(
-  "org.apache.toree.kernel.interpreter.sparkr.SparkRBridge", "sparkRBridge"
-)
-
-# Retrieve the state used to pull code off the JVM and push results back
-state <- SparkR:::callJMethod(bridge, "state")
-
-# Acquire the kernel API instance to expose
-kernel <- SparkR:::callJMethod(bridge, "kernel")
-assign("kernel", kernel, .runnerEnv)
-
-# Acquire the SparkContext instance to expose
-#sc <- SparkR:::callJMethod(bridge, "javaSparkContext")
-#assign("sc", sc, .runnerEnv)
-sc <- NULL
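-# The context is acquired lazily in the main loop below, once the kernel has created it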
-
-# Acquire the SQLContext instance to expose
-#sqlContext <- SparkR:::callJMethod(bridge, "sqlContext")
-#sqlContext <- SparkR:::callJMethod(kernel, "sqlContext")
-#assign("sqlContext", sqlContext, .runnerEnv)
-
-# TODO: Is there a way to control input/output (maybe use sink)?
-repeat {
-  # Load the container holding the code
-  codeContainer <- SparkR:::callJMethod(state, "nextCode")
-
-  # If the result is not a valid jobj, wait 1 second and try again
-  if (class(codeContainer) != "jobj") {
-    Sys.sleep(1)
-    next()
-  }
-
-  # Retrieve the code id (for response) and code
-  codeId <- SparkR:::callJMethod(codeContainer, "codeId")
-  code <- SparkR:::callJMethod(codeContainer, "code")
-
-  if (is.null(sc)) {
-    sc <- SparkR:::callJMethod(kernel, "javaSparkContext")
-    if (!is.null(sc)) {
-      assign("sc", sc, .runnerEnv)
-      spark <- SparkR:::callJMethod(kernel, "sparkSession")
-      assign("spark", spark, .runnerEnv)
-    }
-  }
-  print(paste("Received Id", codeId, "Code", code))
-
-  # Parse the code into an expression to be evaluated
-  codeExpr <- parse(text = code)
-  print(paste("Code expr", codeExpr))
-
-  tryCatch({
-    # Evaluate the code provided and capture the result as a string
-    result <- capture.output(eval(codeExpr, envir = .runnerEnv))
-    print(paste("Result type", class(result), length(result)))
-    print(paste("Success", codeId, result))
-
-    # Mark the execution as a success and send back the result
-    # If the output is null or empty, mark success without a payload (sending an empty result fails)
-    if (is.null(result) || length(result) <= 0) {
-      print("Marking success with no output")
-      SparkR:::callJMethod(state, "markSuccess", codeId)
-    } else {
-      # Clean the result before sending it back
-      cleanedResult <- trimws(flatten(result, shouldTrim = FALSE))
-
-      print(paste("Marking success with output:", cleanedResult))
-      SparkR:::callJMethod(state, "markSuccess", codeId, cleanedResult)
-    }
-  }, error = function(ex) {
-    # Mark the execution as a failure and send back the error
-    print(paste("Failure", codeId, toString(ex)))
-    SparkR:::callJMethod(state, "markFailure", codeId, toString(ex))
-  })
-}
diff --git a/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner_utils.R b/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner_utils.R
deleted file mode 100644
index 65e7f3c..0000000
--- a/sparkr-interpreter/src/main/resources/kernelR/sparkr_runner_utils.R
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License
-#
-
-#
-# Reduces a collection of character vectors to a single character vector of
-# length 1
-#
-# obj: The object representing the character vector to flatten
-# shouldTrim: If true, will trim each individual element
-# sepCharacter: Used as the separator between combined strings
-#
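-# Example (assuming the defaults): flatten(c("a ", " b")) returns "a\nb"
-#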
-flatten <- function(obj, shouldTrim = TRUE, sepCharacter = "\n") {
-  Reduce(function(x, y) {
-    flattenedX <- if (length(x) > 1) flatten(x, shouldTrim, sepCharacter) else x
-    flattenedY <- if (length(y) > 1) flatten(y, shouldTrim, sepCharacter) else y
-
-    finalX <- if (shouldTrim) trimws(flattenedX) else flattenedX
-    finalY <- if (shouldTrim) trimws(flattenedY) else flattenedY
-
-    paste(finalX, finalY, sep = sepCharacter)
-  }, obj)
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/ReflectiveRBackend.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/ReflectiveRBackend.scala
deleted file mode 100644
index 6a1a6f7..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/ReflectiveRBackend.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-/**
- * Provides reflective access into the backend R component that is not
- * publicly accessible.
- */
-class ReflectiveRBackend {
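-  // RBackend is private to Spark, so it is loaded and instantiated reflectively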
-  private val rBackendClass = Class.forName("org.apache.spark.api.r.RBackend")
-  private val rBackendInstance = rBackendClass.newInstance()
-
-  /**
-   * Initializes the underlying RBackend service.
-   *
-   * @return The port used by the service
-   */
-  def init(cl: ClassLoader): Int = {
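-    // Install the provided class loader on this thread so RBackend resolves classes through it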
-    val initMethod = rBackendClass.getDeclaredMethod("init")
-    Thread.currentThread().setContextClassLoader(cl)
-    initMethod.invoke(rBackendInstance).asInstanceOf[Int]
-  }
-
-  /** Blocks until the service has finished. */
-  def run(): Unit = {
-    val runMethod = rBackendClass.getDeclaredMethod("run")
-
-    runMethod.invoke(rBackendInstance)
-  }
-
-  /** Closes the underlying RBackend service. */
-  def close(): Unit = {
-    val closeMethod = rBackendClass.getDeclaredMethod("close")
-
-    closeMethod.invoke(rBackendInstance)
-  }
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
deleted file mode 100644
index a9cfc63..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRBridge.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.{BrokerBridge, BrokerState}
-import org.apache.toree.kernel.api.KernelLike
-
-/**
- * Represents constants for the SparkR bridge.
- */
-object SparkRBridge {
-  /** Represents the maximum amount of code that can be queued for R. */
-  val MaxQueuedCode = 500
-
-  /** Contains the bridge used by the current R process. */
-  @volatile private var _sparkRBridge: Option[SparkRBridge] = None
-
-  /** Allows kernel to set bridge dynamically. */
-  private[sparkr] def sparkRBridge_=(newSparkRBridge: SparkRBridge): Unit = {
-    _sparkRBridge = Some(newSparkRBridge)
-  }
-
-  /** Clears the bridge currently hosted statically. */
-  private[sparkr] def reset(): Unit = _sparkRBridge = None
-
-  /** Must be exposed in a static location for RBackend to access. */
-  def sparkRBridge: SparkRBridge = {
-    assert(_sparkRBridge.nonEmpty, "SparkRBridge has not been initialized!")
-    _sparkRBridge.get
-  }
-
-  /**
-   * Creates a new SparkRBridge instance.
-   *
-   * @param brokerState The container of broker state to expose
-   * @param kernel The kernel API to expose through the bridge
-   *
-   * @return The new SparkR bridge
-   */
-  def apply(
-    brokerState: BrokerState,
-    kernel: KernelLike
-    ): SparkRBridge = {
-    new SparkRBridge(
-      _brokerState = brokerState,
-      _kernel = kernel
-    )
-  }
-}
-
-/**
- * Represents the API available to SparkR to act as the bridge for data
- * between the JVM and R.
- *
- * @param _brokerState The container of broker state to expose
- * @param _kernel The kernel API to expose through the bridge
- */
-class SparkRBridge private (
-  private val _brokerState: BrokerState,
-  private val _kernel: KernelLike
-) extends BrokerBridge(_brokerState, _kernel) {
-  override val brokerName: String = "SparkR"
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRException.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRException.scala
deleted file mode 100644
index 2c56eb1..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRException.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerException
-
-/**
- * Represents a generic SparkR exception.
- *
- * @param message The message to associate with the exception
- */
-class SparkRException(message: String) extends BrokerException(message)
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
deleted file mode 100644
index 1c330bf..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRInterpreter.scala
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import java.net.URL
-
-import org.apache.toree.interpreter.Results.Result
-import org.apache.toree.interpreter._
-import org.apache.toree.kernel.api.KernelLike
-import org.apache.toree.kernel.BuildInfo
-import org.slf4j.LoggerFactory
-
-import scala.concurrent.Await
-import scala.concurrent.duration._
-import scala.tools.nsc.interpreter.{InputStream, OutputStream}
-
-/**
- * Represents an interpreter interface to SparkR. Requires a properly-set
- * SPARK_HOME pointing to a binary distribution (needs packaged SparkR library)
- * and an implementation of R on the path.
- */
-class SparkRInterpreter extends Interpreter {
-  private val logger = LoggerFactory.getLogger(this.getClass)
-  private var _kernel: KernelLike = _
-  private val rScriptExecutable = "Rscript"
-
-  /** Represents the state used by this interpreter's R instance. */
-  private lazy val sparkRState = new SparkRState(SparkRBridge.MaxQueuedCode)
-
-  /** Represents the bridge used by this interpreter's R instance. */
-  private lazy val sparkRBridge = SparkRBridge(
-    sparkRState,
-    _kernel
-  )
-
-  /** Represents the interface for R to talk to JVM Spark components. */
-  private lazy val rBackend = new ReflectiveRBackend
-
-  /** Represents the process handler used for the SparkR process. */
-  private lazy val sparkRProcessHandler: SparkRProcessHandler =
-    new SparkRProcessHandler(
-      sparkRBridge,
-      restartOnFailure = true,
-      restartOnCompletion = true
-    )
-
-  private lazy val sparkRService = new SparkRService(
-    rScriptExecutable,
-    rBackend,
-    sparkRBridge,
-    sparkRProcessHandler
-  )
-  private lazy val sparkRTransformer = new SparkRTransformer
-
-  override def init(kernel: KernelLike): Interpreter = {
-    _kernel = kernel
-    this
-  }
-
-  /**
-   * Executes the provided code with the option to silence output.
-   * @param code The code to execute
-   * @param silent Whether or not to execute the code silently (no output)
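-   * @param output The optional stream to receive output produced during execution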
-   * @return The success/failure of the interpretation and the output from the
-   *         execution or the failure
-   */
-  override def interpret(code: String, silent: Boolean, output: Option[OutputStream]):
-    (Result, Either[ExecuteOutput, ExecuteFailure]) =
-  {
-    if (!sparkRService.isRunning) sparkRService.start()
-
-    val futureResult = sparkRTransformer.transformToInterpreterResult(
-      sparkRService.submitCode(code, kernelOutputStream = output)
-    )
-
-    Await.result(futureResult, Duration.Inf)
-  }
-
-  /**
-   * Starts the interpreter, initializing any internal state.
-   * @return A reference to the interpreter
-   */
-  override def start(): Interpreter = {
-    sparkRService.start()
-
-    this
-  }
-
-  /**
-   * Stops the interpreter, removing any previous internal state.
-   * @return A reference to the interpreter
-   */
-  override def stop(): Interpreter = {
-    sparkRService.stop()
-
-    this
-  }
-
-  /**
-   * Returns the class loader used by this interpreter.
-   *
-   * @return The runtime class loader used by this interpreter
-   */
-  override def classLoader: ClassLoader = this.getClass.getClassLoader
-
-  // Unsupported (but can be invoked)
-  override def lastExecutionVariableName: Option[String] = None
-
-  // Unsupported (but can be invoked)
-  override def read(variableName: String): Option[AnyRef] = None
-
-  // Unsupported
-  override def updatePrintStreams(in: InputStream, out: OutputStream, err: OutputStream): Unit = ???
-
-  // Unsupported
-  override def interrupt(): Interpreter = ???
-
-  // Unsupported
-  override def bind(variableName: String, typeName: String, value: Any, modifiers: List[String]): Unit = ???
-
-  // Unsupported
-  override def addJars(jars: URL*): Unit = ???
-
-  // Unsupported
-  override def doQuietly[T](body: => T): T = ???
-
-  override def languageInfo = {
-    import sys.process._
-
-    // Issue a one-off subprocess call to grab the R version; simpler than polling a long-lived child process.
-    val version = Seq(
-      rScriptExecutable,
-      "-e",
-      "cat(R.version$major, '.', R.version$minor, sep='', fill=TRUE)").!!
-
-    LanguageInfo(
-      "R", version = version,
-      fileExtension = Some(".R"),
-      pygmentsLexer = Some("r"),
-      mimeType = Some("text/x-rsrc"),
-      codemirrorMode = Some("text/x-rsrc"))
-  }
-
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcess.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcess.scala
deleted file mode 100644
index d1c145a..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcess.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerProcess
-import scala.collection.JavaConverters._
-
-/**
- * Represents the R process used to evaluate SparkR code.
- *
- * @param processName The name of the Rscript process to run.
- * @param sparkRBridge The bridge to use to retrieve kernel output streams
- *                      and the Spark version to be verified
- * @param sparkRProcessHandler The handler to use when the process fails or
- *                             completes
- * @param port The port to provide to the SparkR process to use to connect
- *             back to the JVM
- */
-class SparkRProcess(
-  processName: String,
-  private val sparkRBridge: SparkRBridge,
-  private val sparkRProcessHandler: SparkRProcessHandler,
-  private val port: Int
-) extends BrokerProcess(
-  processName = processName,
-  entryResource = "kernelR/sparkr_runner.R",
-  otherResources = Seq("kernelR/sparkr_runner_utils.R"),
-  brokerBridge = sparkRBridge,
-  brokerProcessHandler = sparkRProcessHandler,
-  arguments = Seq(
-    "--default-packages=datasets,utils,grDevices,graphics,stats,methods"
-  )
-) {
-  override val brokerName: String = "SparkR"
-  private val sparkHome = Option(System.getenv("SPARK_HOME"))
-    .orElse(Option(System.getProperty("spark.home")))
-
-  assert(sparkHome.nonEmpty, "SparkR process requires Spark Home to be set!")
-
-  /**
-   * Creates a new process environment to be used for environment variable
-   * retrieval by the new process.
-   *
-   * @return The map of environment variables and their respective values
-   */
-  override protected def newProcessEnvironment(): Map[String, String] = {
-    val baseEnvironment = super.newProcessEnvironment()
-
-    // Note: values in the new map override any duplicates from the base environment
-    baseEnvironment ++ Map(
-      "SPARK_HOME"                    -> sparkHome.get,
-      "EXISTING_SPARKR_BACKEND_PORT"  -> port.toString
-    )
-  }
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcessHandler.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcessHandler.scala
deleted file mode 100644
index 6dd93e8..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRProcessHandler.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerProcessHandler
-
-/**
- * Represents the handler for events triggered by the SparkR process.
- *
- * @param sparkRBridge The bridge to reset when the process fails or completes
- * @param restartOnFailure If true, restarts the process if it fails
- * @param restartOnCompletion If true, restarts the process if it completes
- */
-class SparkRProcessHandler(
-  private val sparkRBridge: SparkRBridge,
-  private val restartOnFailure: Boolean,
-  private val restartOnCompletion: Boolean
-) extends BrokerProcessHandler(
-  sparkRBridge,
-  restartOnFailure,
-  restartOnCompletion
-) {
-  override val brokerName: String = "SparkR"
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
deleted file mode 100644
index 350aee0..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRService.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import java.util.concurrent.{Semaphore, TimeUnit}
-
-import org.apache.toree.interpreter.broker.BrokerService
-import org.apache.toree.kernel.interpreter.sparkr.SparkRTypes.{Code, CodeResults}
-import org.slf4j.LoggerFactory
-
-import scala.concurrent.Future
-import scala.tools.nsc.interpreter._
-
-/**
- * Represents the service that provides the high-level interface between the
- * JVM and R.
- *
- * @param processName The name of the Rscript process to run.
- * @param rBackend The backend to start to communicate between the JVM and R
- * @param sparkRBridge The bridge to use for communication between the JVM and R
- * @param sparkRProcessHandler The handler used for events that occur with the
- *                             SparkR process
- */
-class SparkRService(
-  processName: String,
-  private val rBackend: ReflectiveRBackend,
-  private val sparkRBridge: SparkRBridge,
-  private val sparkRProcessHandler: SparkRProcessHandler
-) extends BrokerService {
-  private val logger = LoggerFactory.getLogger(this.getClass)
-  @volatile private var rBackendPort: Int = -1
-  @volatile private var _isRunning: Boolean = false
-  override def isRunning: Boolean = _isRunning
-
-  /** Represents the process used to execute R code via the bridge. */
-  private lazy val sparkRProcess: SparkRProcess = {
-    val p = new SparkRProcess(
-      processName,
-      sparkRBridge,
-      sparkRProcessHandler,
-      rBackendPort
-    )
-
-    // Update handlers to correctly reset and restart the process
-    sparkRProcessHandler.setResetMethod(message => {
-      p.stop()
-      sparkRBridge.state.reset(message)
-    })
-    sparkRProcessHandler.setRestartMethod(() => p.start())
-
-    p
-  }
-
-  /** Starts the SparkR service. */
-  override def start(): Unit = {
-    logger.debug("Initializing statically-accessible SparkR bridge")
-    SparkRBridge.sparkRBridge = sparkRBridge
-
-    val initialized = new Semaphore(0)
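-    // Released by the backend future once the port is known; awaited below before starting the R process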
-    val classLoader = SparkRBridge.getClass.getClassLoader
-    import scala.concurrent.ExecutionContext.Implicits.global
-    val rBackendRun = Future {
-      logger.debug("Initializing RBackend")
-      rBackendPort = rBackend.init(classLoader)
-      logger.debug(s"RBackend running on port $rBackendPort")
-      initialized.release()
-      logger.debug("Running RBackend")
-      rBackend.run()
-      logger.debug("RBackend has finished")
-    }
-
-    // Wait for the backend to start before launching the R process that connects to it
-    val backendTimeout =
-      sys.env.getOrElse("SPARKR_BACKEND_TIMEOUT", "120").toInt
-    if (initialized.tryAcquire(backendTimeout, TimeUnit.SECONDS)) {
-      // Start the R process used to execute code
-      logger.debug("Launching process to execute R code")
-      sparkRProcess.start()
-      _isRunning = true
-    } else {
-      // Unable to initialize, so throw an exception
-      throw new SparkRException(
-        s"Unable to initialize R backend in $backendTimeout seconds!")
-    }
-  }
-
-  /**
-   * Submits code to the SparkR service to be executed, returning the result.
-   *
-   * @param code The code to execute
-   * @param kernelOutputStream The optional stream to receive output produced
-   *                           during execution
-   *
-   * @return A future that eventually completes with the result of the execution
-   */
-  override def submitCode(code: Code, kernelOutputStream: Option[OutputStream]): Future[CodeResults] = {
-    sparkRBridge.state.pushCode(code, kernelOutputStream)
-  }
-
-  /** Stops the running SparkR service. */
-  override def stop(): Unit = {
-    // Stop the R process used to execute code
-    sparkRProcess.stop()
-
-    // Stop the server used as an entrypoint for R
-    rBackend.close()
-
-    // Clear the bridge
-    SparkRBridge.reset()
-
-    _isRunning = false
-  }
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRState.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRState.scala
deleted file mode 100644
index 2f7ddc7..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRState.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerState
-
-/**
- * Represents the state structure of SparkR.
- *
- * @param maxQueuedCode The maximum number of code blocks that can be queued
- *                      at the same time for SparkR execution
- */
-class SparkRState(private val maxQueuedCode: Int)
-  extends BrokerState(maxQueuedCode)
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTransformer.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTransformer.scala
deleted file mode 100644
index c35e342..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTransformer.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerTransformer
-
-/**
- * Represents the transformer used by SparkR.
- */
-class SparkRTransformer extends BrokerTransformer
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTypes.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTypes.scala
deleted file mode 100644
index 370704f..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/SparkRTypes.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter.sparkr
-
-import org.apache.toree.interpreter.broker.BrokerTypesProvider
-
-/**
- * Represents all types associated with the SparkR interface.
- */
-object SparkRTypes extends BrokerTypesProvider
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/package.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/package.scala
deleted file mode 100644
index e5d3709..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/kernel/interpreter/sparkr/package.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.kernel.interpreter
-
-import org.apache.toree.interpreter.broker.{BrokerCode, BrokerPromise}
-
-/**
- * Contains aliases to broker types.
- */
-package object sparkr {
-  /**
-   * Represents a promise made regarding the completion of SparkR code
-   * execution.
-   */
-  type SparkRPromise = BrokerPromise
-
-  /**
-   * Represents a block of SparkR code to be evaluated.
-   */
-  type SparkRCode = BrokerCode
-}
diff --git a/sparkr-interpreter/src/main/scala/org/apache/toree/magic/builtin/SparkR.scala b/sparkr-interpreter/src/main/scala/org/apache/toree/magic/builtin/SparkR.scala
deleted file mode 100644
index 361ea20..0000000
--- a/sparkr-interpreter/src/main/scala/org/apache/toree/magic/builtin/SparkR.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements.  See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License
- */
-package org.apache.toree.magic.builtin
-
-import org.apache.toree.interpreter.{ExecuteError, ExecuteAborted}
-import org.apache.toree.kernel.interpreter.sparkr.{SparkRInterpreter, SparkRException}
-import org.apache.toree.magic.{MagicOutput, CellMagic}
-import org.apache.toree.magic.dependencies.IncludeKernel
-import org.apache.toree.plugins.annotations.Event
-
-/**
- * Represents the magic interface to use the SparkR interpreter.
- */
-class SparkR extends CellMagic with IncludeKernel {
-
-  @Event(name = "sparkr")
-  override def execute(code: String): MagicOutput = {
-    val sparkR = kernel.interpreter("SparkR")
-
-    if (sparkR.isEmpty || sparkR.get == null)
-      throw new SparkRException("SparkR is not available!")
-
-    sparkR.get match {
-      case sparkRInterpreter: SparkRInterpreter =>
-        val (_, output) = sparkRInterpreter.interpret(code)
-        output match {
-          case Left(executeOutput) =>
-            MagicOutput(executeOutput.toSeq:_*)
-          case Right(executeFailure) => executeFailure match {
-            case executeAborted: ExecuteAborted =>
-              throw new SparkRException("SparkR code was aborted!")
-            case executeError: ExecuteError =>
-              throw new SparkRException(executeError.value)
-          }
-        }
-      case otherInterpreter =>
-        val className = otherInterpreter.getClass.getName
-        throw new SparkRException(s"Invalid SparkR interpreter: $className")
-    }
-  }
-}
diff --git a/test_toree.py b/test_toree.py
index 82b515a..0856b9a 100644
--- a/test_toree.py
+++ b/test_toree.py
@@ -138,47 +138,5 @@
 
         return reply, output_msgs
 
-class ToreePythonKernelTests(jupyter_kernel_test.KernelTests):
-    # Required --------------------------------------
-
-    # The name identifying an installed kernel to run the tests against
-    kernel_name = "apache_toree_pyspark"
-
-    # language_info.name in a kernel_info_reply should match this
-    language_name = "python"
-
-    # Optional --------------------------------------
-
-    # Code in the kernel's language to write "hello, world" to stdout
-    # These tests fail randomly on Travis.
-    # code_hello_world = "print(\"hello, world\")"
-
-    # Samples of code which generate a result value (i.e., some text
-    # displayed as Out[n])
-    code_execute_result = [
-        {'code': '6*7', 'result': '42'}
-    ]
-
-class ToreeSparkRKernelTests(jupyter_kernel_test.KernelTests):
-    # Required --------------------------------------
-
-    # The name identifying an installed kernel to run the tests against
-    kernel_name = "apache_toree_sparkr"
-
-    # language_info.name in a kernel_info_reply should match this
-    language_name = "R"
-
-    # Optional --------------------------------------
-
-    # Code in the kernel's language to write "hello, world" to stdout
-    # Something weird goes on with R.
-    # code_hello_world = r'write("hello, world", stdout())'
-
-    # Samples of code which generate a result value (i.e., some text
-    # displayed as Out[n])
-    code_execute_result = [
-        {'code': '6*7', 'result': '[1] 42'}
-    ]
-
 if __name__ == '__main__':
     unittest.main()