[TOREE-451] Remove Scala 2.10 files
Removed Scala 2.10-specific source code. Also updated SBT
from 0.13.16 to 1.0.3 and updated the plugins in project/plugins.sbt
to match the newer version of SBT.
Closes #145
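
For context (illustrative, not part of this patch): sbt compiles
version-specific sources from src/main/scala-<binaryVersion> directories
alongside src/main/scala, which is why dropping 2.10 support amounts to
deleting the scala-2.10 trees below and narrowing the build settings. A
minimal sketch of the relevant settings, assuming the 2.11.8 version that
appears elsewhere in this diff:

    // Sketch only. sbt picks up src/main/scala-2.11 (and formerly
    // src/main/scala-2.10) automatically based on scalaBinaryVersion,
    // so no explicit source-directory wiring is needed.
    scalaVersion in ThisBuild := "2.11.8"  // assumed; matches the notebook metadata below
    crossScalaVersions := Seq("2.11.8")    // 2.10.x dropped from the cross-build

    // The 2.10-only quasiquotes dependency removed from macros/build.sbt
    // below was gated on the Scala version like this:
    libraryDependencies ++= (
      if (scalaVersion.value.startsWith("2.10"))
        List("org.scalamacros" %% "quasiquotes" % "2.1.0")
      else
        Nil
    )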
diff --git a/.travis.yml b/.travis.yml
index d237c4f..98039a7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,7 +17,7 @@
language: scala
scala:
- - "2.10.4"
+ - "2.11"
env:
global:
- COURSIER_NO_TERM=1
diff --git a/README.md b/README.md
index 89276d1..584a37e 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,8 @@
Try It
======
-A version of Toree is deployed as part of the [Try Jupyter!][try-jupyter] site. Select `Scala 2.10.4 (Spark 1.4.1)` under the `New` dropdown. Note that this version only supports `Scala`.
+A version of Toree is deployed as part of the [Try Jupyter!][try-jupyter] site. Select `Apache Toree - Scala` under
+the `New` dropdown. Note that this version only supports `Scala`.
Develop
=======
diff --git a/build.sbt b/build.sbt
index c294421..32ea4b9 100644
--- a/build.sbt
+++ b/build.sbt
@@ -75,7 +75,8 @@
resolvers in ThisBuild ++= Seq(
"Apache Snapshots" at "http://repository.apache.org/snapshots/",
"Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
- "Jitpack" at "https://jitpack.io"
+ "Jitpack" at "https://jitpack.io",
+ "bintray-sbt-plugins" at "http://dl.bintray.com/sbt/sbt-plugin-releases"
)
updateOptions in ThisBuild := updateOptions.value.withCachedResolution(true)
libraryDependencies in ThisBuild ++= Seq(
diff --git a/etc/examples/notebooks/magic-tutorial.ipynb b/etc/examples/notebooks/magic-tutorial.ipynb
index 67156cf..5b4c383 100644
--- a/etc/examples/notebooks/magic-tutorial.ipynb
+++ b/etc/examples/notebooks/magic-tutorial.ipynb
@@ -941,9 +941,11 @@
"name": "apache_toree_scala"
},
"language_info": {
- "name": "scala"
+ "file_extension": ".scala",
+ "name": "scala",
+ "version": "2.11.8"
}
},
"nbformat": 4,
"nbformat_minor": 0
-}
\ No newline at end of file
+}
diff --git a/macros/build.sbt b/macros/build.sbt
index 65eda08..897b258 100644
--- a/macros/build.sbt
+++ b/macros/build.sbt
@@ -16,10 +16,3 @@
*/
// Do not export a jar for this or publish anything (should serve just as a pre-processor)
libraryDependencies += Dependencies.scalaReflect.value
-
-libraryDependencies ++= (
- if (scalaVersion.value.startsWith("2.10"))
- List("org.scalamacros" %% "quasiquotes" % "2.1.0")
- else
- Nil
-)
diff --git a/plugins/src/test/scala-2.10/org/apache/toree/plugins/dependencies/ClassLoaderHelper.scala b/plugins/src/test/scala-2.10/org/apache/toree/plugins/dependencies/ClassLoaderHelper.scala
deleted file mode 100644
index 54f066f..0000000
--- a/plugins/src/test/scala-2.10/org/apache/toree/plugins/dependencies/ClassLoaderHelper.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-package org.apache.toree.plugins.dependencies
-
-import java.lang.{ClassLoader => JClassLoader}
-import java.net.URL
-
-import scala.tools.nsc.util.ScalaClassLoader.URLClassLoader
-
-/**
- * Created by mariu on 2016-07-16.
- */
-object ClassLoaderHelper {
-
- def URLClassLoader(urls: Seq[URL], parent: JClassLoader) = {
- new URLClassLoader(urls, parent)
- }
-}
diff --git a/project/build.properties b/project/build.properties
index 9d799ec..ff28b3d 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -15,4 +15,4 @@
# limitations under the License
#
-sbt.version=0.13.16
+sbt.version=1.0.3
diff --git a/project/plugins.sbt b/project/plugins.sbt
index b15f852..0d3ac7e 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -21,13 +21,13 @@
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.1")
// Provides ability to create an uber-jar
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5")
// Provides a generated build info object to sync between build and application
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.7.0")
// Used for signing jars published via `sbt publish-signed`
-addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0")
// Provides the ability to generate dependency graphs
-addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
+addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0")
diff --git a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/ScalaInterpreterSpecific.scala b/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/ScalaInterpreterSpecific.scala
deleted file mode 100644
index b8a8535..0000000
--- a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/ScalaInterpreterSpecific.scala
+++ /dev/null
@@ -1,418 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-
-package org.apache.toree.kernel.interpreter.scala
-
-import java.io.{BufferedReader, ByteArrayOutputStream, InputStreamReader, PrintStream}
-import java.net.{URL, URLClassLoader}
-import java.nio.charset.Charset
-import java.util.concurrent.ExecutionException
-
-import org.apache.spark.SparkContext
-import org.apache.spark.repl.{SparkCommandLine, SparkIMain, SparkJLineCompletion}
-import org.apache.spark.sql.SQLContext
-import org.apache.toree.global.StreamState
-import org.apache.toree.interpreter._
-import org.apache.toree.interpreter.imports.printers.{WrapperConsole, WrapperSystem}
-import org.apache.toree.kernel.api.{KernelLike, KernelOptions}
-import org.apache.toree.utils.{MultiOutputStream, TaskManager}
-import org.slf4j.LoggerFactory
-
-import scala.annotation.tailrec
-import scala.concurrent.{Await, Future}
-import scala.language.reflectiveCalls
-import scala.tools.nsc.backend.JavaPlatform
-import scala.tools.nsc.interpreter.{IR, InputStream, JPrintWriter, OutputStream}
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{ClassPath, MergedClassPath}
-import scala.tools.nsc.{Global, Settings, io}
-import scala.util.{Try => UtilTry}
-
-/**
- * Provides Scala version-specific features needed for the interpreter.
- */
-trait ScalaInterpreterSpecific { this: ScalaInterpreter =>
- private val ExecutionExceptionName = "lastException"
-
- var sparkIMain: SparkIMain = _
- protected var jLineCompleter: SparkJLineCompletion = _
-
- val _runtimeClassloader =
- new URLClassLoader(Array(), _thisClassloader) {
- def addJar(url: URL) = this.addURL(url)
- }
-
- protected def newSparkIMain(
- settings: Settings, out: JPrintWriter
- ): SparkIMain = {
- val s = new SparkIMain(settings, out)
- s.initializeSynchronous()
- s
- }
-
- /**
- * Adds jars to the runtime and compile time classpaths. Does not work with
- * directories or expanding star in a path.
- * @param jars The list of jar locations
- */
- override def addJars(jars: URL*): Unit = {
- // Enable Scala class support
- reinitializeSymbols()
-
- jars.foreach(_runtimeClassloader.addJar)
- updateCompilerClassPath(jars : _*)
-
- // Refresh all of our variables
- refreshDefinitions()
- }
-
- // TODO: Need to figure out a better way to compare the representation of
- // an annotation (contained in AnnotationInfo) with various annotations
- // like scala.transient
- protected def convertAnnotationsToModifiers(
- annotationInfos: List[Global#AnnotationInfo]
- ) = annotationInfos map {
- case a if a.toString == "transient" => "@transient"
- case a =>
- logger.debug(s"Ignoring unknown annotation: $a")
- ""
- } filterNot {
- _.isEmpty
- }
-
- protected def convertScopeToModifiers(scopeSymbol: Global#Symbol) = {
- (if (scopeSymbol.isImplicit) "implicit" else "") ::
- Nil
- }
-
- protected def buildModifierList(termNameString: String) = {
- import scala.language.existentials
- val termSymbol = sparkIMain.symbolOfTerm(termNameString)
-
-
- convertAnnotationsToModifiers(
- if (termSymbol.hasAccessorFlag) termSymbol.accessed.annotations
- else termSymbol.annotations
- ) ++ convertScopeToModifiers(termSymbol)
- }
-
- protected def refreshDefinitions(): Unit = {
- sparkIMain.definedTerms.foreach(termName => {
- val termNameString = termName.toString
- val termTypeString = sparkIMain.typeOfTerm(termNameString).toLongString
- sparkIMain.valueOfTerm(termNameString) match {
- case Some(termValue) =>
- val modifiers = buildModifierList(termNameString)
- logger.debug(s"Rebinding of $termNameString as " +
- s"${modifiers.mkString(" ")} $termTypeString")
- UtilTry(sparkIMain.beSilentDuring {
- sparkIMain.bind(
- termNameString, termTypeString, termValue, modifiers
- )
- })
- case None =>
- logger.debug(s"Ignoring rebinding of $termNameString")
- }
- })
- }
-
- protected def reinitializeSymbols(): Unit = {
- val global = sparkIMain.global
- import global._
- new Run // Initializes something needed for Scala classes
- }
-
- protected def updateCompilerClassPath( jars: URL*): Unit = {
- require(!sparkIMain.global.forMSIL) // Only support JavaPlatform
-
- val platform = sparkIMain.global.platform.asInstanceOf[JavaPlatform]
-
- val newClassPath = mergeJarsIntoClassPath(platform, jars:_*)
- logger.debug(s"newClassPath: ${newClassPath}")
-
- // TODO: Investigate better way to set this... one thought is to provide
- // a classpath in the currentClassPath (which is merged) that can be
- // replaced using updateClasspath, but would that work more than once?
- val fieldSetter = platform.getClass.getMethods
- .find(_.getName.endsWith("currentClassPath_$eq")).get
- fieldSetter.invoke(platform, Some(newClassPath))
-
- // Reload all jars specified into our compiler
- sparkIMain.global.invalidateClassPathEntries(jars.map(_.getPath): _*)
- }
-
- protected def mergeJarsIntoClassPath(platform: JavaPlatform, jars: URL*): MergedClassPath[AbstractFile] = {
- // Collect our new jars and add them to the existing set of classpaths
- val allClassPaths = (
- platform.classPath
- .asInstanceOf[MergedClassPath[AbstractFile]].entries
- ++
- jars.map(url =>
- platform.classPath.context.newClassPath(
- io.AbstractFile.getFile(url.getPath))
- )
- ).distinct
-
- // Combine all of our classpaths (old and new) into one merged classpath
- new MergedClassPath(
- allClassPaths,
- platform.classPath.context
- )
- }
-
- /**
- * Binds a variable in the interpreter to a value.
- * @param variableName The name to expose the value in the interpreter
- * @param typeName The type of the variable, must be the fully qualified class name
- * @param value The value of the variable binding
- * @param modifiers Any annotation, scoping modifiers, etc on the variable
- */
- override def bind(
- variableName: String,
- typeName: String,
- value: Any,
- modifiers: List[String]
- ): Unit = {
- require(sparkIMain != null)
- sparkIMain.bind(variableName, typeName, value, modifiers)
- }
-
- /**
- * Executes body and will not print anything to the console during the execution
- * @param body The function to execute
- * @tparam T The return type of body
- * @return The return value of body
- */
- override def doQuietly[T](body: => T): T = {
- require(sparkIMain != null)
- sparkIMain.beQuietDuring[T](body)
- }
-
- /**
- * Stops the interpreter, removing any previous internal state.
- * @return A reference to the interpreter
- */
- override def stop(): Interpreter = {
- logger.info("Shutting down interpreter")
-
- // Shut down the task manager (kills current execution
- if (taskManager != null) taskManager.stop()
- taskManager = null
-
- // Erase our completer
- jLineCompleter = null
-
- // Close the entire interpreter (loses all state)
- if (sparkIMain != null) sparkIMain.close()
- sparkIMain = null
-
- this
- }
-
- /**
- * Returns the name of the variable created from the last execution.
- * @return Some String name if a variable was created, otherwise None
- */
- override def lastExecutionVariableName: Option[String] = {
- require(sparkIMain != null)
-
- // TODO: Get this API method changed back to public in Apache Spark
- val lastRequestMethod = classOf[SparkIMain].getDeclaredMethod("lastRequest")
- lastRequestMethod.setAccessible(true)
-
- val request =
- lastRequestMethod.invoke(sparkIMain).asInstanceOf[SparkIMain#Request]
-
- val mostRecentVariableName = sparkIMain.mostRecentVar
-
- request.definedNames.map(_.toString).find(_ == mostRecentVariableName)
- }
-
- /**
- * Mask the Console and System objects with our wrapper implementations
- * and dump the Console methods into the public namespace (similar to
- * the Predef approach).
- * @param in The new input stream
- * @param out The new output stream
- * @param err The new error stream
- */
- override def updatePrintStreams(
- in: InputStream,
- out: OutputStream,
- err: OutputStream
- ): Unit = {
- val inReader = new BufferedReader(new InputStreamReader(in))
- val outPrinter = new PrintStream(out)
- val errPrinter = new PrintStream(err)
-
- sparkIMain.beQuietDuring {
- sparkIMain.bind(
- "Console", classOf[WrapperConsole].getName,
- new WrapperConsole(inReader, outPrinter, errPrinter),
- List("""@transient""")
- )
- sparkIMain.bind(
- "System", classOf[WrapperSystem].getName,
- new WrapperSystem(in, out, err),
- List("""@transient""")
- )
- sparkIMain.addImports("Console._")
- }
- }
-
- /**
- * Retrieves the contents of the variable with the provided name from the
- * interpreter.
- * @param variableName The name of the variable whose contents to read
- * @return An option containing the variable contents or None if the
- * variable does not exist
- */
- override def read(variableName: String): Option[AnyRef] = {
- require(sparkIMain != null)
- val variable = sparkIMain.valueOfTerm(variableName)
- if (variable == null || variable.isEmpty) None
- else variable
- }
-
- /**
- * Starts the interpreter, initializing any internal state.
- * You must call init before running this function.
- *
- * @return A reference to the interpreter
- */
- override def start(): Interpreter = {
- require(sparkIMain == null && taskManager == null)
-
- taskManager = newTaskManager()
-
- logger.debug("Initializing task manager")
- taskManager.start()
-
- sparkIMain =
- newSparkIMain(settings, new JPrintWriter(lastResultOut, true))
-
-
- //logger.debug("Initializing interpreter")
- //sparkIMain.initializeSynchronous()
-
- logger.debug("Initializing completer")
- jLineCompleter = new SparkJLineCompletion(sparkIMain)
-
- sparkIMain.beQuietDuring {
- //logger.info("Rerouting Console and System related input and output")
- //updatePrintStreams(System.in, multiOutputStream, multiOutputStream)
-
-// ADD IMPORTS generates too many classes, client is responsible for adding import
- logger.debug("Adding org.apache.spark.SparkContext._ to imports")
- sparkIMain.addImports("org.apache.spark.SparkContext._")
- }
-
- this
- }
-
- /**
- * Attempts to perform code completion via the <TAB> command.
- * @param code The current cell to complete
- * @param pos The cursor position
- * @return The cursor position and list of possible completions
- */
- override def completion(code: String, pos: Int): (Int, List[String]) = {
- require(jLineCompleter != null)
-
- logger.debug(s"Attempting code completion for ${code}")
- val regex = """[0-9a-zA-Z._]+$""".r
- val parsedCode = (regex findAllIn code).mkString("")
-
- logger.debug(s"Attempting code completion for ${parsedCode}")
- val result = jLineCompleter.completer().complete(parsedCode, pos)
-
- (result.cursor, result.candidates)
- }
-
- protected def newSettings(args: List[String]): Settings =
- new SparkCommandLine(args).settings
-
- protected def interpretAddTask(code: String, silent: Boolean): Future[IR.Result] = {
- if (sparkIMain == null) throw new IllegalArgumentException("Cannot interpret on a stopped interpreter")
-
- taskManager.add {
- // Add a task using the given state of our streams
- StreamState.withStreams {
- if (silent) {
- sparkIMain.beSilentDuring {
- sparkIMain.interpret(code)
- }
- } else {
- sparkIMain.interpret(code)
- }
- }
- }
- }
-
- protected def interpretMapToResultAndExecuteInfo(
- future: Future[(Results.Result, String)]
- ): Future[(Results.Result, Either[ExecuteOutput, ExecuteFailure])] = {
- import scala.concurrent.ExecutionContext.Implicits.global
- future map {
- case (Results.Success, output) => (Results.Success, Left(output))
- case (Results.Incomplete, output) => (Results.Incomplete, Left(output))
- case (Results.Aborted, output) => (Results.Aborted, Right(null))
- case (Results.Error, output) =>
- val x = sparkIMain.valueOfTerm(ExecutionExceptionName)
- (
- Results.Error,
- Right(
- interpretConstructExecuteError(
- sparkIMain.valueOfTerm(ExecutionExceptionName),
- output
- )
- )
- )
- }
- }
-
- protected def interpretConstructExecuteError(
- value: Option[AnyRef],
- output: String
- ) = value match {
- // Runtime error
- case Some(e) if e != null =>
- val ex = e.asInstanceOf[Throwable]
- // Clear runtime error message
- sparkIMain.directBind(
- ExecutionExceptionName,
- classOf[Throwable].getName,
- null
- )
- ExecuteError(
- ex.getClass.getName,
- ex.getLocalizedMessage,
- ex.getStackTrace.map(_.toString).toList
- )
- // Compile time error, need to check internal reporter
- case _ =>
- if (sparkIMain.isReportingErrors)
- // TODO: This wrapper is not needed when just getting compile
- // error that we are not parsing... maybe have it be purely
- // output and have the error check this?
- ExecuteError(
- "Compile Error", output, List()
- )
- else
- ExecuteError("Unknown", "Unable to retrieve error!", List())
- }
-}
diff --git a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/SparkIMainProducerLike.scala b/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/SparkIMainProducerLike.scala
deleted file mode 100644
index 882f632..0000000
--- a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/SparkIMainProducerLike.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-
-package org.apache.toree.kernel.interpreter.scala
-
-import org.apache.spark.repl.SparkIMain
-
-import scala.tools.nsc.interpreter.JPrintWriter
-import scala.tools.nsc.{Settings, interpreter}
-
-trait SparkIMainProducerLike {
- /**
- * Constructs a new instance of SparkIMain.
- *
- * @param settings The settings associated with the SparkIMain instance
- * @param out The output writer associated with the SparkIMain instance
- *
- * @return The new SparkIMain instance
- */
- def newSparkIMain(settings: Settings, out: interpreter.JPrintWriter): SparkIMain
-}
-
-trait StandardSparkIMainProducer extends SparkIMainProducerLike {
- override def newSparkIMain(
- settings: Settings, out: JPrintWriter
- ): SparkIMain = {
- val s = new SparkIMain(settings, out)
- s.initializeSynchronous()
-
- s
- }
-}
\ No newline at end of file
diff --git a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/StandardSettingsProducer.scala b/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/StandardSettingsProducer.scala
deleted file mode 100644
index 2bd4887..0000000
--- a/scala-interpreter/src/main/scala-2.10/org/apache/toree/kernel/interpreter/scala/StandardSettingsProducer.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-package org.apache.toree.kernel.interpreter.scala
-
-import org.apache.spark.repl.SparkCommandLine
-
-import scala.tools.nsc.Settings
-
-/**
- * Created by mniekerk on 7/15/16.
- */
-
-
-trait StandardSettingsProducer extends SettingsProducerLike {
- override def newSettings(args: List[String]): Settings =
- new SparkCommandLine(args).settings
-}
diff --git a/scala-interpreter/src/test/scala-2.10/scala/ScalaInterpreterSpec.scala b/scala-interpreter/src/test/scala-2.10/scala/ScalaInterpreterSpec.scala
deleted file mode 100644
index 616c68f..0000000
--- a/scala-interpreter/src/test/scala-2.10/scala/ScalaInterpreterSpec.scala
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License
- */
-
-package org.apache.toree.kernel.interpreter.scala
-
-import java.io.{File, InputStream, OutputStream}
-import java.net.{URLClassLoader, URL}
-
-import org.apache.toree.interpreter.Results.Result
-import org.apache.toree.interpreter._
-import org.apache.toree.utils.TaskManager
-import org.apache.spark.SparkConf
-import org.apache.toree.kernel.interpreter.scala._
-import org.apache.spark.repl.SparkIMain
-import org.mockito.Matchers._
-import org.mockito.Mockito._
-import org.mockito.invocation.InvocationOnMock
-import org.mockito.stubbing.Answer
-import org.scalatest.mock.MockitoSugar
-import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
-
-import scala.concurrent.Future
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.{JPrintWriter, IR}
-import scala.tools.nsc.util.ClassPath
-
-class ScalaInterpreterSpec extends FunSpec
- with Matchers with MockitoSugar with BeforeAndAfter
-{
- private var interpreter: ScalaInterpreter = _
- private var interpreterNoPrintStreams: ScalaInterpreter = _
- private var mockSparkIMain: SparkIMain = _
- private var mockTaskManager: TaskManager = _
- private var mockSettings: Settings = _
-
- trait StubbedUpdatePrintStreams extends Interpreter {
- override def updatePrintStreams(
- in: InputStream,
- out: OutputStream,
- err: OutputStream
- ): Unit = {}
- }
-
- trait SingleLineInterpretLineRec extends StubbedStartInterpreter {
- override protected def interpretRec(lines: List[String], silent: Boolean, results: (Result, Either[ExecuteOutput, ExecuteFailure])): (Result, Either[ExecuteOutput, ExecuteFailure]) =
- interpretLine(lines.mkString("\n"))
- }
-
- trait StubbedInterpretAddTask extends StubbedStartInterpreter {
- override protected def interpretAddTask(code: String, silent: Boolean) =
- mock[Future[IR.Result]]
- }
-
- trait StubbedInterpretMapToCustomResult extends StubbedStartInterpreter {
- override protected def interpretMapToCustomResult(future: Future[IR.Result]) =
- mock[Future[Results.Result with Product with Serializable]]
- }
-
- trait StubbedInterpretMapToResultAndOutput extends StubbedStartInterpreter {
- override protected def interpretMapToResultAndOutput(future: Future[Results.Result]) =
- mock[Future[(Results.Result, String)]]
- }
-
- trait StubbedInterpretMapToResultAndExecuteInfo extends StubbedStartInterpreter {
- override protected def interpretMapToResultAndExecuteInfo(future: Future[(Results.Result, String)]) =
- mock[Future[(
- Results.Result with Product with Serializable,
- Either[ExecuteOutput, ExecuteFailure] with Product with Serializable
- )]]
- }
-
- trait StubbedInterpretConstructExecuteError extends StubbedStartInterpreter {
- override protected def interpretConstructExecuteError(value: Option[AnyRef], output: String) =
- mock[ExecuteError]
- }
-
- class StubbedStartInterpreter
- extends ScalaInterpreter
- {
- override def newSparkIMain(settings: Settings, out: JPrintWriter): SparkIMain = mockSparkIMain
- override def newTaskManager(): TaskManager = mockTaskManager
- override def newSettings(args: List[String]): Settings = mockSettings
-
- // Stubbed out (not testing this)
- override protected def updateCompilerClassPath(jars: URL*): Unit = {}
-
- override protected def reinitializeSymbols(): Unit = {}
-
- override protected def refreshDefinitions(): Unit = {}
- }
-
- before {
- mockSparkIMain = mock[SparkIMain]
-
- mockTaskManager = mock[TaskManager]
-
- val mockSettingsClasspath = mock[Settings#PathSetting]
- doNothing().when(mockSettingsClasspath).value_=(any[Settings#PathSetting#T])
-
- mockSettings = mock[Settings]
- doReturn(mockSettingsClasspath).when(mockSettings).classpath
- doNothing().when(mockSettings).embeddedDefaults(any[ClassLoader])
-
- interpreter = new StubbedStartInterpreter
-
- interpreterNoPrintStreams =
- new StubbedStartInterpreter with StubbedUpdatePrintStreams
- }
-
- after {
- mockSparkIMain = null
- mockTaskManager = null
- mockSettings = null
- interpreter = null
- }
-
- describe("ScalaInterpreter") {
- describe("#addJars") {
- it("should add each jar URL to the runtime classloader") {
- // Needed to access runtimeClassloader method
- import scala.language.reflectiveCalls
-
- // Create a new interpreter exposing the internal runtime classloader
- val itInterpreter = new StubbedStartInterpreter {
- // Expose the runtime classloader
- def runtimeClassloader = _runtimeClassloader
- }
-
- val url = new URL("file://expected")
- itInterpreter.addJars(url)
-
- itInterpreter.runtimeClassloader.getURLs should contain (url)
- }
-
- it("should add each jar URL to the interpreter classpath") {
- val url = new URL("file://expected")
- interpreter.addJars(url)
- }
- }
-
- describe("#buildClasspath") {
- it("should return classpath based on classloader hierarchy") {
- // Needed to access runtimeClassloader method
- import scala.language.reflectiveCalls
-
- // Create a new interpreter exposing the internal runtime classloader
- val itInterpreter = new StubbedStartInterpreter
-
- val parentUrls = Array(
- new URL("file:/some/dir/a.jar"),
- new URL("file:/some/dir/b.jar"),
- new URL("file:/some/dir/c.jar")
- )
-
- val theParentClassloader = new URLClassLoader(parentUrls, null)
-
- val urls = Array(
- new URL("file:/some/dir/1.jar"),
- new URL("file:/some/dir/2.jar"),
- new URL("file:/some/dir/3.jar")
- )
-
- val theClassloader = new URLClassLoader(urls, theParentClassloader)
-
- val expected = ClassPath.join((parentUrls ++ urls).map(_.toString) :_*)
-
- itInterpreter.buildClasspath(theClassloader) should be(expected)
- }
- }
-
- describe("#interrupt") {
- it("should fail a require if the interpreter is not started") {
- intercept[IllegalArgumentException] {
- interpreter.interrupt()
- }
- }
-
- it("should call restart() on the task manager") {
- interpreterNoPrintStreams.start()
-
- interpreterNoPrintStreams.interrupt()
-
- verify(mockTaskManager).restart()
- }
- }
-
- // TODO: Provide testing for the helper functions that return various
- // mapped futures -- this was too difficult for me to figure out
- // in a short amount of time
- describe("#interpret") {
- it("should fail if not started") {
- intercept[IllegalArgumentException] {
- interpreter.interpret("val x = 3")
- }
- }
-
- it("should add a new task to the task manager") {
- var taskManagerAddCalled = false
- val itInterpreter =
- new StubbedStartInterpreter
- with SingleLineInterpretLineRec
- with StubbedUpdatePrintStreams
- //with StubbedInterpretAddTask
- with StubbedInterpretMapToCustomResult
- with StubbedInterpretMapToResultAndOutput
- with StubbedInterpretMapToResultAndExecuteInfo
- with StubbedInterpretConstructExecuteError
- with TaskManagerProducerLike
- {
- // Must override this way since cannot figure out the signature
- // to verify this as a mock
- override def newTaskManager(): TaskManager = new TaskManager {
- override def add[T](taskFunction: => T): Future[T] = {
- taskManagerAddCalled = true
- mock[TaskManager].add(taskFunction)
- }
- }
- }
-
- itInterpreter.start()
-
- itInterpreter.interpret("val x = 3")
-
- taskManagerAddCalled should be (true)
- }
- }
-
- describe("#start") {
- it("should initialize the task manager") {
- interpreterNoPrintStreams.start()
-
- verify(mockTaskManager).start()
- }
-
- // TODO: Figure out how to trigger sparkIMain.beQuietDuring { ... }
- /*it("should add an import for SparkContext._") {
- interpreterNoPrintStreams.start()
-
- verify(mockSparkIMain).addImports("org.apache.spark.SparkContext._")
- }*/
- }
-
- describe("#stop") {
- describe("when interpreter already started") {
- it("should stop the task manager") {
- interpreterNoPrintStreams.start()
- interpreterNoPrintStreams.stop()
-
- verify(mockTaskManager).stop()
- }
-
- it("should stop the SparkIMain") {
- interpreterNoPrintStreams.start()
- interpreterNoPrintStreams.stop()
-
- verify(mockSparkIMain).close()
- }
- }
- }
-
- describe("#updatePrintStreams") {
- // TODO: Figure out how to trigger sparkIMain.beQuietDuring { ... }
- }
-
-// describe("#classServerUri") {
-// it("should fail a require if the interpreter is not started") {
-// intercept[IllegalArgumentException] {
-// interpreter.classServerURI
-// }
-// }
-
-// TODO: Find better way to test this
-// it("should invoke the underlying SparkIMain implementation") {
- // Using hack to access private class
-// val securityManagerClass =
-// java.lang.Class.forName("org.apache.spark.SecurityManager")
-// val httpServerClass =
-// java.lang.Class.forName("org.apache.spark.HttpServer")
-// val httpServerConstructor = httpServerClass.getDeclaredConstructor(
-// classOf[SparkConf], classOf[File], securityManagerClass, classOf[Int],
-// classOf[String])
-// val httpServer = httpServerConstructor.newInstance(
-// null, null, null, 0: java.lang.Integer, "")
-//
-// // Return the server instance (cannot mock a private class)
-// // NOTE: Can mock the class through reflection, but cannot verify
-// // a method was called on it since treated as type Any
-// //val mockHttpServer = org.mockito.Mockito.mock(httpServerClass)
-// doAnswer(new Answer[String] {
-// override def answer(invocation: InvocationOnMock): String = {
-// val exceptionClass =
-// java.lang.Class.forName("org.apache.spark.ServerStateException")
-// val exception = exceptionClass
-// .getConstructor(classOf[String])
-// .newInstance("")
-// .asInstanceOf[Exception]
-// throw exception
-// }
-// }
-// ).when(mockSparkIMain)
-
-// interpreterNoPrintStreams.start()
-
- // Not going to dig so deeply that we actually start a web server for
- // this to work... just throwing this specific exception proves that
- // we have called the uri method of the server
-// try {
-// interpreterNoPrintStreams.classServerURI
-// fail()
-// } catch {
-// // Have to catch this way because... of course... the exception is
-// // also private
-// case ex: Throwable =>
-// ex.getClass.getName should be ("org.apache.spark.ServerStateException")
-// }
-// }
-// }
-
- describe("#read") {
- it("should fail a require if the interpreter is not started") {
- intercept[IllegalArgumentException] {
- interpreter.read("someVariable")
- }
- }
-
- it("should execute the underlying valueOfTerm method") {
- interpreter.start()
- interpreter.read("someVariable")
-
- verify(mockSparkIMain).valueOfTerm(anyString())
- }
- }
-
- describe("#doQuietly") {
- it("should fail a require if the interpreter is not started") {
- intercept[IllegalArgumentException] {
- interpreter.doQuietly {}
- }
- }
-
- // TODO: Figure out how to verify sparkIMain.beQuietDuring { ... }
- /*it("should invoke the underlying SparkIMain implementation") {
- interpreterNoPrintStreams.start()
- interpreterNoPrintStreams.doQuietly {}
-
- verify(mockSparkIMain).beQuietDuring(any[IR.Result])
- }*/
- }
-
- describe("#bind") {
- it("should fail a require if the interpreter is not started") {
- intercept[IllegalArgumentException] {
- interpreter.bind("", "", null, null)
- }
- }
-
- it("should invoke the underlying SparkIMain implementation") {
- interpreterNoPrintStreams.start()
- interpreterNoPrintStreams.bind("", "", null, null)
-
- verify(mockSparkIMain).bind(
- anyString(), anyString(), any[Any], any[List[String]])
- }
- }
-
- describe("#truncateResult") {
- it("should truncate result of res result") {
- // Results that match
- interpreter.truncateResult("res7: Int = 38") should be("38")
- interpreter.truncateResult("res7: Int = 38",true) should be("Int = 38")
- interpreter.truncateResult("res4: String = \nVector(1\n, 2\n)") should be ("Vector(1\n, 2\n)")
- interpreter.truncateResult("res4: String = \nVector(1\n, 2\n)",true) should be ("String = Vector(1\n, 2\n)")
- interpreter.truncateResult("res123") should be("")
- interpreter.truncateResult("res1") should be("")
- // Results that don't match
- interpreter.truncateResult("resabc: Int = 38") should be("")
- }
-
- it("should truncate res results that have tuple values") {
- interpreter.truncateResult("res0: (String, Int) = (hello,1)") should
- be("(hello,1)")
- }
-
- it("should truncate res results that have parameterized types") {
- interpreter.truncateResult(
- "res0: Class[_ <: (String, Int)] = class scala.Tuple2"
- ) should be("class scala.Tuple2")
- }
- }
- }
-}