| #!/usr/bin/env bash |
| |
| # |
| # Licensed to the Apache Software Foundation (ASF) under one or more |
| # contributor license agreements. See the NOTICE file distributed with |
| # this work for additional information regarding copyright ownership. |
| # The ASF licenses this file to You under the Apache License, Version 2.0 |
| # (the "License"); you may not use this file except in compliance with |
| # the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| # |
| |
# Scala version the examples were compiled against; used to locate the SBT
# build output directory (target/scala-$SCALA_VERSION).
SCALA_VERSION=2.9.3

# Figure out where the Scala framework is installed.
# Quoting "$(dirname "$0")" and using $(...) instead of backticks keeps this
# working when the install path contains spaces.
FWDIR="$(cd "$(dirname "$0")"; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

# Load environment variables from conf/spark-env.sh, if it exists.
# The path is quoted so a SPARK_HOME with spaces doesn't split the test.
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
  . "$FWDIR/conf/spark-env.sh"
fi
| |
# The first argument must name the example class to run; bail out with a
# usage message on stderr otherwise.
if [ -z "${1:-}" ]; then
  printf '%s\n' "Usage: run-example <example-class> [<args>]" >&2
  exit 1
fi
| |
# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
# to avoid the -sources and -doc packages that are built by publish-local.
EXAMPLES_DIR="$FWDIR"/examples
SPARK_EXAMPLES_JAR=""
# Use the JAR from the SBT build, if present. Iterating over the glob instead
# of testing/`ls`-ing it is safe for zero, one, or several matches: the old
# `[ -e glob ]` form errors out ("binary operator expected") when more than
# one jar matches, and `ls` would stuff several newline-separated paths into
# SPARK_EXAMPLES_JAR. With several matches the last one now wins.
for jar in "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar; do
  if [ -e "$jar" ]; then
    export SPARK_EXAMPLES_JAR="$jar"
  fi
done
# Use the JAR from the Maven build, if present (it takes precedence over the
# SBT jar, matching the original ordering).
# TODO: this also needs to become an assembly!
for jar in "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar; do
  if [ -e "$jar" ]; then
    export SPARK_EXAMPLES_JAR="$jar"
  fi
done
if [ -z "$SPARK_EXAMPLES_JAR" ]; then
  echo "Failed to find Spark examples assembly in $FWDIR/examples/target" >&2
  echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
  exit 1
fi
| |
# Since the examples JAR ideally shouldn't include spark-core (that dependency should be
# "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
# Quoting "$FWDIR" fixes the invocation when SPARK_HOME contains spaces;
# $(...) replaces the legacy backtick form.
CLASSPATH=$("$FWDIR/bin/compute-classpath.sh")
CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
| |
# Find the java binary: prefer $JAVA_HOME/bin/java, then fall back to any
# `java` on the PATH. `command -v … >/dev/null` is the portable presence
# check; the old `[ `command -v java` ]` relied on an unquoted command
# substitution collapsing to a no-argument test, which is fragile (it would
# mis-parse if the resolved path ever contained spaces).
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
elif command -v java > /dev/null 2>&1; then
  RUNNER="java"
else
  # NOTE(review): message only mentions JAVA_HOME, but this branch also means
  # no `java` was found on the PATH.
  echo "JAVA_HOME is not set" >&2
  exit 1
fi
| |
# When SPARK_PRINT_LAUNCH_COMMAND=1, show the exact command line that is
# about to run (useful for debugging classpath/launcher issues).
if [[ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]]; then
  printf 'Spark Command: '
  echo "$RUNNER" -cp "$CLASSPATH" "$@"
  printf '========================================\n\n'
fi

# Replace this shell with the JVM so the example inherits our PID and signals.
exec "$RUNNER" -cp "$CLASSPATH" "$@"