#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file contains environment variables required to run Spark. Copy it as
# spark-env.sh and edit that to configure Spark for your site.
#
# The following variables can be set in this file:
# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
# - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos
# - SPARK_JAVA_OPTS, to set node-specific JVM options for Spark. Note that
#   we recommend setting app-wide options in the application's driver program.
#     Examples of node-specific options: -Dspark.local.dir, GC options
#     Examples of app-wide options: -Dspark.serializer
#
# If using the standalone deploy mode, you can also set variables for it here:
# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
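#
# As an illustration of the standalone-mode variables above, a worker could be
# sized with commented-out entries like the following. These values are
# placeholders only, not recommendations; uncomment and adjust for your site:
#export SPARK_WORKER_CORES=4
#export SPARK_WORKER_MEMORY=4g
#export SPARK_WORKER_INSTANCES=1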

### Run everything with the JVM runtime instead of the bundled Scala launcher
export SPARK_LAUNCH_WITH_SCALA=0
export SPARK_LIBRARY_PATH=${SPARK_HOME}/lib
export SCALA_LIBRARY_PATH=${SPARK_HOME}/lib
export SPARK_MASTER_WEBUI_PORT=<%= master_ui_port %>
export SPARK_MASTER_PORT=<%= master_port %>

### Comment out the two lines above and uncomment the following if you want
### to run with the Scala version that is included with the package
#export SCALA_HOME=${SCALA_HOME:-/usr/lib/spark/scala}
#export PATH=$PATH:$SCALA_HOME/bin

### Change the following to specify a real cluster's Master host
export STANDALONE_SPARK_MASTER_HOST=<%= master_host %>
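
### SPARK_MASTER_IP is listed in the header above but not set by this template.
### If your standalone scripts expect it rather than STANDALONE_SPARK_MASTER_HOST,
### one option (an assumption, not part of the original template) is to derive
### it from the same templated value; uncomment to do so:
#export SPARK_MASTER_IP=$STANDALONE_SPARK_MASTER_HOST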