#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
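# pio-shell: launch an interactive shell for Apache PredictionIO.
#
# Usage:
#   pio-shell                                       Scala console via sbt, without Apache Spark
#   pio-shell --with-spark [spark-submit args...]   spark-shell with the PredictionIO assembly jars
#   pio-shell --with-pyspark [pyspark args...]      pyspark with the PredictionIO assembly jars
#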
# Determine PIO_HOME from the location of this script if it is not already set.
if [ -z "$PIO_HOME" ] ; then
  PIO_FILE=$(readlink -f "$0" 2>/dev/null)
  if [ $? = 0 ] ; then
    export PIO_HOME="$(cd "$(dirname "$PIO_FILE")"/..; pwd)"
  else
    # readlink -f is unavailable (e.g. on macOS); resolve symlinks manually.
    CURRENT_DIR=$(pwd)
    TARGET_FILE="$0"
    cd "$(dirname "$TARGET_FILE")"
    TARGET_FILE=$(basename "$TARGET_FILE")
    while [ -L "$TARGET_FILE" ]
    do
      TARGET_FILE=$(readlink "$TARGET_FILE")
      cd "$(dirname "$TARGET_FILE")"
      TARGET_FILE=$(basename "$TARGET_FILE")
    done
    export PIO_HOME="$(cd "$(dirname "$TARGET_FILE")"/..; pwd -P)"
    cd "$CURRENT_DIR"
  fi
fi
# Locate the configuration directory, falling back to the system-wide default.
if [ -z "$PIO_CONF_DIR" ] ; then
  export PIO_CONF_DIR="${PIO_HOME}/conf"
  if [ ! -d "$PIO_CONF_DIR" ] ; then
    export PIO_CONF_DIR="/etc/predictionio"
    if [ ! -d "$PIO_CONF_DIR" ] ; then
      echo "PIO_CONF_DIR could not be found." >&2
      exit 1
    fi
  fi
fi
# Load PredictionIO environment variables (e.g. from pio-env.sh).
. "${PIO_HOME}/bin/load-pio-env.sh"
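# Dispatch on the first argument: launch spark-shell or pyspark with the
# PredictionIO assembly jars, or fall back to a plain sbt console.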
if [[ "$1" == "--with-spark" ]]
then
echo "Starting the PIO shell with the Apache Spark Shell."
# Get paths of assembly jars to pass to spark-shell
. ${PIO_HOME}/bin/compute-classpath.sh
shift
${SPARK_HOME}/bin/spark-shell --jars ${ASSEMBLY_JARS} $@
elif [[ "$1" == "--with-pyspark" ]]
then
echo "Starting the PIO shell with the Apache Spark Shell."
# Get paths of assembly jars to pass to pyspark
. ${PIO_HOME}/bin/compute-classpath.sh
shift
export PYTHONPATH=${PIO_HOME}/python
${SPARK_HOME}/bin/pyspark --jars ${ASSEMBLY_JARS} $@
else
echo -e "\033[0;33mStarting the PIO shell without Apache Spark.\033[0m"
echo -e "\033[0;33mIf you need the Apache Spark library, run 'pio-shell --with-spark [spark-submit arguments...]'.\033[0m"
cd ${PIO_HOME}
./sbt/sbt console
fi