#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Shell script for starting the CarbonData SQL CLI
# Enter posix mode for bash
set -o posix
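# Main class handed to spark-submit at the bottom of this script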
export CLASS="org.apache.spark.sql.hive.cli.CarbonSQLCLIDriver"
# Figure out where Spark is installed
if [ -z "$SPARK_HOME" ]
then
  echo "\$SPARK_HOME is not set" 1>&2
  exit 1
fi
export FWDIR="$SPARK_HOME"
export CARBON_SOURCE="$(cd "$(dirname "$0")"/..; pwd)"
ASSEMBLY_DIR="$CARBON_SOURCE/assembly/target/scala-2.11"
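# Clear GREP_OPTIONS so user defaults (e.g. --color=always) cannot pollute the pipelines below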
GREP_OPTIONS=
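# Count the assembly jars; the directory may not exist yet if CarbonData has not been built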
num_jars="$(ls -1 "$ASSEMBLY_DIR" 2>/dev/null | grep "^apache-carbondata.*\.jar$" | wc -l)"
if [ "$num_jars" -eq "0" -a -z "$ASSEMBLY_DIR" ]; then
echo "Failed to find Carbondata assembly in $ASSEMBLY_DIR." 1>&2
echo "You need to build Carbondata before running this program." 1>&2
exit 1
fi
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^apache-carbondata.*\.jar$" || true)"
if [ "$num_jars" -gt "1" ]; then
echo "Found multiple Carbondata assembly jars in $ASSEMBLY_DIR:" 1>&2
echo "$ASSEMBLY_JARS" 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi
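# Exactly one assembly jar remains at this point; build its full path for spark-submit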
ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"
export JAR="$ASSEMBLY_JAR"
export CARBON_HOME=$CARBON_SOURCE
function usage {
  if [ -n "$1" ]; then
    echo "$1"
  fi
  echo "Usage: ./bin/carbon-spark-sql [options] [cli option]"
  "$FWDIR"/bin/spark-sql --help 2>&1 | grep -v Usage 1>&2
  exit "$2"
}
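# Export usage so it is available to child bash processes spawned below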
export -f usage
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage "" 0
fi
# Split spark-submit options from CLI options; the final command has the form:
#   spark-submit <options> --class <CLASS> <assembly jar> <cli options>
options=""
while true ; do
case "$1" in
-d|--define|-f|-i|--hiveconf|--hivevar|--database|-e|-S|--silent) cmd="$1"; break;;
-v|--verbose) options="$options $1" ; shift ;;
-*) options="$options $1 $2" ; shift 2 ;;
*) break ;;
esac
done
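# Print the final command for visibility, then replace this shell with spark-submit;
# $options is left unquoted on purpose so it word-splits back into separate arguments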
echo "$FWDIR"/bin/spark-submit $options --class "$CLASS" "$JAR" "$@"
exec "$FWDIR"/bin/spark-submit $options --class "$CLASS" "$JAR" "$@"