#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
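#
# hcat: command line launcher for HCatalog. It locates the Hive installation,
# assembles HADOOP_CLASSPATH from the HCatalog and Hive jars, and runs
# org.apache.hcatalog.cli.HCatCli through the hadoop launcher script.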
# Resolve our absolute path
# resolve links - $0 may be a softlink
this="${BASH_SOURCE-$0}"
while [ -h "$this" ]; do
  ls=`ls -ld "$this"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '.*/.*' > /dev/null; then
    this="$link"
  else
    this=`dirname "$this"`/"$link"
  fi
done
# convert relative path to absolute path
bin=`dirname "$this"`
script=`basename "$this"`
bin=`unset CDPATH; cd "$bin"; pwd`
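# (CDPATH is unset so `cd` cannot be redirected by it or print the target dir,
# which would corrupt the captured `pwd` output)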
this="$bin/$script"
if [ -e "$bin/../libexec/hcat-config.sh" ]; then
. "$bin"/../libexec/hcat-config.sh
else
. "$bin"/hcat-config.sh
fi
# Filter command line parameters: -secretDebugCmd triggers a dry run that
# prints the command instead of running it, -classpath dumps the classpath.
debug=false
dump_classpath=false
for f in "$@"; do
  if [[ $f = "-secretDebugCmd" ]]; then
    debug=true
  else
    remaining="${remaining} $f"
  fi
  if [[ $f = "-classpath" ]]; then
    dump_classpath=true
  fi
done
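# $remaining holds the arguments with -secretDebugCmd stripped; it is only
# used when echoing the would-be command in debug mode.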
# check for hive in the path
HIVE_IN_PATH=`which hive 2>/dev/null`
if [ -f "${HIVE_IN_PATH}" ]; then
  # dir of hive script
  HIVE_DIR=`dirname "$HIVE_IN_PATH"`
  # one level up for base dir
  HIVE_DIR=`dirname "$HIVE_DIR"`
fi
# HIVE_HOME env variable overrides hive in the path
HIVE_HOME=${HIVE_HOME:-$HIVE_DIR}
if [ "$HIVE_HOME" == "" ]; then
echo "Cannot find hive installation: \$HIVE_HOME must be set or hive must be in the path";
exit 4;
fi
if [ "$HIVE_HOME" == '/usr' ] || [ "$HIVE_HOME" == '/usr/' ]; then
#this would be a hive rpm install, bigtop rpm has HIVE_HOME dir
#structure in /usr/lib/hive. Use that if the dir structure looks good
if [ -d '/usr/lib/hive/conf' ] && [ -d '/usr/lib/hive/lib' ]; then
HIVE_HOME='/usr/lib/hive/';
fi
fi
HIVE_LIB_DIR=${HIVE_HOME}/lib
if [ ! -d "$HIVE_LIB_DIR" ]; then
echo "Cannot find lib dir within HIVE_HOME : $HIVE_LIB_DIR";
exit 4;
fi
HIVE_CONF_DIR=${HIVE_HOME}/conf
if [ ! -d "$HIVE_CONF_DIR" ]; then
echo "Cannot find conf dir within HIVE_HOME : $HIVE_CONF_DIR";
exit 4;
fi
# Find our hcatalog jar
shopt -s extglob
if (( `ls -1 "$HCAT_PREFIX"/share/hcatalog/hcatalog-[0-9]*.jar | wc -l` > 1 )) ; then
  echo "Error: found more than one hcatalog jar in $HCAT_PREFIX/share/hcatalog"
  exit 1
fi
HCAT_JAR=`ls "$HCAT_PREFIX"/share/hcatalog/hcatalog-[0-9]*.jar`
# Add the storage-handler jars (everything under share/hcatalog/lib) to the classpath.
for jar in ${HCAT_PREFIX}/share/hcatalog/lib/*.jar ; do
  HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
done
# Add all of the other jars to our classpath
for jar in ${HIVE_LIB_DIR}/*.jar ; do
  HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
done
# Put external jars, hcat jar, and config file in the classpath
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${HCAT_CLASSPATH}:${HCAT_JAR}:${HIVE_CONF_DIR}
if [ -n "$HBASE_CONF_DIR" ] && [ -d $HBASE_CONF_DIR ]; then
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${HBASE_CONF_DIR}
fi
export HADOOP_CLASSPATH=$HADOOP_CLASSPATH
export HADOOP_OPTS=$HADOOP_OPTS
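# HADOOP_CLASSPATH and HADOOP_OPTS are exported so the hadoop launcher picks
# them up when it builds the java command line.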
# run it
if [ "$debug" == "true" ]; then
echo "Would run:"
echo "exec $HADOOP_PREFIX/bin/hadoop jar $HCAT_JAR org.apache.hcatalog.cli.HCatCli $remaining"
echo "with HADOOP_CLASSPATH set to ($HADOOP_CLASSPATH)"
echo "and HADOOP_OPTS set to ($HADOOP_OPTS)"
elif [ "$dump_classpath" == "true" ]; then
echo $HADOOP_CLASSPATH
else
exec $HADOOP_PREFIX/bin/hadoop jar $HCAT_JAR org.apache.hcatalog.cli.HCatCli "$@"
fi