| #!/usr/bin/env bash |
| |
| # Licensed to the Apache Software Foundation (ASF) under one or more |
| # contributor license agreements. See the NOTICE file distributed with |
| # this work for additional information regarding copyright ownership. |
| # The ASF licenses this file to You under the Apache License, Version 2.0 |
| # (the "License"); you may not use this file except in compliance with |
| # the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| |
# Resolve the absolute directory containing this script so that the
# libexec configuration can be located relative to it.
# Fix: use $(...) instead of backticks and quote every expansion so the
# script works when installed under a path containing spaces; use the
# portable `command -v` instead of the external `which`.
bin=$(command -v "$0")
bin=$(dirname "${bin}")
bin=$(cd "$bin"; pwd)

DEFAULT_LIBEXEC_DIR="$bin"/../libexec

# Allow callers to override where mapred-config.sh lives; otherwise fall
# back to a copy sitting next to this script.
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
if [ -e "${HADOOP_LIBEXEC_DIR}/mapred-config.sh" ]; then
  . "${HADOOP_LIBEXEC_DIR}/mapred-config.sh"
else
  . "$bin/mapred-config.sh"
fi
| |
# Emit the command-line help text for the mapred launcher on stdout.
print_usage() {
  cat <<'USAGE'
Usage: mapred [--config confdir] [--loglevel loglevel] COMMAND
 where COMMAND is one of:
 pipes run a Pipes job
 job manipulate MapReduce jobs
 queue get information regarding JobQueues
 classpath prints the class path needed for running
 mapreduce subcommands
 historyserver run job history servers as a standalone daemon
 distcp <srcurl> <desturl> copy file or directories recursively
 archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive
 hsadmin job history server admin interface

Most commands print help when invoked w/o parameters.
USAGE
}
| |
# No arguments at all: show help and leave.
if [ $# -eq 0 ]; then
  print_usage
  exit
fi

# First argument names the subcommand; the rest are forwarded to it.
COMMAND=$1
shift

# Explicit help flags short-circuit before any dispatch work.
if [ "$COMMAND" = "--help" ] || [ "$COMMAND" = "-help" ] || [ "$COMMAND" = "-h" ]; then
  print_usage
  exit
fi
| |
# Map the subcommand to its implementing Java class and per-command JVM
# options.  Fixes over the old if/elif chain: the invalid-command echo is
# quoted (an unquoted $COMMAND was word-split and glob-expanded), the
# heap-size test uses -n instead of != "", and the -Xmx string is built
# with a single clean expansion.
case "$COMMAND" in
  job)
    CLASS=org.apache.hadoop.mapred.JobClient
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  queue)
    CLASS=org.apache.hadoop.mapred.JobQueueClient
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  pipes)
    CLASS=org.apache.hadoop.mapred.pipes.Submitter
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  sampler)
    CLASS=org.apache.hadoop.mapred.lib.InputSampler
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  classpath)
    # CLASS is selected later, once the CLASSPATH has been assembled.
    :
    ;;
  historyserver)
    CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
    HADOOP_OPTS="$HADOOP_OPTS -Dmapred.jobsummary.logger=${HADOOP_JHS_LOGGER:-INFO,console} $HADOOP_JOB_HISTORYSERVER_OPTS"
    # Optional dedicated heap size for the history server JVM (in MB).
    if [ -n "$HADOOP_JOB_HISTORYSERVER_HEAPSIZE" ]; then
      JAVA_HEAP_MAX="-Xmx${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}m"
    fi
    ;;
  mradmin|jobtracker|tasktracker|groups)
    # MR1 daemons were removed; point users at the yarn script instead.
    echo "Sorry, the $COMMAND command is no longer supported."
    echo "You may find similar functionality with the \"yarn\" shell command."
    print_usage
    exit 1
    ;;
  distcp)
    CLASS=org.apache.hadoop.tools.DistCp
    CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  archive)
    CLASS=org.apache.hadoop.tools.HadoopArchives
    CLASSPATH=${CLASSPATH}:${TOOL_PATH}
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  hsadmin)
    CLASS=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
    ;;
  *)
    echo "$COMMAND - invalid command"
    print_usage
    exit 1
    ;;
esac
| |
# for developers, add mapred classes to CLASSPATH
# Each build-tree directory is appended only if it exists.  Note that an
# existing build/webapps adds the parent "build" directory, so that the
# webapps/ resources resolve relative to it.
[ -d "$HADOOP_MAPRED_HOME/build/classes" ] && CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
[ -d "$HADOOP_MAPRED_HOME/build/webapps" ] && CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build
[ -d "$HADOOP_MAPRED_HOME/build/test/classes" ] && CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/test/classes
[ -d "$HADOOP_MAPRED_HOME/build/tools" ] && CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
| |
# for releases, add core mapred jar & webapps to CLASSPATH
if [ -d "$HADOOP_PREFIX/${MAPRED_DIR}/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/${MAPRED_DIR}
fi

# Append the mapred core jars, the YARN jars, the mapred lib jars and any
# module jars — in that order.  The globs are deliberately left unquoted
# so they expand; an unmatched glob is appended as the literal pattern,
# matching the historical behavior of this script.
for jar in $HADOOP_MAPRED_HOME/${MAPRED_DIR}/*.jar \
           $HADOOP_YARN_HOME/${YARN_DIR}/*.jar \
           $HADOOP_MAPRED_HOME/${MAPRED_LIB_JARS_DIR}/*.jar \
           $HADOOP_MAPRED_HOME/modules/*.jar; do
  CLASSPATH=${CLASSPATH}:$jar
done
| |
# "mapred classpath" with extra arguments delegates to the Classpath
# utility class; with no arguments it prints the assembled CLASSPATH and
# exits.  Fixes: compare $cygwin as a string instead of executing it as a
# command (the old `if $cygwin` treated an unset/empty value as success
# and clobbered CLASSPATH with the output of a missing cygpath), and
# quote the echo so the literal *.jar remnants of unmatched globs cannot
# be pathname-expanded.
if [ "$COMMAND" = "classpath" ] ; then
  if [ "$#" -gt 0 ]; then
    CLASS=org.apache.hadoop.util.Classpath
  else
    # $cygwin is presumably set to true/false by mapred-config.sh — TODO confirm
    if [ "$cygwin" = true ]; then
      CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
    fi
    echo "$CLASSPATH"
    exit 0
  fi
fi
| |
# cygwin path translation: convert POSIX paths to Windows form for the JVM.
# Fix: compare $cygwin as a string.  The old `if $cygwin` executed the
# variable's value as a command, so an unset/empty $cygwin counted as
# success and every variable below was silently emptied by the output of
# a missing cygpath (errors were hidden by 2>/dev/null).
# NOTE(review): $cygwin is presumably set to "true"/"false" by the sourced
# mapred-config.sh — confirm against hadoop-config.sh.
if [ "$cygwin" = true ]; then
  CLASSPATH=$(cygpath -p -w "$CLASSPATH" 2>/dev/null)
  HADOOP_LOG_DIR=$(cygpath -w "$HADOOP_LOG_DIR" 2>/dev/null)
  HADOOP_PREFIX=$(cygpath -w "$HADOOP_PREFIX" 2>/dev/null)
  HADOOP_CONF_DIR=$(cygpath -w "$HADOOP_CONF_DIR" 2>/dev/null)
  HADOOP_COMMON_HOME=$(cygpath -w "$HADOOP_COMMON_HOME" 2>/dev/null)
  HADOOP_HDFS_HOME=$(cygpath -w "$HADOOP_HDFS_HOME" 2>/dev/null)
  HADOOP_YARN_HOME=$(cygpath -w "$HADOOP_YARN_HOME" 2>/dev/null)
  HADOOP_MAPRED_HOME=$(cygpath -w "$HADOOP_MAPRED_HOME" 2>/dev/null)
fi
| |
# Default the security audit logger to a no-op appender unless overridden.
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

export CLASSPATH
# Launch the selected class, replacing this shell with the JVM process.
# $JAVA and $JAVA_HEAP_MAX are presumably provided by the sourced
# mapred-config.sh — TODO confirm.  JAVA_HEAP_MAX, HADOOP_OPTS and CLASS
# are intentionally unquoted so they word-split into separate JVM
# arguments; "$@" forwards the remaining command-line args verbatim.
exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"