#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The name of the script being executed.
HADOOP_SHELL_EXECNAME="hadoop"
MYNAME="${BASH_SOURCE-$0}"

## @description  build up the hadoop command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  hadoop_add_option "buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "loglevel level" "set the log4j level for this command"
  hadoop_add_option "hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "workers" "turn on worker mode"

| hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability" |
| hadoop_add_subcommand "classpath" client "prints the class path needed to get the Hadoop jar and the required libraries" |
| hadoop_add_subcommand "conftest" client "validate configuration XML files" |
| hadoop_add_subcommand "credential" client "interact with credential providers" |
| hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon" |
| hadoop_add_subcommand "dtutil" client "operations related to delegation tokens" |
| hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables" |
| hadoop_add_subcommand "fs" client "run a generic filesystem user client" |
| hadoop_add_subcommand "jar <jar>" client "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command." |
| hadoop_add_subcommand "jnipath" client "prints the java.library.path" |
| hadoop_add_subcommand "kerbname" client "show auth_to_local principal conversion" |
| hadoop_add_subcommand "key" client "manage keys via the KeyProvider" |
| hadoop_add_subcommand "registrydns" daemon "run the registry DNS server" |
| hadoop_add_subcommand "trace" client "view and modify Hadoop tracing settings" |
| hadoop_add_subcommand "version" client "print the version" |
| hadoop_add_subcommand "kdiag" client "Diagnose Kerberos Problems" |
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
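
# Assuming the stock formatter, the help generated above looks roughly
# like (a sketch, not verbatim output):
#
#   Usage: hadoop [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]
#    or    hadoop [OPTIONS] CLASSNAME [CLASSNAME OPTIONS]
#
# followed by the registered options and subcommands.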

## @description  Default command handler for hadoop command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function hadoopcmd_case
{
  subcmd=$1
  shift

  case ${subcmd} in
    balancer|datanode|dfs|dfsadmin|dfsgroups| \
    namenode|secondarynamenode|fsck|fetchdt|oiv| \
    portmap|nfs3)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/dfsgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${subcmd}\" instead."
      hadoop_error ""
      # Try to locate hdfs and, if present, delegate to it.
| if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then |
| exec "${HADOOP_HDFS_HOME}/bin/hdfs" \ |
| --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@" |
| elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then |
| exec "${HADOOP_HOME}/bin/hdfs" \ |
| --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@" |
| else |
| hadoop_error "HADOOP_HDFS_HOME not found!" |
| exit 1 |
| fi |
| ;; |
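    # For example, "hadoop dfsadmin -report" is re-executed above as
    # "hdfs dfsadmin -report", and "hadoop dfsgroups" becomes
    # "hdfs groups" after the rename.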

    # mapred commands kept for backward compatibility
    pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/mrgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"mapred ${subcmd}\" instead."
      hadoop_error ""
      # Try to locate mapred and, if present, delegate to it.
| if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then |
| exec "${HADOOP_MAPRED_HOME}/bin/mapred" \ |
| --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@" |
| elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then |
| exec "${HADOOP_HOME}/bin/mapred" \ |
| --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@" |
| else |
| hadoop_error "HADOOP_MAPRED_HOME not found!" |
| exit 1 |
| fi |
| ;; |
    checknative)
      HADOOP_CLASSNAME=org.apache.hadoop.util.NativeLibraryChecker
    ;;
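    # e.g. "hadoop checknative" (above) reports whether libhadoop and the
    # optional compression libraries could actually be loaded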
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    conftest)
      HADOOP_CLASSNAME=org.apache.hadoop.util.ConfTest
    ;;
    credential)
      HADOOP_CLASSNAME=org.apache.hadoop.security.alias.CredentialShell
    ;;
    daemonlog)
      HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
    ;;
    dtutil)
      HADOOP_CLASSNAME=org.apache.hadoop.security.token.DtUtilShell
    ;;
    envvars)
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
      echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
      echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
      echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      if [[ -n "${QATESTMODE}" ]]; then
        echo "MYNAME=${MYNAME}"
        echo "HADOOP_SHELL_EXECNAME=${HADOOP_SHELL_EXECNAME}"
      fi
      exit 0
    ;;
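    # envvars (above) prints one NAME='value' line per variable; sample
    # output, with illustrative paths:
    #   JAVA_HOME='/usr/lib/jvm/java-11-openjdk'
    #   HADOOP_CONF_DIR='/opt/hadoop/etc/hadoop'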
    fs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    jar)
      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
        hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
      fi
      if [[ -z $1 || $1 = "--help" ]]; then
        echo "Usage: hadoop jar <jar> [mainClass] args..."
        exit 0
      fi
      HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
    ;;
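    # e.g. "hadoop jar examples.jar org.example.WordCount in out" (the jar
    # and class names are illustrative); RunJar unpacks the jar and invokes
    # the given, or manifest-declared, main class with the remaining args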
    jnipath)
      hadoop_finalize
      echo "${JAVA_LIBRARY_PATH}"
      exit 0
    ;;
    kerbname)
      HADOOP_CLASSNAME=org.apache.hadoop.security.HadoopKerberosName
    ;;
    kdiag)
      HADOOP_CLASSNAME=org.apache.hadoop.security.KDiag
    ;;
    key)
      HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.KeyShell
    ;;
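    # registrydns is the only subcommand here that may run as a daemon;
    # when it needs a privileged port (the standard DNS port 53 is below
    # 1024), the secure starter class is used in place of the server class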
    registrydns)
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME='org.apache.hadoop.registry.server.dns.PrivilegedRegistryDNSStarter'
      HADOOP_CLASSNAME='org.apache.hadoop.registry.server.dns.RegistryDNSServer'
    ;;
    trace)
      HADOOP_CLASSNAME=org.apache.hadoop.tracing.TraceAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
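    # Anything unrecognized is treated as a Java class name, e.g.
    # "hadoop org.apache.hadoop.util.VersionInfo" behaves like
    # "hadoop version"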
    *)
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}

# This script runs the hadoop core commands.

# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
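
# For example, if this script lives at /opt/hadoop/bin/hadoop (an
# illustrative install location) and HADOOP_HOME is unset, libexec
# defaults to /opt/hadoop/bin/../libexec.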

HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
  # shellcheck source=./hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
else
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." >&2
  exit 1
fi

# now that we have support code, resolve MYNAME to an absolute path so we
# can use it later
MYNAME=$(hadoop_abs "${MYNAME}")

if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi

HADOOP_SUBCMD=$1
shift

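# If a (command)_(subcommand)_USER variable asks for this subcommand to
# run as a different account, re-execute the script (roughly, via su)
# under that user; --reexec marks the second pass so this happens once.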
if hadoop_need_reexec hadoop "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hadoop "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

HADOOP_SUBCMD_ARGS=("$@")

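# A shell profile or ~/.hadooprc may add subcommands dynamically by
# defining a hadoop_subcommand_<name> function, which takes precedence
# over the built-in case table above. A minimal sketch, using a
# hypothetical "hello" subcommand and an illustrative class name:
#
#   function hadoop_subcommand_hello
#   {
#     HADOOP_CLASSNAME=my.example.Hello
#   }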
if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hadoop_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hadoop_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hadoopcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi

hadoop_add_client_opts

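# In worker mode the command is replayed on every listed host, e.g.
# "hadoop --workers --hostnames node1,node2 version" (the hostnames are
# illustrative).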
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_COMMON_HOME}/bin/hadoop" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler