#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######
# Advanced Users Only
######
# You can do things like replace parts of the shell underbelly.
# Most of this code is in hadoop-functions.sh.
#
#
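# As a minimal illustration of the pattern (a sketch only: it assumes that
# hadoop_error in hadoop-functions.sh simply writes its arguments to
# stderr), you could prefix every error message with a timestamp by
# redefining it here:
#
#function hadoop_error
#{
#  # prepend a timestamp, then echo to stderr as before
#  echo "$(date +%Y-%m-%dT%H:%M:%S%z) $*" 1>&2
#}
#
#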
# For a fuller example, if you want to add compression to the
# rotation method for the .out files that daemons generate, you can do
# that by redefining the hadoop_rotate_log function, i.e., by
# uncommenting this code block:
#function hadoop_rotate_log
#{
#  local log=$1;
#  local num=${2:-5};
#
#  if [[ -f "${log}" ]]; then
#    while [[ ${num} -gt 1 ]]; do
#      #shellcheck disable=SC2086
#      let prev=${num}-1
#      if [[ -f "${log}.${prev}.gz" ]]; then
#        mv "${log}.${prev}.gz" "${log}.${num}.gz"
#      fi
#      num=${prev}
#    done
#    mv "${log}" "${log}.${num}"
#    gzip -9 "${log}.${num}"
#  fi
#}
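#
# With this override in place, the most recent .out file becomes .out.1.gz
# and up to five compressed generations are kept (num defaults to 5);
# gzip -9 trades a bit of CPU at rotation time for the smallest archives.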
#
#
#
# Example: finding java
#
# By default, Hadoop assumes that $JAVA_HOME is always defined
# outside of its configuration. Eons ago, Apple standardized
# on a helper program called java_home to find it for you.
#
#function hadoop_java_setup
#{
#
#  if [[ -z "${JAVA_HOME}" ]]; then
#    case $HADOOP_OS_TYPE in
#      Darwin*)
#        JAVA_HOME=$(/usr/libexec/java_home)
#      ;;
#    esac
#  fi
#
#  # Bail if we did not detect it
#  if [[ -z "${JAVA_HOME}" ]]; then
#    echo "ERROR: JAVA_HOME is not set and could not be found." 1>&2
#    exit 1
#  fi
#
#  if [[ ! -d "${JAVA_HOME}" ]]; then
#    echo "ERROR: JAVA_HOME (${JAVA_HOME}) does not exist." 1>&2
#    exit 1
#  fi
#
#  JAVA="${JAVA_HOME}/bin/java"
#
#  if [[ ! -x ${JAVA} ]]; then
#    echo "ERROR: ${JAVA} is not executable." 1>&2
#    exit 1
#  fi
#}
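#
# A purely illustrative variant for Linux hosts: a sketch that assumes a
# java binary is already on the PATH and that GNU readlink -f is
# available. Adapt it before uncommenting.
#
#function hadoop_java_setup
#{
#  if [[ -z "${JAVA_HOME}" ]] && command -v java > /dev/null 2>&1; then
#    # resolve symlink chains such as /etc/alternatives/java back to the
#    # real JDK directory and strip the trailing /bin/java
#    JAVA_HOME=$(dirname "$(dirname "$(readlink -f "$(command -v java)")")")
#  fi
#
#  if [[ -z "${JAVA_HOME}" || ! -x "${JAVA_HOME}/bin/java" ]]; then
#    echo "ERROR: JAVA_HOME is not set and could not be found." 1>&2
#    exit 1
#  fi
#
#  JAVA="${JAVA_HOME}/bin/java"
#}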
#
# Example: efficient command execution for the workers
#
# To improve performance, you can use xargs -P
# instead of the for loop, if supported.
#
#function hadoop_connect_to_hosts_without_pdsh
#{
#  local tmpslvnames
#
#  # the command to run on each worker arrives as this function's arguments
#  local params="$*"
#
#  # quoting here gets tricky. it's easier to push it into a function
#  # so that we don't have to deal with it. However...
#  # xargs can't use a function so instead we'll export it out
#  # and force it into a subshell
#  # moral of the story: just use pdsh.
#  export -f hadoop_actual_ssh
#  export HADOOP_SSH_OPTS
#
#  # xargs is used with option -I to replace the placeholder in the argument
#  # list with each hostname read from stdin/pipe. But it considers one
#  # line to be one argument while reading from stdin/pipe, so place each
#  # hostname on its own line before passing it via the pipe.
#  tmpslvnames=$(echo "${HADOOP_WORKER_NAMES}" | tr ' ' '\n')
#  echo "${tmpslvnames}" | \
#    xargs -n 1 -P"${HADOOP_SSH_PARALLEL}" \
#      -I {} bash -c -- "hadoop_actual_ssh {} ${params}"
#  wait
#}
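#
# HADOOP_WORKER_NAMES and HADOOP_SSH_PARALLEL are expected to be set by the
# surrounding Hadoop scripts and configuration (HADOOP_SSH_PARALLEL is
# normally exported from hadoop-env.sh); the latter caps how many ssh
# sessions xargs runs at once.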