#!/bin/bash
#
# Run a shell command on all slave hosts.
#
# Environment Variables
#
#   HADOOP_SLAVES       File naming remote hosts.
#     Default is ${HADOOP_CONF_DIR}/slaves.
#   HADOOP_CONF_DIR     Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#   HADOOP_SLAVE_SLEEP  Seconds to sleep between spawning remote commands.
#   HADOOP_SSH_OPTS     Options passed to ssh when running remote commands.
##
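#
# Example: slaves.sh uptime   (runs "uptime" on every host in the slaves file)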
usage="Usage: slaves.sh command..."
# if no args specified, show usage
if [ $# -le 0 ]; then
  echo $usage
  exit 1
fi
# resolve links - $0 may be a softlink
this="$0"
while [ -h "$this" ]; do
  ls=`ls -ld "$this"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '.*/.*' > /dev/null; then
    this="$link"
  else
    this=`dirname "$this"`/"$link"
  fi
done
# the root of the Hadoop installation
HADOOP_HOME=`dirname "$this"`/..
# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
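# Source hadoop-env.sh if present; it may set HADOOP_SLAVES, HADOOP_SSH_OPTS
# and other per-installation overrides.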
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi
if [ "$HADOOP_SLAVES" = "" ]; then
  export HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
fi
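# Run the command on each slave in the background, prefixing every line of
# output with the host name. "${@// /\\ }" escapes any spaces inside the
# arguments so they survive word splitting by the remote shell.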
for slave in `cat "$HADOOP_SLAVES"`; do
  ssh $HADOOP_SSH_OPTS $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
  if [ "$HADOOP_SLAVE_SLEEP" != "" ]; then
    sleep $HADOOP_SLAVE_SLEEP
  fi
done
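# wait for all of the background ssh processes to finish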
wait