Merge pull request #181 from ctubbsii/minimize-output-128

Minimize output 128
diff --git a/README.md b/README.md
index e89b9c3..d290902 100644
--- a/README.md
+++ b/README.md
@@ -37,7 +37,7 @@
 git clone https://github.com/apache/fluo-uno.git
 cd fluo-uno
 ./bin/uno fetch accumulo            # Fetches binary tarballs of Accumulo and its dependencies
-./bin/uno setup accumulo            # Sets up Accumulo and its dependencies (Hadoop & Zookeeper)
+./bin/uno setup accumulo            # Sets up Accumulo and its dependencies (Hadoop & ZooKeeper)
 eval "$(./bin/uno env)"             # Bash-specific command that sets up current shell
 ```
 
@@ -74,7 +74,7 @@
 
 All commands are run using the `uno` script in `bin/`. Uno has a command that helps you configure
 your shell so that you can run commands from any directory and easily set common environment
-variables in your shell for Uno, Hadoop, Zookeeper, Fluo, and Spark. Run the following command to
+variables in your shell for Uno, Hadoop, ZooKeeper, Fluo, and Spark. Run the following command to
 print this shell configuration. You can also add `--paths` or `--vars` to the command below to limit
 output to PATH or environment variable configuration:
 
@@ -106,7 +106,7 @@
 The `setup` command will install the downloaded tarballs to the directory set by `$INSTALL` in your
 `uno.conf` and run your local development cluster. The command can be run in several different ways:
 
-1. Sets up Apache Accumulo and its dependencies of Hadoop, Zookeeper. This starts all processes and
+1. Sets up Apache Accumulo and its dependencies (Hadoop & ZooKeeper). This starts all processes and
    will wipe Accumulo/Hadoop if this command was run previously.
 
         uno setup accumulo
@@ -130,7 +130,7 @@
 
         uno setup spark
 
-6. Sets up all components (Fluo, Accumulo, Hadoop, Zookeeper, Spark, metrics service).
+6. Sets up all components (Fluo, Accumulo, Hadoop, ZooKeeper, Spark, metrics service).
 
         uno setup all
 
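The `env` command described in the README hunks above can be previewed before it touches the current shell. A minimal usage sketch (flags as documented above; the exact PATH entries and variables printed depend on your uno.conf):

```bash
./bin/uno env           # print the full shell configuration
./bin/uno env --paths   # limit output to PATH configuration
./bin/uno env --vars    # limit output to environment variables
eval "$(./bin/uno env)" # bash-specific: apply it to the current shell
```
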
diff --git a/bin/impl/fetch.sh b/bin/impl/fetch.sh
index 980543a..7163095 100755
--- a/bin/impl/fetch.sh
+++ b/bin/impl/fetch.sh
@@ -159,8 +159,8 @@
   echo "Usage: uno fetch <component>"
   echo -e "\nPossible components:\n"
   echo "    all        Fetches all binary tarballs of the following components"
-  echo "    accumulo   Downloads Accumulo, Hadoop & Zookeeper. Builds Accumulo if repo set in uno.conf"
-  echo "    fluo       Downloads Fluo, Accumulo, Hadoop & Zookeeper. Builds Fluo or Accumulo if repo set in uno.conf"
+  echo "    accumulo   Downloads Accumulo, Hadoop & ZooKeeper. Builds Accumulo if repo set in uno.conf"
+  echo "    fluo       Downloads Fluo, Accumulo, Hadoop & ZooKeeper. Builds Fluo or Accumulo if repo set in uno.conf"
   echo "    metrics    Downloads InfluxDB and Grafana"
   echo "    spark      Downloads Spark"
   echo "Options:"
diff --git a/bin/impl/setup-accumulo.sh b/bin/impl/setup-accumulo.sh
index c26a80c..ec179ad 100755
--- a/bin/impl/setup-accumulo.sh
+++ b/bin/impl/setup-accumulo.sh
@@ -21,8 +21,8 @@
 fi
 
 if [[ $1 != "--no-deps" ]]; then
-  "$UNO_HOME"/bin/impl/setup-hadoop.sh
-  "$UNO_HOME"/bin/impl/setup-zookeeper.sh
+  run_setup_script Hadoop
+  run_setup_script ZooKeeper
 fi
 
 pkill -f accumulo.start
@@ -30,7 +30,7 @@
 # stop if any command fails
 set -e
 
-echo "Setting up Apache Accumulo at $ACCUMULO_HOME"
+echo >&0 "Setting up Apache Accumulo at $ACCUMULO_HOME"
 
 rm -rf "$INSTALL"/accumulo-*
 rm -f "$ACCUMULO_LOG_DIR"/*
@@ -78,7 +78,7 @@
     echo "accumulo.sink.graphite.server_port=2004"
     echo "accumulo.sink.graphite.metrics_prefix=accumulo"
   } >> "$conf"/"$metrics_props"
-  "$UNO_HOME"/bin/impl/setup-metrics.sh
+  run_setup_script Metrics
 fi
 
 if [[ "$ACCUMULO_USE_NATIVE_MAP" == "true" ]]; then
@@ -98,4 +98,3 @@
   "$ACCUMULO_HOME"/bin/accumulo-cluster start
 fi
 
-echo "Apache Accumulo setup complete"
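The `>&0` redirections introduced above (and in the other setup scripts below) exist because `run_setup_script`, added in `util.sh` further down, captures each script's stdout and stderr into per-component log files; writing to file descriptor 0, which normally stays attached to the invoking terminal, lets progress and warning lines remain visible. A minimal sketch of the pattern, using hypothetical file names and assuming fd 0 is a terminal opened read/write:

```bash
#!/usr/bin/env bash
# child.sh (hypothetical): the caller redirects fd 1 and fd 2 to log files,
# but fd 0 is left alone, so this line still reaches the console.
echo >&0 "Setting up component..."
echo "verbose detail that belongs in the log"

# caller (hypothetical):
#   ./child.sh 1>child.stdout 2>child.stderr
```
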
diff --git a/bin/impl/setup-fluo-yarn.sh b/bin/impl/setup-fluo-yarn.sh
index ae896d3..ddcbafb 100755
--- a/bin/impl/setup-fluo-yarn.sh
+++ b/bin/impl/setup-fluo-yarn.sh
@@ -24,11 +24,11 @@
 fi
 
 if [[ $1 != "--no-deps" ]]; then
-  "$UNO_HOME"/bin/impl/setup-fluo.sh
+  run_setup_script Fluo
 fi
 
 if [[ -f "$DOWNLOADS/$FLUO_YARN_TARBALL" ]]; then
-  echo "Setting up Apache Fluo YARN launcher at $FLUO_YARN_HOME"
+  echo >&0 "Setting up Apache Fluo YARN launcher at $FLUO_YARN_HOME"
   # Don't stop if pkills fail
   set +e
   pkill -f "fluo\.yarn"
@@ -51,10 +51,8 @@
 
   "$FLUO_YARN_HOME"/lib/fetch.sh
 
-  echo "Apache Fluo YARN launcher setup complete"
-
   stty sane
 else
-  echo "WARNING: Apache Fluo YARN launcher tarball '$FLUO_YARN_TARBALL' was not found in $DOWNLOADS."
-  echo "Apache Fluo YARN launcher will not be set up!"
+  echo >&0 "WARNING: Apache Fluo YARN launcher tarball '$FLUO_YARN_TARBALL' was not found in $DOWNLOADS."
+  echo >&0 "Apache Fluo YARN launcher will not be set up!"
 fi
diff --git a/bin/impl/setup-fluo.sh b/bin/impl/setup-fluo.sh
index b1ee442..424a74d 100755
--- a/bin/impl/setup-fluo.sh
+++ b/bin/impl/setup-fluo.sh
@@ -24,11 +24,11 @@
 fi
 
 if [[ $1 != "--no-deps" ]]; then
-  "$UNO_HOME"/bin/impl/setup-accumulo.sh
+  run_setup_script Accumulo
 fi
 
 if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
-  echo "Setting up Apache Fluo at $FLUO_HOME"
+  echo >&0 "Setting up Apache Fluo at $FLUO_HOME"
   # Don't stop if pkills fail
   set +e
   pkill -f fluo.yarn
@@ -70,10 +70,8 @@
 
   "$FLUO_HOME"/lib/fetch.sh extra
 
-  echo "Apache Fluo setup complete"
-
   stty sane
 else
-  echo "WARNING: Apache Fluo tarball '$FLUO_TARBALL' was not found in $DOWNLOADS."
-  echo "Apache Fluo will not be set up!"
+  echo >&0 "WARNING: Apache Fluo tarball '$FLUO_TARBALL' was not found in $DOWNLOADS."
+  echo >&0 "Apache Fluo will not be set up!"
 fi
diff --git a/bin/impl/setup-hadoop.sh b/bin/impl/setup-hadoop.sh
index d2bf0e0..d8a81ff 100755
--- a/bin/impl/setup-hadoop.sh
+++ b/bin/impl/setup-hadoop.sh
@@ -24,7 +24,7 @@
 # stop if any command fails
 set -e
 
-echo "Setting up Apache Hadoop at $HADOOP_PREFIX"
+echo >&0 "Setting up Apache Hadoop at $HADOOP_PREFIX"
 
 rm -rf "$INSTALL"/hadoop-*
 rm -f "$HADOOP_LOG_DIR"/*
@@ -51,4 +51,3 @@
 "$HADOOP_PREFIX"/sbin/start-dfs.sh
 "$HADOOP_PREFIX"/sbin/start-yarn.sh
 
-echo "Apache Hadoop setup complete"
diff --git a/bin/impl/setup-metrics.sh b/bin/impl/setup-metrics.sh
index f4b3c45..e045ca1 100755
--- a/bin/impl/setup-metrics.sh
+++ b/bin/impl/setup-metrics.sh
@@ -17,11 +17,11 @@
 source "$UNO_HOME"/bin/impl/util.sh
 
 if [[ "$OSTYPE" == "darwin"* ]]; then
-  echo "The metrics services (InfluxDB and Grafana) are not supported on Mac OS X at this time."
+  echo >&0 "The metrics services (InfluxDB and Grafana) are not supported on Mac OS X at this time."
   exit 1
 fi
 
-echo "Killing InfluxDB & Grafana (if running)"
+echo >&0 "Killing InfluxDB & Grafana (if running)"
 pkill -f influxdb
 pkill -f grafana-server
 
@@ -35,36 +35,36 @@
 INFLUXDB_TARBALL=influxdb-"$INFLUXDB_VERSION".tar.gz
 GRAFANA_TARBALL=grafana-"$GRAFANA_VERSION".tar.gz
 if [[ ! -f "$DOWNLOADS/build/$INFLUXDB_TARBALL" ]]; then
-  echo "InfluxDB tarball $INFLUXDB_TARBALL does not exists in downloads/build/"
+  echo >&0 "InfluxDB tarball $INFLUXDB_TARBALL does not exist in downloads/build/"
   exit 1
 fi
 if [[ ! -f "$DOWNLOADS/build/$GRAFANA_TARBALL" ]]; then
-  echo "Grafana tarball $GRAFANA_TARBALL does not exists in downloads/build"
+  echo >&0 "Grafana tarball $GRAFANA_TARBALL does not exist in downloads/build/"
   exit 1
 fi
 
 if [[ ! -d "$FLUO_HOME" ]]; then
-  echo "Fluo must be installed before setting up metrics"
+  echo >&0 "Fluo must be installed before setting up metrics"
   exit 1
 fi
 
 # stop if any command fails
 set -e
 
-echo "Removing previous versions of InfluxDB & Grafana"
+echo >&0 "Removing previous versions of InfluxDB & Grafana"
 rm -rf "$INSTALL"/influxdb-*
 rm -rf "$INSTALL"/grafana-*
 
-echo "Remove previous log and data dirs"
+echo >&0 "Removing previous log and data dirs"
 rm -f "$LOGS_DIR"/metrics/*
 rm -rf "$DATA_DIR"/influxdb
 mkdir -p "$LOGS_DIR"/metrics
 
-echo "Setting up metrics (influxdb + grafana)..."
+echo >&0 "Setting up metrics (influxdb + grafana)..."
 tar xzf "$DOWNLOADS/build/$INFLUXDB_TARBALL" -C "$INSTALL"
 "$INFLUXDB_HOME"/bin/influxd config -config "$UNO_HOME"/conf/influxdb/influxdb.conf > "$INFLUXDB_HOME"/influxdb.conf
 if [[ ! -f "$INFLUXDB_HOME"/influxdb.conf ]]; then
-  echo "Failed to create $INFLUXDB_HOME/influxdb.conf"
+  echo >&0 "Failed to create $INFLUXDB_HOME/influxdb.conf"
   exit 1
 fi
 $SED "s#DATA_DIR#$DATA_DIR#g" "$INFLUXDB_HOME"/influxdb.conf
@@ -79,7 +79,7 @@
 cp "$UNO_HOME"/conf/grafana/accumulo-dashboard.json "$GRAFANA_HOME"/dashboards/
 "$GRAFANA_HOME"/bin/grafana-server -homepath="$GRAFANA_HOME" 2> /dev/null &
 
-echo "Configuring Fluo to send metrics to InfluxDB"
+echo >&0 "Configuring Fluo to send metrics to InfluxDB"
 if [[ $FLUO_VERSION =~ ^1\.[0-1].*$ ]]; then
   FLUO_PROPS=$FLUO_HOME/conf/fluo.properties
 else
@@ -94,14 +94,14 @@
   echo "fluo.metrics.reporter.graphite.frequency=30"
 } >> "$FLUO_PROPS"
 
-echo "Configuring InfluxDB..."
+echo >&0 "Configuring InfluxDB..."
 sleep 10
 "$INFLUXDB_HOME"/bin/influx -import -path "$FLUO_HOME"/contrib/influxdb/fluo_metrics_setup.txt
 
 # allow commands to fail
 set +e
 
-echo "Configuring Grafana..."
+echo >&0 "Configuring Grafana..."
 
 sleep 5
 
@@ -112,11 +112,11 @@
       --data-binary "$1"
     retcode=$?
     if [[ $retcode != 0 ]]; then
-      echo "Failed to add Grafana data source. Retrying in 5 sec.."
+      echo >&0 "Failed to add Grafana data source. Retrying in 5 sec..."
       sleep 5
     fi
   done
-  echo ""
+  echo >&0 ""
 }
 
 accumulo_data='{"name":"accumulo_metrics","type":"influxdb","url":"http://'
diff --git a/bin/impl/setup-spark.sh b/bin/impl/setup-spark.sh
index c7688c4..3f015f9 100755
--- a/bin/impl/setup-spark.sh
+++ b/bin/impl/setup-spark.sh
@@ -19,11 +19,11 @@
 verify_exist_hash "$SPARK_TARBALL" "$SPARK_HASH"
 
 if [[ ! -d "$HADOOP_PREFIX" ]]; then
-  echo "Apache Hadoop needs to be setup before Apache Spark can be setup."
+  echo >&0 "Apache Hadoop needs to be set up before Apache Spark can be set up."
   exit 1
 fi
 
-echo "Setting up Apache Spark at $SPARK_HOME"
+echo >&0 "Setting up Apache Spark at $SPARK_HOME"
 
 pkill -f org.apache.spark.deploy.history.HistoryServer
 
@@ -45,4 +45,3 @@
 export SPARK_LOG_DIR=$LOGS_DIR/spark
 "$SPARK_HOME"/sbin/start-history-server.sh
 
-echo "Apache Spark setup complete"
diff --git a/bin/impl/setup-zookeeper.sh b/bin/impl/setup-zookeeper.sh
index 919e0ff..9a6ef37 100755
--- a/bin/impl/setup-zookeeper.sh
+++ b/bin/impl/setup-zookeeper.sh
@@ -23,7 +23,7 @@
 # stop if any command fails
 set -e
 
-echo "Setting up Apache Zookeeper at $ZOOKEEPER_HOME"
+echo >&0 "Setting up Apache ZooKeeper at $ZOOKEEPER_HOME"
 rm -rf "$INSTALL"/zookeeper-*
 rm -f "$ZOO_LOG_DIR"/*
 mkdir -p "$ZOO_LOG_DIR"
@@ -36,4 +36,3 @@
 rm -rf "$DATA_DIR"/zookeeper
 "$ZOOKEEPER_HOME"/bin/zkServer.sh start
 
-echo "Apache Zookeeper setup complete"
diff --git a/bin/impl/start.sh b/bin/impl/start.sh
index 4e437c2..8fe8b00 100755
--- a/bin/impl/start.sh
+++ b/bin/impl/start.sh
@@ -26,7 +26,7 @@
       tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
         "$ZOOKEEPER_HOME"/bin/zkServer.sh start
-      else echo "Zookeeper   already running at: $tmp"
+      else echo "ZooKeeper   already running at: $tmp"
       fi
 
       tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
@@ -73,7 +73,7 @@
     tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
     if [[ -z "$tmp" ]]; then
       "$ZOOKEEPER_HOME"/bin/zkServer.sh start
-    else echo "Zookeeper   already running at: $tmp"
+    else echo "ZooKeeper   already running at: $tmp"
     fi
     ;;
   metrics)
@@ -100,9 +100,9 @@
   *)
     echo "Usage: uno start <component> [--no-deps]"
     echo -e "\nPossible components:\n"
-    echo "    accumulo   Start Apache Accumulo plus dependencies: Hadoop, Zookeeper"
+    echo "    accumulo   Start Apache Accumulo plus dependencies: Hadoop, ZooKeeper"
     echo "    hadoop     Start Apache Hadoop"
-    echo "    zookeeper  Start Apache Zookeeper"
+    echo "    zookeeper  Start Apache ZooKeeper"
     echo "    metrics    Start InfluxDB and Grafana"
     echo "Options:"
     echo "    --no-deps  Dependencies will start unless this option is specified. Only works for accumulo component."
diff --git a/bin/impl/stop.sh b/bin/impl/stop.sh
index 030d083..617e70c 100755
--- a/bin/impl/stop.sh
+++ b/bin/impl/stop.sh
@@ -77,9 +77,9 @@
   *)
     echo "Usage: uno stop <component> [--no-deps]"
     echo -e "\nPossible components:\n"
-    echo "    accumulo   Stop Apache Accumulo plus dependencies: Hadoop, Zookeeper"
+    echo "    accumulo   Stop Apache Accumulo plus dependencies: Hadoop, ZooKeeper"
     echo "    hadoop     Stop Apache Hadoop"
-    echo "    zookeeper  Stop Apache Zookeeper"
+    echo "    zookeeper  Stop Apache ZooKeeper"
     echo "Options:"
     echo "    --no-deps  Dependencies will stop unless this option is specified. Only works for accumulo component."
     exit 1
diff --git a/bin/impl/util.sh b/bin/impl/util.sh
index 14bbe7c..ac1a4fa 100755
--- a/bin/impl/util.sh
+++ b/bin/impl/util.sh
@@ -19,7 +19,7 @@
   expected_hash=$(echo "${2// /}" | tr '[:upper:]' '[:lower:]')
 
   if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
-    echo "The tarball $tarball does not exist in downloads/"
+    echo >&0 "The tarball $tarball does not exist in downloads/"
     exit 1
   fi
 
@@ -30,14 +30,14 @@
     64) HASH_CMD='shasum -a 256' ;;
     128) HASH_CMD='shasum -a 512' ;;
     *)
-      echo "Expected checksum ($expected_hash) of $tarball is not an MD5, SHA1, SHA256, or SHA512 sum"
+      echo >&0 "Expected checksum ($expected_hash) of $tarball is not an MD5, SHA1, SHA256, or SHA512 sum"
       exit 1
       ;;
   esac
   actual_hash=$($HASH_CMD "$DOWNLOADS/$tarball" | awk '{print $1}')
 
   if [[ "$actual_hash" != "$expected_hash" ]]; then
-    echo "The actual checksum ($actual_hash) of $tarball does not match the expected checksum ($expected_hash)"
+    echo >&0 "The actual checksum ($actual_hash) of $tarball does not match the expected checksum ($expected_hash)"
     exit 1
   fi
 }
@@ -46,8 +46,16 @@
 function check_dirs() {
   for arg in "$@"; do
     if [[ ! -d "${!arg}" ]]; then
-      echo "$arg=${!arg} is not a valid directory. Please make sure it exists"
+      echo >&0 "$arg=${!arg} is not a valid directory. Please make sure it exists"
       exit 1
     fi
   done
 }
+
+function run_setup_script() {
+  local SCRIP; SCRIP=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
+  local L_DIR; L_DIR="$LOGS_DIR/setup"
+  mkdir -p "$L_DIR"
+  shift
+  "$UNO_HOME/bin/impl/setup-$SCRIP.sh" "$@" 1>"$L_DIR/$SCRIP.stdout" 2>"$L_DIR/$SCRIP.stderr"
+}
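A short sketch of how the new helper is used. The component name is lower-cased and spaces become hyphens via `tr '[:upper:] ' '[:lower:]-'`, so the name selects the matching `setup-*.sh` script, and its output is captured under `$LOGS_DIR/setup` (both `LOGS_DIR` and `UNO_HOME` are assumed to be set by Uno's configuration):

```bash
# "Fluo Yarn" -> "fluo-yarn": runs bin/impl/setup-fluo-yarn.sh --no-deps and writes
#   $LOGS_DIR/setup/fluo-yarn.stdout
#   $LOGS_DIR/setup/fluo-yarn.stderr
run_setup_script "Fluo Yarn" --no-deps
```
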
diff --git a/bin/uno b/bin/uno
index 9500573..94c74ab 100755
--- a/bin/uno
+++ b/bin/uno
@@ -40,41 +40,49 @@
   fi
 	;;
 setup)
+  [[ -n $LOGS_DIR ]] && rm -f "$LOGS_DIR"/setup/*.std{out,err}
+  echo "Beginning setup (detailed logs in $LOGS_DIR/setup)..."
   case "$2" in
     all)
-      "$bin"/impl/setup-fluo.sh
-      "$bin"/impl/setup-spark.sh
-      "$bin"/impl/setup-metrics.sh
+      run_setup_script Fluo
+      run_setup_script Spark
+      run_setup_script Metrics
       ;;
     accumulo)
-      "$bin"/impl/setup-accumulo.sh "$3"
+      run_setup_script Accumulo "$3"
       ;;
     fluo)
-      "$bin"/impl/setup-fluo.sh "$3"
+      run_setup_script Fluo "$3"
       ;;
     fluo-yarn)
-      "$bin"/impl/setup-fluo-yarn.sh "$3"
+      run_setup_script "Fluo Yarn" "$3"
       ;;
     spark)
-      "$bin"/impl/setup-spark.sh
+      run_setup_script Spark
       ;;
     metrics)
-      "$bin"/impl/setup-metrics.sh
+      run_setup_script Metrics
       ;;
     *)
       echo "Usage: uno setup <component> [--no-deps]"
       echo -e "\nPossible components:\n"
       echo "    all        Sets up all of the following components"
-      echo "    accumulo   Sets up Apache Accumulo and its dependencies (Hadoop & Zookeeper)"
+      echo "    accumulo   Sets up Apache Accumulo and its dependencies (Hadoop & ZooKeeper)"
       echo "    spark      Sets up Apache Spark"
-      echo "    fluo       Sets up Apache Fluo and its dependencies (Accumulo, Hadoop, & Zookeeper)"
-      echo "    fluo-yarn  Sets up Apache Fluo YARN and its dependencies (Fluo, Accumulo, Hadoop, & Zookeeper)"
+      echo "    fluo       Sets up Apache Fluo and its dependencies (Accumulo, Hadoop, & ZooKeeper)"
+      echo "    fluo-yarn  Sets up Apache Fluo YARN and its dependencies (Fluo, Accumulo, Hadoop, & ZooKeeper)"
       echo -e "    metrics    Sets up metrics service (InfluxDB + Grafana)\n"
       echo "Options:"
       echo "    --no-deps  Dependencies will be setup unless this option is specified. Only works for fluo & accumulo components."
       exit 1
       ;;
   esac
+  if [[ "$?" == 0 ]]; then
+    echo "Setup complete."
+  else
+    echo "Setup failed!"
+    false
+  fi
   ;;
 kill)
   "$bin"/impl/kill.sh "${@:2}"
@@ -103,8 +111,8 @@
   echo "                         Run 'uno fetch' for a list of possible components."
   echo "  setup <component>      Sets up component and its dependencies (clearing any existing data)"
   echo "                         Run 'uno setup' for list of components."
-  echo "  start <component>      Start Zookeeper, Hadoop, Accumulo, if not running."
-  echo "  stop  <component>      Stop Accumulo, Hadoop, Zookeeper, if running."
+  echo "  start <component>      Start ZooKeeper, Hadoop, Accumulo, if not running."
+  echo "  stop  <component>      Stop Accumulo, Hadoop, ZooKeeper, if running."
   echo "  kill                   Kills all software"
   echo "  ashell                 Runs the Accumulo shell"
   echo "  env                    Prints out shell configuration for PATH and common environment variables."
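Taken together, a setup run now prints only the high-level status to the console while the detail goes to per-component logs. A hedged sketch of the resulting workflow (log file names follow the `run_setup_script` naming; `$LOGS_DIR` comes from your uno.conf):

```bash
./bin/uno setup accumulo                      # "Beginning setup..." then "Setup complete." or "Setup failed!"
ls "$LOGS_DIR"/setup                          # e.g. accumulo.stdout hadoop.stderr zookeeper.stdout ...
tail -n 20 "$LOGS_DIR"/setup/accumulo.stderr  # dig into a failure
```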