Updates for Hadoop 3

* Use HADOOP_HOME in place of HADOOP_PREFIX, which Hadoop 3 deprecates
* Use HADOOP_LOG_DIR for YARN logs instead of a separate YARN_LOG_DIR
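
Both renames follow the same compatibility pattern: export the Hadoop 3 names
unconditionally and set the legacy variables only when a 2.x tarball is
configured. A minimal sketch of that pattern, with illustrative paths rather
than the literal uno.conf contents:

    # assumes INSTALL and HADOOP_VERSION are already defined, as in uno.conf
    export HADOOP_HOME=$INSTALL/hadoop-$HADOOP_VERSION
    export HADOOP_LOG_DIR=$INSTALL/logs/hadoop
    if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
      # Hadoop 2.x scripts still read the older variable names
      export HADOOP_PREFIX=$HADOOP_HOME
      export YARN_LOG_DIR=$HADOOP_LOG_DIR
    fi
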
diff --git a/bin/impl/load-env.sh b/bin/impl/load-env.sh
index b828f9e..df507eb 100755
--- a/bin/impl/load-env.sh
+++ b/bin/impl/load-env.sh
@@ -34,7 +34,7 @@
   exit 1
 fi
 
-HP=$HADOOP_PREFIX
+HH=$HADOOP_HOME
 HC=$HADOOP_CONF_DIR
 ZH=$ZOOKEEPER_HOME
 SH=$SPARK_HOME
@@ -53,8 +53,8 @@
 
 # Confirm that hadoop, accumulo, and zookeeper env variables are not set
 if [[ ! "version env" =~ $1 ]]; then
-  if [[ -n "$HP" && "$HP" != "$HADOOP_PREFIX" ]]; then
-    echo "HADOOP_PREFIX in your shell env '$HP' needs to match your uno uno.conf '$HADOOP_PREFIX'"
+  if [[ -n "$HH" && "$HH" != "$HADOOP_HOME" ]]; then
+    echo "HADOOP_HOME in your shell env '$HH' needs to match your uno uno.conf '$HADOOP_HOME'"
     exit 1
   fi
   if [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]]; then
@@ -115,7 +115,7 @@
 : "${ZOOKEEPER_TARBALL:?"ZOOKEEPER_TARBALL is not set in uno.conf"}"
 : "${FLUO_HOME:?"FLUO_HOME is not set in uno.conf"}"
 : "${ZOOKEEPER_HOME:?"ZOOKEEPER_HOME is not set in uno.conf"}"
-: "${HADOOP_PREFIX:?"HADOOP_PREFIX is not set in uno.conf"}"
+: "${HADOOP_HOME:?"HADOOP_HOME is not set in uno.conf"}"
 : "${ACCUMULO_HOME:?"ACCUMULO_HOME is not set in uno.conf"}"
 : "${ACCUMULO_INSTANCE:?"ACCUMULO_INSTANCE is not set in uno.conf"}"
 : "${ACCUMULO_USER:?"ACCUMULO_USER is not set in uno.conf"}"
@@ -123,7 +123,6 @@
 : "${LOGS_DIR:?"LOGS_DIR is not set in uno.conf"}"
 : "${ACCUMULO_LOG_DIR:?"ACCUMULO_LOG_DIR is not set in uno.conf"}"
 : "${HADOOP_LOG_DIR:?"HADOOP_LOG_DIR is not set in uno.conf"}"
-: "${YARN_LOG_DIR:?"YARN_LOG_DIR is not set in uno.conf"}"
 : "${ZOO_LOG_DIR:?"ZOO_LOG_DIR is not set in uno.conf"}"
 
 hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
diff --git a/bin/impl/print-env.sh b/bin/impl/print-env.sh
index 4c34576..092994f 100755
--- a/bin/impl/print-env.sh
+++ b/bin/impl/print-env.sh
@@ -15,7 +15,11 @@
 # limitations under the License.
 
 if [[ -z "$1" || "$1" == "--vars" ]]; then
-  echo "export HADOOP_PREFIX=\"$HADOOP_PREFIX\""
+  if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+    echo "export HADOOP_PREFIX=\"$HADOOP_HOME\""
+  else
+    echo "export HADOOP_HOME=\"$HADOOP_HOME\""
+  fi
   echo "export HADOOP_CONF_DIR=\"$HADOOP_CONF_DIR\""
   echo "export ZOOKEEPER_HOME=\"$ZOOKEEPER_HOME\""
   echo "export SPARK_HOME=\"$SPARK_HOME\""
@@ -25,7 +29,7 @@
 fi
 
 if [[ -z "$1" || "$1" == "--paths" ]]; then
-  echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_PREFIX/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
+  echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_HOME/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
   if [[ -d "$SPARK_HOME" ]]; then
     echo -n ":$SPARK_HOME/bin"
   fi
diff --git a/bin/impl/setup-accumulo.sh b/bin/impl/setup-accumulo.sh
index b05b2a6..a924eb2 100755
--- a/bin/impl/setup-accumulo.sh
+++ b/bin/impl/setup-accumulo.sh
@@ -57,7 +57,7 @@
 fi
 $SED "s#localhost#$UNO_HOST#" "$conf/masters" "$conf/monitor" "$conf/gc"
 $SED "s#export ZOOKEEPER_HOME=[^ ]*#export ZOOKEEPER_HOME=$ZOOKEEPER_HOME#" "$conf"/accumulo-env.sh
-$SED "s#export HADOOP_PREFIX=[^ ]*#export HADOOP_PREFIX=$HADOOP_PREFIX#" "$conf"/accumulo-env.sh
+$SED "s#export HADOOP_PREFIX=[^ ]*#export HADOOP_PREFIX=$HADOOP_HOME#" "$conf"/accumulo-env.sh
 $SED "s#export ACCUMULO_LOG_DIR=[^ ]*#export ACCUMULO_LOG_DIR=$ACCUMULO_LOG_DIR#" "$conf"/accumulo-env.sh
 if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
   $SED "s#ACCUMULO_TSERVER_OPTS=.*#ACCUMULO_TSERVER_OPTS=\"-Xmx$ACCUMULO_TSERV_MEM -Xms$ACCUMULO_TSERV_MEM\"#" "$conf"/accumulo-env.sh
@@ -91,7 +91,7 @@
   fi
 fi
 
-"$HADOOP_PREFIX"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
+"$HADOOP_HOME"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
 "$ACCUMULO_HOME"/bin/accumulo init --clear-instance-name --instance-name "$ACCUMULO_INSTANCE" --password "$ACCUMULO_PASSWORD"
 
 if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
diff --git a/bin/impl/setup-fluo-yarn.sh b/bin/impl/setup-fluo-yarn.sh
index 8bf0d31..bd98b0a 100755
--- a/bin/impl/setup-fluo-yarn.sh
+++ b/bin/impl/setup-fluo-yarn.sh
@@ -46,7 +46,7 @@
   $SED "s/.*fluo.yarn.worker.max.memory.mb=.*/fluo.yarn.worker.max.memory.mb=$FLUO_WORKER_MEM_MB/g" "$yarn_props"
   $SED "s/.*fluo.yarn.worker.instances=.*/fluo.yarn.worker.instances=$FLUO_WORKER_INSTANCES/g" "$yarn_props"
   $SED "s#FLUO_HOME=.*#FLUO_HOME=$FLUO_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
-  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_PREFIX#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
+  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
   $SED "s#ZOOKEEPER_HOME=.*#ZOOKEEPER_HOME=$ZOOKEEPER_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
 
   "$FLUO_YARN_HOME"/lib/fetch.sh
diff --git a/bin/impl/setup-fluo.sh b/bin/impl/setup-fluo.sh
index bd487e8..19abe54 100755
--- a/bin/impl/setup-fluo.sh
+++ b/bin/impl/setup-fluo.sh
@@ -64,7 +64,7 @@
   $SED "s/.*fluo.yarn.worker.max.memory.mb=.*/fluo.yarn.worker.max.memory.mb=$FLUO_WORKER_MEM_MB/g" "$fluo_props"
   $SED "s/.*fluo.yarn.worker.instances=.*/fluo.yarn.worker.instances=$FLUO_WORKER_INSTANCES/g" "$fluo_props"
 
-  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_PREFIX#g" "$FLUO_HOME"/conf/fluo-env.sh
+  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
   $SED "s#ACCUMULO_HOME=.*o#ACCUMULO_HOME=$ACCUMULO_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
   $SED "s#ZOOKEEPER_HOME=.*#ZOOKEEPER_HOME=$ZOOKEEPER_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
 
diff --git a/bin/impl/setup-hadoop.sh b/bin/impl/setup-hadoop.sh
index 483223e..2f8f6f8 100755
--- a/bin/impl/setup-hadoop.sh
+++ b/bin/impl/setup-hadoop.sh
@@ -24,30 +24,32 @@
 # stop if any command fails
 set -e
 
-print_to_console "Setting up Apache Hadoop at $HADOOP_PREFIX"
+print_to_console "Setting up Apache Hadoop at $HADOOP_HOME"
+print_to_console "Apache Hadoop logs are at $HADOOP_LOG_DIR"
 
 rm -rf "$INSTALL"/hadoop-*
 rm -f "$HADOOP_LOG_DIR"/*
-rm -rf "$YARN_LOG_DIR"/application_*
-rm -f "$YARN_LOG_DIR"/*
+rm -rf "$HADOOP_LOG_DIR"/application_*
 rm -rf "$DATA_DIR"/hadoop
 mkdir -p "$HADOOP_LOG_DIR"
-mkdir -p "$YARN_LOG_DIR"
 
 tar xzf "$DOWNLOADS/$HADOOP_TARBALL" -C "$INSTALL"
 
-hadoop_conf="$HADOOP_PREFIX"/etc/hadoop
+hadoop_conf="$HADOOP_HOME"/etc/hadoop
 cp "$UNO_HOME"/conf/hadoop/* "$hadoop_conf/"
 $SED "s#UNO_HOST#$UNO_HOST#g" "$hadoop_conf/core-site.xml" "$hadoop_conf/hdfs-site.xml" "$hadoop_conf/yarn-site.xml"
 $SED "s#DATA_DIR#$DATA_DIR#g" "$hadoop_conf/hdfs-site.xml" "$hadoop_conf/yarn-site.xml" "$hadoop_conf/mapred-site.xml"
-$SED "s#YARN_LOGS#$YARN_LOG_DIR#g" "$hadoop_conf/yarn-site.xml"
+$SED "s#HADOOP_LOG_DIR#$HADOOP_LOG_DIR#g" "$hadoop_conf/yarn-site.xml"
 $SED "s#YARN_NM_MEM_MB#$YARN_NM_MEM_MB#g" "$hadoop_conf/yarn-site.xml"
 $SED "s#YARN_NM_CPU_VCORES#$YARN_NM_CPU_VCORES#g" "$hadoop_conf/yarn-site.xml"
-$SED "s#\#export HADOOP_LOG_DIR=[^ ]*#export HADOOP_LOG_DIR=$HADOOP_LOG_DIR#g" "$hadoop_conf/hadoop-env.sh"
-$SED "s#\${JAVA_HOME}#${JAVA_HOME}#g" "$hadoop_conf/hadoop-env.sh"
-$SED "s#YARN_LOG_DIR=[^ ]*#YARN_LOG_DIR=$YARN_LOG_DIR#g" "$hadoop_conf/yarn-env.sh"
 
-"$HADOOP_PREFIX"/bin/hdfs namenode -format
-"$HADOOP_PREFIX"/sbin/start-dfs.sh
-"$HADOOP_PREFIX"/sbin/start-yarn.sh
+echo "export JAVA_HOME=$JAVA_HOME" >> "$hadoop_conf/hadoop-env.sh"
+echo "export HADOOP_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/hadoop-env.sh"
+if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+  echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/yarn-env.sh"
+fi
+
+"$HADOOP_HOME"/bin/hdfs namenode -format
+"$HADOOP_HOME"/sbin/start-dfs.sh
+"$HADOOP_HOME"/sbin/start-yarn.sh
 
diff --git a/bin/impl/setup-spark.sh b/bin/impl/setup-spark.sh
index 46f8b5c..efea054 100755
--- a/bin/impl/setup-spark.sh
+++ b/bin/impl/setup-spark.sh
@@ -18,7 +18,7 @@
 
 verify_exist_hash "$SPARK_TARBALL" "$SPARK_HASH"
 
-if [[ ! -d "$HADOOP_PREFIX" ]]; then
+if [[ ! -d "$HADOOP_HOME" ]]; then
   print_to_console "Apache Hadoop needs to be setup before Apache Spark can be setup."
   exit 1
 fi
diff --git a/bin/impl/start.sh b/bin/impl/start.sh
index 8fe8b00..6117e13 100755
--- a/bin/impl/start.sh
+++ b/bin/impl/start.sh
@@ -21,7 +21,7 @@
     check_dirs ACCUMULO_HOME
 
     if [[ "$2" != "--no-deps" ]]; then
-      check_dirs ZOOKEEPER_HOME HADOOP_PREFIX
+      check_dirs ZOOKEEPER_HOME HADOOP_HOME
 
       tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
@@ -31,13 +31,13 @@
 
       tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
-        "$HADOOP_PREFIX"/sbin/start-dfs.sh
+        "$HADOOP_HOME"/sbin/start-dfs.sh
       else echo "Hadoop DFS  already running at: $tmp"  
       fi
       
       tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
-        "$HADOOP_PREFIX"/sbin/start-yarn.sh
+        "$HADOOP_HOME"/sbin/start-yarn.sh
       else echo "Hadoop Yarn already running at: $tmp"  
       fi
     fi
@@ -53,17 +53,17 @@
     fi
     ;;
   hadoop)
-    check_dirs HADOOP_PREFIX
+    check_dirs HADOOP_HOME
     
     tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
     if [[ -z "$tmp" ]]; then
-      "$HADOOP_PREFIX"/sbin/start-dfs.sh
+      "$HADOOP_HOME"/sbin/start-dfs.sh
     else echo "Hadoop DFS  already running at: $tmp"  
     fi
 
     tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
     if [[ -z "$tmp" ]]; then
-      "$HADOOP_PREFIX"/sbin/start-yarn.sh
+      "$HADOOP_HOME"/sbin/start-yarn.sh
     else echo "Hadoop Yarn already running at: $tmp"  
     fi
     ;;
diff --git a/bin/impl/stop.sh b/bin/impl/stop.sh
index 617e70c..ac4af5c 100755
--- a/bin/impl/stop.sh
+++ b/bin/impl/stop.sh
@@ -29,14 +29,14 @@
     fi
 
     if [[ "$2" != "--no-deps" ]]; then
-      check_dirs ZOOKEEPER_HOME HADOOP_PREFIX
+      check_dirs ZOOKEEPER_HOME HADOOP_HOME
 
       if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
-        "$HADOOP_PREFIX"/sbin/stop-yarn.sh
+        "$HADOOP_HOME"/sbin/stop-yarn.sh
       fi
 
       if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
-        "$HADOOP_PREFIX"/sbin/stop-dfs.sh
+        "$HADOOP_HOME"/sbin/stop-dfs.sh
       fi
 
       if [[ ! -z "$(pgrep -f QuorumPeerMain)" ]]; then
@@ -45,14 +45,14 @@
     fi
     ;;
   hadoop)
-    check_dirs HADOOP_PREFIX
+    check_dirs HADOOP_HOME
     
     if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
-      "$HADOOP_PREFIX"/sbin/stop-yarn.sh
+      "$HADOOP_HOME"/sbin/stop-yarn.sh
     fi
 
     if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
-      "$HADOOP_PREFIX"/sbin/stop-dfs.sh
+      "$HADOOP_HOME"/sbin/stop-dfs.sh
     fi
     ;;
   zookeeper)
diff --git a/conf/hadoop/yarn-site.xml b/conf/hadoop/yarn-site.xml
index c3ccb43..465122d 100644
--- a/conf/hadoop/yarn-site.xml
+++ b/conf/hadoop/yarn-site.xml
@@ -31,7 +31,7 @@
   </property>
   <property>
     <name>yarn.nodemanager.log-dirs</name>
-    <value>YARN_LOGS</value>
+    <value>HADOOP_LOG_DIR</value>
   </property>
   <property>
     <name>yarn.nodemanager.aux-services</name>
diff --git a/conf/spark/spark-env.sh b/conf/spark/spark-env.sh
index ccbd7ff..016dd17 100755
--- a/conf/spark/spark-env.sh
+++ b/conf/spark/spark-env.sh
@@ -20,5 +20,5 @@
 # This file is sourced when running various Spark programs.
 # Copy it as spark-env.sh and edit that to configure Spark for your site.
 
-SPARK_DIST_CLASSPATH=$("$HADOOP_PREFIX"/bin/hadoop classpath)
+SPARK_DIST_CLASSPATH=$("$HADOOP_HOME"/bin/hadoop classpath)
 export SPARK_DIST_CLASSPATH
diff --git a/conf/uno.conf b/conf/uno.conf
index 5fc87f2..8f4c4a4 100644
--- a/conf/uno.conf
+++ b/conf/uno.conf
@@ -45,7 +45,7 @@
 # If set, 'uno fetch' will build (instead of downloading) an Accumulo tarball
 # from that directory and copy it to the downloads directory.
 
-#export ACCUMULO_REPO=
+#export ACCUMULO_REPO=/path/to/accumulo
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -62,7 +62,7 @@
 # If set, 'uno fetch' will build (rather than download) a Fluo tarball
 # from that directory and copy it to the downloads directory.
 
-#export FLUO_REPO=
+#export FLUO_REPO=/path/to/fluo
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -79,7 +79,7 @@
 # If set, 'uno fetch' will build (rather than download) a Fluo YARN tarball
 # from that directory and copy it to the downloads directory.
 
-#export FLUO_YARN_REPO=
+#export FLUO_YARN_REPO=/path/to/fluo-yarn
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -99,18 +99,20 @@
 export DATA_DIR=$INSTALL/data
 # Home directories
 export ZOOKEEPER_HOME=$INSTALL/zookeeper-$ZOOKEEPER_VERSION
-export HADOOP_PREFIX=$INSTALL/hadoop-$HADOOP_VERSION
+export HADOOP_HOME=$INSTALL/hadoop-$HADOOP_VERSION
+if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+  export HADOOP_PREFIX=$HADOOP_HOME
+fi
 export ACCUMULO_HOME=$INSTALL/accumulo-$ACCUMULO_VERSION
 export SPARK_HOME=$INSTALL/spark-$SPARK_VERSION-bin-without-hadoop
 export FLUO_HOME=$INSTALL/fluo-$FLUO_VERSION
 export FLUO_YARN_HOME=$INSTALL/fluo-yarn-$FLUO_YARN_VERSION
 # Config directories
-export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
+export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
 # Log directories
 export LOGS_DIR=$INSTALL/logs
 export ACCUMULO_LOG_DIR=$LOGS_DIR/accumulo
 export HADOOP_LOG_DIR=$LOGS_DIR/hadoop
-export YARN_LOG_DIR=$LOGS_DIR/yarn
 export ZOO_LOG_DIR=$LOGS_DIR/zookeeper
 
 # Accumulo configuration