Update CI checks for script QA (#306)
* Fix shellcheck issue
* Add check for formatting scripts with shfmt
* Format scripts using shfmt
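
To apply the same formatting locally before pushing, contributors can run shfmt with the write flag instead of the check flags. A minimal sketch, assuming shfmt 3.x is on PATH and the command is run from the repository root:

  # rewrite all shell scripts in place, using the same style options as CI
  shfmt -ln bash -w -i 2 -ci -s .
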
diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml
index a9e7ca8..177d637 100644
--- a/.github/workflows/shellcheck.yaml
+++ b/.github/workflows/shellcheck.yaml
@@ -7,7 +7,7 @@
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
@@ -25,7 +25,23 @@
pull_request:
branches: [ '*' ]
+permissions:
+ contents: read
+
jobs:
+ shfmt:
+ name: shfmt
+ timeout-minutes: 3
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Show the first log message
+ run: git log -n1
+ - name: Install shfmt
+ run: contrib/ci/install-shfmt.sh
+ - name: Checking formatting of all scripts
+ run: contrib/ci/run-shfmt.sh
+
shellcheck:
name: ShellCheck
timeout-minutes: 3
@@ -33,5 +49,5 @@
steps:
- uses: actions/checkout@v4
- name: Running shellcheck on bin/** and conf/uno.conf
- run: contrib/run-shellcheck
+ run: contrib/ci/run-shellcheck.sh
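
Both new jobs call helper scripts that also work outside of GitHub Actions. A rough local equivalent (assumes a Linux host; the installer uses sudo to place shfmt in /usr/local/bin):

  contrib/ci/install-shfmt.sh   # downloads shfmt 3.4.3 and marks it executable
  contrib/ci/run-shfmt.sh       # exits non-zero if any script is not formatted
  contrib/ci/run-shellcheck.sh  # lints conf/uno.conf plus every file under bin/
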
diff --git a/bin/impl/commands.sh b/bin/impl/commands.sh
index f8c9d51..4f8ff0f 100755
--- a/bin/impl/commands.sh
+++ b/bin/impl/commands.sh
@@ -22,7 +22,7 @@
function uno_install_main() {
case "$1" in
- accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ accumulo | hadoop | fluo | fluo-yarn | zookeeper)
if install_component "$@"; then
echo "Installation of $1 complete."
else
@@ -42,7 +42,7 @@
echo "Running $1 (detailed logs in $LOGS_DIR/setup)..."
save_console_fd
case "$1" in
- accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ accumulo | hadoop | fluo | fluo-yarn | zookeeper)
if run_component "$@"; then
echo "Running $1 complete."
else
@@ -62,7 +62,7 @@
echo "Setting up $1 (detailed logs in $LOGS_DIR/setup)..."
save_console_fd
case "$1" in
- accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ accumulo | hadoop | fluo | fluo-yarn | zookeeper)
if setup_component "$@"; then
echo "Setup of $1 complete."
else
@@ -108,11 +108,11 @@
fi
if [[ -z $1 || $1 == '--paths' ]]; then
echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_HOME/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
- [[ -d "$SPARK_HOME" ]] && echo -n ":$SPARK_HOME/bin"
- [[ -d "$FLUO_HOME" ]] && echo -n ":$FLUO_HOME/bin"
- [[ -d "$FLUO_YARN_HOME" ]] && echo -n ":$FLUO_YARN_HOME/bin"
- [[ -d "$INFLUXDB_HOME" ]] && echo -n ":$INFLUXDB_HOME/bin"
- [[ -d "$GRAFANA_HOME" ]] && echo -n ":$GRAFANA_HOME/bin"
+ [[ -d $SPARK_HOME ]] && echo -n ":$SPARK_HOME/bin"
+ [[ -d $FLUO_HOME ]] && echo -n ":$FLUO_HOME/bin"
+ [[ -d $FLUO_YARN_HOME ]] && echo -n ":$FLUO_YARN_HOME/bin"
+ [[ -d $INFLUXDB_HOME ]] && echo -n ":$INFLUXDB_HOME/bin"
+ [[ -d $GRAFANA_HOME ]] && echo -n ":$GRAFANA_HOME/bin"
echo '"'
fi
}
@@ -145,19 +145,22 @@
tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$ZOOKEEPER_HOME"/bin/zkServer.sh start
- else echo "ZooKeeper already running at: $tmp"
+ else
+ echo "ZooKeeper already running at: $tmp"
fi
tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$HADOOP_HOME"/sbin/start-dfs.sh
- else echo "Hadoop DFS already running at: $tmp"
+ else
+ echo "Hadoop DFS already running at: $tmp"
fi
tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$HADOOP_HOME"/sbin/start-yarn.sh
- else echo "Hadoop Yarn already running at: $tmp"
+ else
+ echo "Hadoop Yarn already running at: $tmp"
fi
fi
@@ -168,7 +171,8 @@
else
"$ACCUMULO_HOME"/bin/accumulo-cluster start
fi
- else echo "Accumulo already running at: $tmp"
+ else
+ echo "Accumulo already running at: $tmp"
fi
;;
hadoop)
@@ -177,13 +181,15 @@
tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$HADOOP_HOME"/sbin/start-dfs.sh
- else echo "Hadoop DFS already running at: $tmp"
+ else
+ echo "Hadoop DFS already running at: $tmp"
fi
tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$HADOOP_HOME"/sbin/start-yarn.sh
- else echo "Hadoop Yarn already running at: $tmp"
+ else
+ echo "Hadoop Yarn already running at: $tmp"
fi
;;
zookeeper)
@@ -192,7 +198,8 @@
tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
if [[ -z $tmp ]]; then
"$ZOOKEEPER_HOME"/bin/zkServer.sh start
- else echo "ZooKeeper already running at: $tmp"
+ else
+ echo "ZooKeeper already running at: $tmp"
fi
;;
*)
@@ -285,14 +292,20 @@
}
function uno_zk_main() {
- check_dirs ZOOKEEPER_HOME || return 1
+ check_dirs ZOOKEEPER_HOME || return 1
"$ZOOKEEPER_HOME"/bin/zkCli.sh "$@"
}
function uno_fetch_main() {
- hash mvn 2>/dev/null || { echo >&2 "Maven must be installed & on PATH. Aborting."; return 1; }
- hash wget 2>/dev/null || { echo >&2 "wget must be installed & on PATH. Aborting."; return 1; }
- if [[ "$1" == "all" ]]; then
+ hash mvn 2>/dev/null || {
+ echo >&2 "Maven must be installed & on PATH. Aborting."
+ return 1
+ }
+ hash wget 2>/dev/null || {
+ echo >&2 "wget must be installed & on PATH. Aborting."
+ return 1
+ }
+ if [[ $1 == "all" ]]; then
"$bin"/impl/fetch.sh fluo
else
"$bin"/impl/fetch.sh "$1" "$2"
@@ -304,7 +317,7 @@
uno_kill_main
read -r -p "Are you sure you want to wipe '$INSTALL'? " yn
case "$yn" in
- [yY]|[yY][eE][sS])
+ [yY] | [yY][eE][sS])
if [[ -d $INSTALL && $INSTALL != '/' ]]; then
echo "removing $INSTALL"
rm -rf "${INSTALL:?}"
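
The `accumulo | hadoop | ...` changes in this file are purely cosmetic: shfmt spaces the pattern alternatives and, with -ci, indents the case arms, but which branch matches is unchanged. An illustrative standalone snippet:

  case "$1" in
    accumulo | hadoop | fluo | fluo-yarn | zookeeper)
      echo "known component: $1"
      ;;
    *)
      echo "unknown component: $1" >&2
      ;;
  esac
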
diff --git a/bin/impl/fetch.sh b/bin/impl/fetch.sh
index 982af50..72c6b5d 100755
--- a/bin/impl/fetch.sh
+++ b/bin/impl/fetch.sh
@@ -29,7 +29,7 @@
function fetch_accumulo() {
[[ $1 != '--no-deps' ]] && fetch_hadoop && fetch_zookeeper
- if [[ -n "$ACCUMULO_REPO" ]]; then
+ if [[ -n $ACCUMULO_REPO ]]; then
declare -a maven_args=(-DskipTests -DskipFormat)
if [[ $HADOOP_VERSION =~ 3\..* ]]; then
maven_args=("${maven_args[@]}" '-Dhadoop.profile=3')
@@ -42,7 +42,7 @@
rm -f "${DOWNLOADS:?}/${ACCUMULO_TARBALL:?}"
(cd "$ACCUMULO_REPO" && mvn -V -e clean package "${maven_args[@]}")
accumulo_built_tarball=$ACCUMULO_REPO/assemble/target/$ACCUMULO_TARBALL
- if [[ ! -f "$accumulo_built_tarball" ]]; then
+ if [[ ! -f $accumulo_built_tarball ]]; then
cat <<EOF
The following file does not exist :
@@ -63,11 +63,11 @@
function fetch_fluo() {
[[ $1 != '--no-deps' ]] && fetch_accumulo
- if [[ -n "$FLUO_REPO" ]]; then
+ if [[ -n $FLUO_REPO ]]; then
rm -f "${DOWNLOADS:?}/${FLUO_TARBALL:?}"
(cd "$FLUO_REPO" && mvn -V -e clean package -DskipTests -Dformatter.skip)
fluo_built_tarball=$FLUO_REPO/modules/distribution/target/$FLUO_TARBALL
- if [[ ! -f "$fluo_built_tarball" ]]; then
+ if [[ ! -f $fluo_built_tarball ]]; then
echo "The tarball $fluo_built_tarball does not exist after building from the FLUO_REPO=$FLUO_REPO"
echo "Does your repo contain code matching the FLUO_VERSION=$FLUO_VERSION set in uno.conf?"
exit 1
@@ -88,36 +88,36 @@
fi
case "$1" in
-accumulo)
- fetch_accumulo "$2"
- ;;
-fluo)
- fetch_fluo "$2"
- ;;
-fluo-yarn)
- [[ $2 != '--no-deps' ]] && fetch_fluo
- if [[ -n $FLUO_YARN_REPO ]]; then
- rm -f "${DOWNLOADS:?}/${FLUO_YARN_TARBALL:?}"
- (cd "$FLUO_YARN_REPO" && mvn -V -e clean package -DskipTests -Dformatter.skip)
- built_tarball=$FLUO_YARN_REPO/target/$FLUO_YARN_TARBALL
- if [[ ! -f "$built_tarball" ]]; then
- echo "The tarball $built_tarball does not exist after building from the FLUO_YARN_REPO=$FLUO_YARN_REPO"
- echo "Does your repo contain code matching the FLUO_YARN_VERSION=$FLUO_YARN_VERSION set in uno.conf?"
- exit 1
+ accumulo)
+ fetch_accumulo "$2"
+ ;;
+ fluo)
+ fetch_fluo "$2"
+ ;;
+ fluo-yarn)
+ [[ $2 != '--no-deps' ]] && fetch_fluo
+ if [[ -n $FLUO_YARN_REPO ]]; then
+ rm -f "${DOWNLOADS:?}/${FLUO_YARN_TARBALL:?}"
+ (cd "$FLUO_YARN_REPO" && mvn -V -e clean package -DskipTests -Dformatter.skip)
+ built_tarball=$FLUO_YARN_REPO/target/$FLUO_YARN_TARBALL
+ if [[ ! -f $built_tarball ]]; then
+ echo "The tarball $built_tarball does not exist after building from the FLUO_YARN_REPO=$FLUO_YARN_REPO"
+ echo "Does your repo contain code matching the FLUO_YARN_VERSION=$FLUO_YARN_VERSION set in uno.conf?"
+ exit 1
+ fi
+ cp "$built_tarball" "$DOWNLOADS"/
+ else
+ download_apache "fluo/fluo-yarn/$FLUO_YARN_VERSION" "$FLUO_YARN_TARBALL" "$FLUO_YARN_HASH"
fi
- cp "$built_tarball" "$DOWNLOADS"/
- else
- download_apache "fluo/fluo-yarn/$FLUO_YARN_VERSION" "$FLUO_YARN_TARBALL" "$FLUO_YARN_HASH"
- fi
- ;;
-hadoop)
- fetch_hadoop
- ;;
-zookeeper)
- fetch_zookeeper
- ;;
-*)
- cat <<EOF
+ ;;
+ hadoop)
+ fetch_hadoop
+ ;;
+ zookeeper)
+ fetch_zookeeper
+ ;;
+ *)
+ cat <<EOF
Usage: uno fetch <component>
Possible components:
@@ -131,7 +131,8 @@
--no-deps Dependencies will be fetched unless this option is specified. Only works for fluo & accumulo components.
--test Copy the test jar built in accumulo to the downloads directory
EOF
- exit 1
+ exit 1
+ ;;
esac
# fetch.sh
diff --git a/bin/impl/install/accumulo.sh b/bin/impl/install/accumulo.sh
index 6ca06ed..4542755 100755
--- a/bin/impl/install/accumulo.sh
+++ b/bin/impl/install/accumulo.sh
@@ -53,8 +53,8 @@
cp "$UNO_HOME"/conf/accumulo/common/* "$conf"
if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
- print_to_console "Accumulo 1 is not supported; use an earlier uno or a newer accumulo"
- exit 1
+ print_to_console "Accumulo 1 is not supported; use an earlier uno or a newer accumulo"
+ exit 1
else
accumulo_conf=$conf/accumulo.properties
cp "$UNO_HOME"/conf/accumulo/2/* "$conf"
diff --git a/bin/impl/install/hadoop.sh b/bin/impl/install/hadoop.sh
index ab626bf..01d1489 100755
--- a/bin/impl/install/hadoop.sh
+++ b/bin/impl/install/hadoop.sh
@@ -60,8 +60,8 @@
echo "export JAVA_HOME=\"$JAVA_HOME\""
echo "export HADOOP_LOG_DIR=\"$HADOOP_LOG_DIR\""
echo "export HADOOP_MAPRED_HOME=\"$HADOOP_HOME\""
-} >> "$hadoop_conf/hadoop-env.sh"
-[[ $HADOOP_VERSION =~ ^2\..*$ ]] && echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/yarn-env.sh"
+} >>"$hadoop_conf/hadoop-env.sh"
+[[ $HADOOP_VERSION =~ ^2\..*$ ]] && echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >>"$hadoop_conf/yarn-env.sh"
# Yarn requires extra JVM args to start with Java 17+
jver=$("$JAVA_HOME"/bin/java -version 2>&1 | grep version | cut -f2 -d'"' | cut -f1 -d.)
@@ -69,9 +69,9 @@
if [[ $jver -gt 11 ]]; then
echo "Setting yarn JVM args for java $jver"
{
- echo "export YARN_RESOURCEMANAGER_OPTS=\"--add-opens java.base/java.lang=ALL-UNNAMED\""
- echo "export YARN_NODEMANAGER_OPTS=\"--add-opens java.base/java.lang=ALL-UNNAMED\""
- } >> "$hadoop_conf/yarn-env.sh"
+ echo 'export YARN_RESOURCEMANAGER_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED"'
+ echo 'export YARN_NODEMANAGER_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED"'
+ } >>"$hadoop_conf/yarn-env.sh"
fi
true
diff --git a/bin/impl/load-env.sh b/bin/impl/load-env.sh
index a58eabc..7515ebc 100755
--- a/bin/impl/load-env.sh
+++ b/bin/impl/load-env.sh
@@ -18,7 +18,7 @@
: "${bin:?"'\$bin' should be set by 'uno' script"}"
# Determine UNO_HOME - Use env variable set by user. If none set, calculate using bin dir
-UNO_HOME="${UNO_HOME:-$( cd -P "${bin}"/.. && pwd )}"
+UNO_HOME="${UNO_HOME:-$(cd -P "${bin}"/.. && pwd)}"
export UNO_HOME
if [[ -z $UNO_HOME || ! -d $UNO_HOME ]]; then
echo "UNO_HOME=$UNO_HOME is not a valid directory. Please make sure it exists"
@@ -52,34 +52,34 @@
# Confirm that hadoop, accumulo, and zookeeper env variables are not set
if [[ ! "version env" =~ $1 ]]; then
- [[ -n "$HH" && "$HH" != "$HADOOP_HOME" ]] && env_error 'HADOOP_HOME' "$HH" "$HADOOP_HOME"
- [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]] && env_error 'HADOOP_CONF_DIR' "$HC" "$HADOOP_CONF_DIR"
- [[ -n "$ZH" && "$ZH" != "$ZOOKEEPER_HOME" ]] && env_error 'ZOOKEEPER_HOME' "$ZH" "$ZOOKEEPER_HOME"
- [[ -n "$SH" && "$SH" != "$SPARK_HOME" ]] && env_error 'SPARK_HOME' "$SH" "$SPARK_HOME"
- [[ -n "$AH" && "$AH" != "$ACCUMULO_HOME" ]] && env_error 'ACCUMULO_HOME' "$AH" "$ACCUMULO_HOME"
- [[ -n "$FH" && "$FH" != "$FLUO_HOME" ]] && env_error 'FLUO_HOME' "$FH" "$FLUO_HOME"
+ [[ -n $HH && $HH != "$HADOOP_HOME" ]] && env_error 'HADOOP_HOME' "$HH" "$HADOOP_HOME"
+ [[ -n $HC && $HC != "$HADOOP_CONF_DIR" ]] && env_error 'HADOOP_CONF_DIR' "$HC" "$HADOOP_CONF_DIR"
+ [[ -n $ZH && $ZH != "$ZOOKEEPER_HOME" ]] && env_error 'ZOOKEEPER_HOME' "$ZH" "$ZOOKEEPER_HOME"
+ [[ -n $SH && $SH != "$SPARK_HOME" ]] && env_error 'SPARK_HOME' "$SH" "$SPARK_HOME"
+ [[ -n $AH && $AH != "$ACCUMULO_HOME" ]] && env_error 'ACCUMULO_HOME' "$AH" "$ACCUMULO_HOME"
+ [[ -n $FH && $FH != "$FLUO_HOME" ]] && env_error 'FLUO_HOME' "$FH" "$FLUO_HOME"
fi
# Confirm that env variables were set correctly
-if [[ -n "$FLUO_REPO" && ! -d "$FLUO_REPO" ]]; then
+if [[ -n $FLUO_REPO && ! -d $FLUO_REPO ]]; then
echo "FLUO_REPO=$FLUO_REPO is not a valid directory. Please make sure it exists"
exit 1
fi
-if [[ -n "$ACCUMULO_REPO" && ! -d "$ACCUMULO_REPO" ]]; then
+if [[ -n $ACCUMULO_REPO && ! -d $ACCUMULO_REPO ]]; then
echo "ACCUMULO_REPO=$ACCUMULO_REPO is not a valid directory. Please make sure it exists"
exit 1
fi
-if [[ -z "$INSTALL" ]]; then
+if [[ -z $INSTALL ]]; then
echo "INSTALL=$INSTALL needs to be set in uno.conf"
exit 1
fi
-if [[ ! -d "$INSTALL" ]]; then
+if [[ ! -d $INSTALL ]]; then
mkdir -p "$INSTALL"
fi
-if [[ -z "$JAVA_HOME" || ! -d "$JAVA_HOME" ]]; then
+if [[ -z $JAVA_HOME || ! -d $JAVA_HOME ]]; then
echo "JAVA_HOME must be set in your shell to a valid directory. Currently, JAVA_HOME=$JAVA_HOME"
exit 1
fi
@@ -119,8 +119,14 @@
exit 1
fi
-hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
-hash sed 2>/dev/null || { echo >&2 "sed must be installed & on PATH. Aborting."; exit 1; }
+hash shasum 2>/dev/null || {
+ echo >&2 "shasum must be installed & on PATH. Aborting."
+ exit 1
+}
+hash sed 2>/dev/null || {
+ echo >&2 "sed must be installed & on PATH. Aborting."
+ exit 1
+}
if sed --version >/dev/null 2>&1; then
# GNU sed supports --version and -i without a backup suffix parameter
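
The reflowed `hash ... || { ...; }` blocks keep their original meaning: `hash` returns non-zero when the named command is not on PATH, so the construct doubles as a lightweight prerequisite check. As a standalone sketch:

  hash shasum 2>/dev/null || {
    echo >&2 "shasum must be installed & on PATH. Aborting."
    exit 1
  }
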
diff --git a/bin/impl/run/accumulo.sh b/bin/impl/run/accumulo.sh
index 7564161..bb3ffea 100755
--- a/bin/impl/run/accumulo.sh
+++ b/bin/impl/run/accumulo.sh
@@ -26,7 +26,7 @@
[[ $1 != '--no-deps' ]] && run_component hadoop && run_component zookeeper
-"$HADOOP_HOME"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
+"$HADOOP_HOME"/bin/hadoop fs -rm -r /accumulo 2>/dev/null || true
"$ACCUMULO_HOME"/bin/accumulo init --clear-instance-name --instance-name "$ACCUMULO_INSTANCE" --password "$ACCUMULO_PASSWORD"
"$ACCUMULO_HOME"/bin/accumulo-cluster start
diff --git a/bin/impl/util.sh b/bin/impl/util.sh
index b0c3719..2bf3286 100755
--- a/bin/impl/util.sh
+++ b/bin/impl/util.sh
@@ -16,9 +16,18 @@
# limitations under the License.
# check if running in a color terminal
-function terminalSupportsColor() { local c; c=$(tput colors 2>/dev/null) || c=-1; [[ -t 1 ]] && [[ $c -ge 8 ]]; }
+function terminalSupportsColor() {
+ local c
+ c=$(tput colors 2>/dev/null) || c=-1
+ [[ -t 1 ]] && [[ $c -ge 8 ]]
+}
terminalSupportsColor && doColor=1 || doColor=0
-function color() { local c; c=$1; shift; [[ $doColor -eq 1 ]] && echo -e "\\e[0;${c}m${*}\\e[0m" || echo "$@"; }
+function color() {
+ local c
+ c=$1
+ shift
+ [[ $doColor -eq 1 ]] && echo -e "\\e[0;${c}m${*}\\e[0m" || echo "$@"
+}
function red() { color 31 "$@"; }
function green() { color 32 "$@"; }
function yellow() { color 33 "$@"; }
@@ -76,7 +85,7 @@
for plugin in $POST_RUN_PLUGINS; do
echo "Executing post run plugin: $plugin"
plugin_script="${UNO_HOME}/plugins/${plugin}.sh"
- if [[ ! -f "$plugin_script" ]]; then
+ if [[ ! -f $plugin_script ]]; then
echo "Plugin does not exist: $plugin_script"
return 1
fi
@@ -85,23 +94,26 @@
}
function install_component() {
- local component; component=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
+ local component
+ component=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
shift
"$UNO_HOME/bin/impl/install/$component.sh" "$@" || return 1
case "$component" in
- accumulo|fluo) post_install_plugins ;;
+ accumulo | fluo) post_install_plugins ;;
*) ;;
esac
}
function run_component() {
- local component; component=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
- local logs; logs="$LOGS_DIR/setup"
+ local component
+ component=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
+ local logs
+ logs="$LOGS_DIR/setup"
mkdir -p "$logs"
shift
"$UNO_HOME/bin/impl/run/$component.sh" "$@" 1>"$logs/${component}.out" 2>"$logs/${component}.err" || return 1
case "$component" in
- accumulo|fluo) post_run_plugins ;;
+ accumulo | fluo) post_run_plugins ;;
*) ;;
esac
}
@@ -113,7 +125,7 @@
function save_console_fd {
# this requires at least bash 4.1
local v=("${BASH_VERSINFO[@]}")
- if [[ -z $UNO_CONSOLE_FD ]] && (( v[0]>4 || (v[0]==4 && v[1]>=1) )); then
+ if [[ -z $UNO_CONSOLE_FD ]] && ((v[0] > 4 || (v[0] == 4 && v[1] >= 1))); then
# Allocate an unused file descriptor and make it dup stdout
# https://stackoverflow.com/a/41620630/7298689
exec {UNO_CONSOLE_FD}>&1
@@ -125,34 +137,34 @@
if [[ -z $UNO_CONSOLE_FD ]]; then
echo "$@"
else
- echo "$@" >&${UNO_CONSOLE_FD}
+ echo "$@" >&"$UNO_CONSOLE_FD"
fi
}
function download_tarball() {
local url_prefix=$1 tarball=$2 expected_hash=$3
- verify_exist_hash "$tarball" "$expected_hash" &>/dev/null || \
- wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
+ verify_exist_hash "$tarball" "$expected_hash" &>/dev/null ||
+ wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
verify_exist_hash "$tarball" "$expected_hash" || return 1
echo "$(yellow "$tarball") download matches expected checksum ($(green "$expected_hash"))"
}
function download_apache() {
local url_prefix=$1 tarball=$2 expected_hash=$3
- verify_exist_hash "$tarball" "$expected_hash" &>/dev/null || \
- {
- [[ -n "${apache_mirror:-}" ]] && wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
- if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
- echo "Downloading $tarball from Apache archive"
- wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
- fi
- }
+ verify_exist_hash "$tarball" "$expected_hash" &>/dev/null ||
+ {
+ [[ -n ${apache_mirror:-} ]] && wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
+ if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
+ echo "Downloading $tarball from Apache archive"
+ wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
+ fi
+ }
verify_exist_hash "$tarball" "$expected_hash" || return 1
echo "$(yellow "$tarball") download matches expected checksum ($(green "$expected_hash"))"
}
function print_cmd_usage() {
- cat <<EOF
+ cat <<EOF
Usage: uno $1 <component> [--no-deps] [--test]
Possible components:
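
The `>&${UNO_CONSOLE_FD}` to `>&"$UNO_CONSOLE_FD"` change is likely the ShellCheck fix mentioned in the commit message (ShellCheck prefers quoted expansions; the exact warning code is not recorded in this patch). The surrounding mechanism, as a minimal sketch:

  # bash 4.1+ can allocate a spare file descriptor that duplicates stdout
  exec {console_fd}>&1
  # writes to the saved descriptor reach the console even if stdout is later redirected
  echo "printed to the original console" >&"$console_fd"
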
diff --git a/bin/uno b/bin/uno
index 8f1c176..07c3af7 100755
--- a/bin/uno
+++ b/bin/uno
@@ -18,14 +18,14 @@
# Start: Resolve Script Directory
SOURCE="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
-while [[ -h "$SOURCE" ]]; do
- bin="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
- SOURCE="$(readlink "$SOURCE")"
- # if $SOURCE was a relative symlink, we need to resolve it relative to the
- # path where the symlink file was located
- [[ $SOURCE != /* ]] && SOURCE="$bin/$SOURCE"
+while [[ -L $SOURCE ]]; do
+ bin="$(cd -P "$(dirname "$SOURCE")" && pwd)"
+ SOURCE="$(readlink "$SOURCE")"
+ # if $SOURCE was a relative symlink, we need to resolve it relative to the
+ # path where the symlink file was located
+ [[ $SOURCE != /* ]] && SOURCE="$bin/$SOURCE"
done
-bin="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+bin="$(cd -P "$(dirname "$SOURCE")" && pwd)"
# Stop: Resolve Script Directory
uno_cmd=$1
@@ -37,7 +37,7 @@
source "$UNO_HOME"/bin/impl/commands.sh
case "$uno_cmd" in
- ashell|env|fetch|install|jshell|kill|run|setup|start|status|stop|version|wipe|zk)
+ ashell | env | fetch | install | jshell | kill | run | setup | start | status | stop | version | wipe | zk)
"uno_${uno_cmd}_main" "$@"
;;
*)
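
In the symlink-resolution loop, `-h` was replaced by `-L`; in bash the two tests are equivalent (both succeed when the operand is a symbolic link), so the loop behaves as before. A quick demonstration:

  f=$(mktemp -u) && ln -s /tmp "$f"
  [[ -L $f && -h $f ]] && echo "both tests agree: symlink"
  rm -f "$f"
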
diff --git a/contrib/ci/install-shfmt.sh b/contrib/ci/install-shfmt.sh
new file mode 100755
index 0000000..fd73efc
--- /dev/null
+++ b/contrib/ci/install-shfmt.sh
@@ -0,0 +1,29 @@
+#! /usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Install shfmt tool to search for and optionally format bash scripts
+# This is useful for other CI tools to run ShellCheck and shfmt to format
+
+set -e
+set -x
+
+shfmt_version=3.4.3
+sudo wget "https://github.com/mvdan/sh/releases/download/v${shfmt_version}/shfmt_v${shfmt_version}_linux_amd64" -O /usr/local/bin/shfmt &&
+ sudo chmod +x /usr/local/bin/shfmt
diff --git a/contrib/run-shellcheck b/contrib/ci/run-shellcheck.sh
similarity index 82%
rename from contrib/run-shellcheck
rename to contrib/ci/run-shellcheck.sh
index 2935f62..9e34907 100755
--- a/contrib/run-shellcheck
+++ b/contrib/ci/run-shellcheck.sh
@@ -3,10 +3,9 @@
set -e
set -x
-cd "$(dirname "${BASH_SOURCE[0]}")/../"
+cd "$(dirname "${BASH_SOURCE[0]}")/../../"
mapfile -t filestocheck < <(find bin/ -type f)
for x in "${filestocheck[@]}"; do
shellcheck conf/uno.conf bin/impl/util.sh bin/impl/load-env.sh bin/impl/commands.sh "$x"
done
-
diff --git a/contrib/ci/run-shfmt.sh b/contrib/ci/run-shfmt.sh
new file mode 100755
index 0000000..233df9a
--- /dev/null
+++ b/contrib/ci/run-shfmt.sh
@@ -0,0 +1,26 @@
+#! /usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Check formatting of all bash scripts
+
+set -e
+set -x
+
+shfmt -ln bash -l -d -i 2 -ci -s .
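
For reference, the options passed to shfmt above (per shfmt 3.x):

  # -ln bash  parse scripts as bash rather than POSIX sh
  # -l        list files whose formatting differs
  # -d        print a diff and exit non-zero when formatting differs
  # -i 2      indent with two spaces instead of tabs
  # -ci       indent the patterns inside case statements
  # -s        simplify the code where an equivalent shorter form exists
  shfmt -ln bash -l -d -i 2 -ci -s .
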
diff --git a/plugins/accumulo-encryption.sh b/plugins/accumulo-encryption.sh
index b93ea3c..3d1bbdf 100755
--- a/plugins/accumulo-encryption.sh
+++ b/plugins/accumulo-encryption.sh
@@ -25,5 +25,5 @@
accumulo_conf=$ACCUMULO_HOME/conf/accumulo.properties
encrypt_key=$ACCUMULO_HOME/conf/data-encryption.key
openssl rand -out $encrypt_key 32
-echo "instance.crypto.opts.key.uri=file://$encrypt_key" >> "$accumulo_conf"
-echo "instance.crypto.service=$(jar -tvf "$ACCUMULO_HOME"/lib/accumulo-core-2.*.jar | grep -o 'org.apache.accumulo.core.*AESCryptoService' | tr / . | tail -1)" >> "$accumulo_conf"
+echo "instance.crypto.opts.key.uri=file://$encrypt_key" >>"$accumulo_conf"
+echo "instance.crypto.service=$(jar -tvf "$ACCUMULO_HOME"/lib/accumulo-core-2.*.jar | grep -o 'org.apache.accumulo.core.*AESCryptoService' | tr / . | tail -1)" >>"$accumulo_conf"
diff --git a/plugins/accumulo-proxy.sh b/plugins/accumulo-proxy.sh
index e2f5332..666ef9e 100755
--- a/plugins/accumulo-proxy.sh
+++ b/plugins/accumulo-proxy.sh
@@ -19,7 +19,7 @@
TARBALL_PATH=$PROXY_REPO/target/$PROXY_TARBALL
-if [[ ! -f "$TARBALL_PATH" ]]; then
+if [[ ! -f $TARBALL_PATH ]]; then
cd $PROXY_REPO/
mvn -V -e clean package -Ptarball
fi
@@ -37,7 +37,7 @@
pkill -f accumulo\\.proxy\\.Proxy
-"$PROXY_HOME"/bin/accumulo-proxy -p "$PROXY_HOME"/conf/proxy.properties &> "${INSTALL}/logs/accumulo-proxy/accumulo-proxy.log" &
+"$PROXY_HOME"/bin/accumulo-proxy -p "$PROXY_HOME"/conf/proxy.properties &>"${INSTALL}/logs/accumulo-proxy/accumulo-proxy.log" &
print_to_console "Accumulo Proxy $PROXY_VERSION is running"
print_to_console " * view logs at $INSTALL/logs/accumulo-proxy/"
diff --git a/plugins/influx-metrics.sh b/plugins/influx-metrics.sh
index e36b0f0..3f05ad1 100755
--- a/plugins/influx-metrics.sh
+++ b/plugins/influx-metrics.sh
@@ -17,7 +17,7 @@
source "$UNO_HOME"/bin/impl/util.sh
-if [[ "$OSTYPE" == "darwin"* ]]; then
+if [[ $OSTYPE == "darwin"* ]]; then
echo "The metrics services (InfluxDB and Grafana) are not supported on Mac OS X at this time."
exit 1
fi
@@ -66,7 +66,7 @@
echo "Installing InfluxDB $INFLUXDB_VERSION to $INFLUXDB_HOME"
tar xzf "$DOWNLOADS/build/$INFLUXDB_TARBALL" -C "$INSTALL"
-"$INFLUXDB_HOME"/bin/influxd config -config "$UNO_HOME"/plugins/influx-metrics/influxdb.conf > "$INFLUXDB_HOME"/influxdb.conf
+"$INFLUXDB_HOME"/bin/influxd config -config "$UNO_HOME"/plugins/influx-metrics/influxdb.conf >"$INFLUXDB_HOME"/influxdb.conf
if [[ ! -f "$INFLUXDB_HOME"/influxdb.conf ]]; then
print_to_console "Failed to create $INFLUXDB_HOME/influxdb.conf"
exit 1
@@ -81,7 +81,7 @@
$SED "s#LOGS_DIR#$LOGS_DIR#g" "$GRAFANA_HOME"/conf/custom.ini
mkdir "$GRAFANA_HOME"/dashboards
-if [[ -d "$ACCUMULO_HOME" ]]; then
+if [[ -d $ACCUMULO_HOME ]]; then
echo "Configuring Accumulo metrics"
cp "$UNO_HOME"/plugins/influx-metrics/accumulo-dashboard.json "$GRAFANA_HOME"/dashboards/
conf=$ACCUMULO_HOME/conf
@@ -93,10 +93,10 @@
echo "accumulo.sink.graphite.server_host=localhost"
echo "accumulo.sink.graphite.server_port=2004"
echo "accumulo.sink.graphite.metrics_prefix=accumulo"
- } >> "$conf"/"$metrics_props"
+ } >>"$conf"/"$metrics_props"
fi
-if [[ -d "$FLUO_HOME" ]]; then
+if [[ -d $FLUO_HOME ]]; then
echo "Configuring Fluo metrics"
cp "$FLUO_HOME"/contrib/grafana/* "$GRAFANA_HOME"/dashboards/
if [[ $FLUO_VERSION =~ ^1\.[0-1].*$ ]]; then
@@ -110,16 +110,16 @@
echo "fluo.metrics.reporter.graphite.host=$UNO_HOST"
echo "fluo.metrics.reporter.graphite.port=2003"
echo "fluo.metrics.reporter.graphite.frequency=30"
- } >> "$FLUO_PROPS"
+ } >>"$FLUO_PROPS"
fi
-"$INFLUXDB_HOME"/bin/influxd -config "$INFLUXDB_HOME"/influxdb.conf &> "$LOGS_DIR"/metrics/influxdb.log &
+"$INFLUXDB_HOME"/bin/influxd -config "$INFLUXDB_HOME"/influxdb.conf &>"$LOGS_DIR"/metrics/influxdb.log &
-"$GRAFANA_HOME"/bin/grafana-server -homepath="$GRAFANA_HOME" 2> /dev/null &
+"$GRAFANA_HOME"/bin/grafana-server -homepath="$GRAFANA_HOME" 2>/dev/null &
sleep 10
-if [[ -d "$FLUO_HOME" ]]; then
+if [[ -d $FLUO_HOME ]]; then
"$INFLUXDB_HOME"/bin/influx -import -path "$FLUO_HOME"/contrib/influxdb/fluo_metrics_setup.txt
fi
@@ -130,7 +130,7 @@
function add_datasource() {
retcode=1
- while [[ $retcode != 0 ]]; do
+ while [[ $retcode != 0 ]]; do
curl 'http://admin:admin@localhost:3000/api/datasources' -X POST -H 'Content-Type: application/json;charset=UTF-8' \
--data-binary "$1"
retcode=$?
@@ -142,14 +142,14 @@
echo ""
}
-if [[ -d "$ACCUMULO_HOME" ]]; then
+if [[ -d $ACCUMULO_HOME ]]; then
accumulo_data='{"name":"accumulo_metrics","type":"influxdb","url":"http://'
accumulo_data+=$UNO_HOST
accumulo_data+=':8086","access":"direct","isDefault":true,"database":"accumulo_metrics","user":"accumulo","password":"secret"}'
add_datasource $accumulo_data
fi
-if [[ -d "$FLUO_HOME" ]]; then
+if [[ -d $FLUO_HOME ]]; then
fluo_data='{"name":"fluo_metrics","type":"influxdb","url":"http://'
fluo_data+=$UNO_HOST
fluo_data+=':8086","access":"direct","isDefault":false,"database":"fluo_metrics","user":"fluo","password":"secret"}'
diff --git a/plugins/spark.sh b/plugins/spark.sh
index 941ed53..8d8c563 100755
--- a/plugins/spark.sh
+++ b/plugins/spark.sh
@@ -27,7 +27,7 @@
verify_exist_hash "$SPARK_TARBALL" "$SPARK_HASH"
-if [[ ! -d "$HADOOP_HOME" ]]; then
+if [[ ! -d $HADOOP_HOME ]]; then
print_to_console "Apache Hadoop needs to be setup before Apache Spark can be setup."
exit 1
fi