HAWQ-1789. Make GitHub Workflow init script idempotent
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4ee1b4b..3b32df6 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -118,3 +118,18 @@
         source .github/workflows/scripts/gtest_filter_negative
         export PGDATABASE=hawq_feature_test_db
         src/test/feature/feature-test --gtest_filter=*-$GTEST_FILTER_NEGATIVE
+
+    - name: test scripts' idempotence
+      run: |
+        case $(uname -s) in
+          Darwin) .github/workflows/scripts/init_macos.sh ;;
+          Linux)  .github/workflows/scripts/init_linux.sh ;;
+        esac
+
+        export HADOOP_HOME=/usr/local/hadoop/
+        .github/workflows/scripts/init_hdfs.sh
+
+        source /tmp/hawq/greenplum_path.sh
+        .github/workflows/scripts/init_hawq.sh
+        psql -d postgres -c 'create database hawq_feature_test_db;'
+        src/test/feature/feature-test --gtest_filter=TestDatabase.BasicTest
diff --git a/.github/workflows/scripts/init_hawq.sh b/.github/workflows/scripts/init_hawq.sh
index 0793ebd..89242a5 100755
--- a/.github/workflows/scripts/init_hawq.sh
+++ b/.github/workflows/scripts/init_hawq.sh
@@ -17,6 +17,12 @@
 
 
 
+# Check
+if [[ -z $GPHOME ]]; then
+  echo "Please source HAWQ's greenplum_path.sh"
+  exit 1
+fi
+
 # Configure
 tee $GPHOME/etc/hawq-site.xml << EOF_hawq_site
 <configuration>
@@ -56,9 +62,12 @@
 </configuration>
 EOF_hawq_site
 
-# Initialize
-rm -rf /opt/dependency*
+# Clean
+pkill -9 postgres || true
+hdfs dfs -rm -f -r hdfs://localhost:8020/hawq_default
 rm -rf /tmp/db_data/hawq-data-directory
+
+# Initialize
 install -d /tmp/db_data/hawq-data-directory/masterdd
 install -d /tmp/db_data/hawq-data-directory/segmentdd
 hawq init cluster -a
diff --git a/.github/workflows/scripts/init_hdfs.sh b/.github/workflows/scripts/init_hdfs.sh
index 0ab2094..e888af5 100755
--- a/.github/workflows/scripts/init_hdfs.sh
+++ b/.github/workflows/scripts/init_hdfs.sh
@@ -17,6 +17,16 @@
 
 
 
+# Check
+if ! command -v java; then
+  echo "Please check java in PATH"
+  exit 1
+fi
+if [[ -z $HADOOP_HOME ]]; then
+  echo "Please export HADOOP_HOME"
+  exit 1
+fi
+
 # Configure
 tee $HADOOP_HOME/etc/hadoop/core-site.xml << EOF_core_site
 <configuration>
@@ -39,6 +49,14 @@
 </configuration>
 EOF_hdfs_site
 
+tee -a $HADOOP_HOME/etc/hadoop/hadoop-env.sh << EOF_hadoop_env
+export JAVA_HOME=$(java -XshowSettings:properties -version 2>&1 | sed -nE 's|.*java.home = (.*)|\1|p')
+EOF_hadoop_env
+
+# Clean
+$HADOOP_HOME/sbin/stop-dfs.sh
+rm -rf /tmp/db_data/hdfs/name /tmp/db_data/hdfs/data
+
 # Initialize
 install -d /tmp/db_data/hdfs/name
 install -d /tmp/db_data/hdfs/data
diff --git a/.github/workflows/scripts/init_linux.sh b/.github/workflows/scripts/init_linux.sh
index 357adf9..0126873 100755
--- a/.github/workflows/scripts/init_linux.sh
+++ b/.github/workflows/scripts/init_linux.sh
@@ -29,6 +29,7 @@
    StrictHostKeyChecking no
    UserKnownHostsFile=/dev/null
 EOF_ssh_config
+chmod 600 ~/.ssh/config
 
 ssh -v localhost whoami
 
diff --git a/.github/workflows/scripts/init_macos.sh b/.github/workflows/scripts/init_macos.sh
index f02aa21..51823d7 100755
--- a/.github/workflows/scripts/init_macos.sh
+++ b/.github/workflows/scripts/init_macos.sh
@@ -18,9 +18,12 @@
 
 
 # Setup passphraseless ssh
-sudo systemsetup -setremotelogin on
-ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
+sudo systemsetup -setremotelogin on &>/dev/null || true
+/bin/launchctl load -w /System/Library/LaunchDaemons/ssh.plist &>/dev/null || true
+
+test -f ~/.ssh/id_rsa || ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
 cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+chmod go-w ~
 chmod 0700 ~/.ssh
 chmod 0600 ~/.ssh/authorized_keys
 
@@ -29,6 +32,7 @@
    StrictHostKeyChecking no
    UserKnownHostsFile=/dev/null
 EOF_ssh_config
+chmod 600 ~/.ssh/config
 
 ssh -v localhost whoami
 
@@ -43,7 +47,7 @@
 kern.maxfilesperproc=65536
 kern.corefile=/cores/core.%N.%P
 EOF_sysctl
-</etc/sysctl.conf xargs sudo sysctl
+</etc/sysctl.conf xargs sudo sysctl || true
 
 # Add data folder
 sudo install -o $USER -d /tmp/db_data/
diff --git a/.github/workflows/scripts/toolchain.sh b/.github/workflows/scripts/toolchain.sh
index 743bf0a..1e76375 100644
--- a/.github/workflows/scripts/toolchain.sh
+++ b/.github/workflows/scripts/toolchain.sh
@@ -99,6 +99,8 @@
 
 
 ###
+find . $HAWQ_TOOLCHAIN_PATH/../../../../ -name CMakeCache.txt -delete
+find . $HAWQ_TOOLCHAIN_PATH/../../../../ -name '*build_timestamp' -delete
 rm -rf $HAWQ_TOOLCHAIN_PATH/dependency/package/include/hdfs
 rm -rf $HAWQ_TOOLCHAIN_PATH/dependency/package/lib/libhdfs3*
 source $HAWQ_TOOLCHAIN_PATH/dependency/package/env.sh
diff --git a/src/test/feature/lib/compent_config.cpp b/src/test/feature/lib/compent_config.cpp
index 21b64c5..ea49990 100644
--- a/src/test/feature/lib/compent_config.cpp
+++ b/src/test/feature/lib/compent_config.cpp
@@ -204,6 +204,8 @@
       string valueLine = lines[i];
       if (valueLine.find("ssh:") != string::npos)
         continue;
+      if (valueLine.find("sudo:") != string::npos)
+        continue;
       if (valueLine.find("WARNING") != string::npos)
         continue;
       auto datanodeInfo = hawq::test::split(valueLine, ':');