HBASE-27293 Remove Jenkins and personality scripts support for 1.x (#4690)

Signed-off-by: GeorryHuang <huangzhuoyue@apache.org>
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index c849870..f287f01 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -34,7 +34,6 @@
     YETUS_RELEASE = '0.12.0'
     // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
     OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
-    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
     OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
     OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
     OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
@@ -186,7 +185,6 @@
         // stash with given name for all tests we might run, so that we can unstash all of them even if
         // we skip some due to e.g. branch-specific JDK or Hadoop support
         stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
-        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
         stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
         stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
         stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
@@ -296,116 +294,6 @@
             }
           }
         }
-        stage ('yetus jdk7 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          when {
-            branch 'branch-1*'
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            SET_JAVA_HOME = "/usr/lib/jvm/java-7"
-          }
-          steps {
-            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.html
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK7 Nightly Build Report'
-              ]
-            }
-          }
-        }
         stage ('yetus jdk8 hadoop2 checks') {
           agent {
             node {
@@ -413,7 +301,7 @@
             }
           }
           when {
-            anyOf { branch 'branch-1*'; branch 'branch-2*' }
+            branch 'branch-2*'
           }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
@@ -522,11 +410,6 @@
               label 'hbase'
             }
           }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             TESTS = "${env.DEEP_CHECKS}"
@@ -636,11 +519,6 @@
               label 'hbase'
             }
           }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             TESTS = "${env.DEEP_CHECKS}"
@@ -817,7 +695,7 @@
             '''
             unstash 'hadoop-2'
             sh '''#!/bin/bash -xe
-              if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
+              if [[ "${BRANCH}" = branch-2* ]]; then
                 echo "Attempting to use run an instance on top of Hadoop 2."
                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
@@ -841,44 +719,40 @@
             '''
             unstash 'hadoop-3'
             sh '''#!/bin/bash -e
-              if [[ "${BRANCH}" = branch-1* ]]; then
-                echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
-              else
-                echo "Attempting to use run an instance on top of Hadoop 3."
-                artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
-                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                    --single-process \
-                    --working-dir output-integration/hadoop-3 \
-                    --hbase-client-install hbase-client \
-                    hbase-install \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >output-integration/hadoop-3.log 2>&1 ; then
-                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
-                  exit 2
-                fi
-                echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                    --single-process \
-                    --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
-                    --working-dir output-integration/hadoop-3-shaded \
-                    --hbase-client-install hbase-client \
-                    hbase-install \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >output-integration/hadoop-3-shaded.log 2>&1 ; then
-                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
-                  exit 2
-                fi
-                echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
+              echo "Attempting to use run an instance on top of Hadoop 3."
+              artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
+              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
+              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+                  --single-process \
+                  --working-dir output-integration/hadoop-3 \
+                  --hbase-client-install hbase-client \
+                  hbase-install \
+                  hadoop-3/bin/hadoop \
+                  hadoop-3/share/hadoop/yarn/timelineservice \
+                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-3/bin/mapred \
+                  >output-integration/hadoop-3.log 2>&1 ; then
+                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
+                exit 2
               fi
+              echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
+              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+                  --single-process \
+                  --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
+                  --working-dir output-integration/hadoop-3-shaded \
+                  --hbase-client-install hbase-client \
+                  hbase-install \
+                  hadoop-3/bin/hadoop \
+                  hadoop-3/share/hadoop/yarn/timelineservice \
+                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-3/bin/mapred \
+                  >output-integration/hadoop-3-shaded.log 2>&1 ; then
+                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
+                exit 2
+              fi
+              echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
             '''
           }
           post {
@@ -919,14 +793,12 @@
       script {
          try {
            unstash 'general-result'
-           unstash 'jdk7-result'
            unstash 'jdk8-hadoop2-result'
            unstash 'jdk8-hadoop3-result'
            unstash 'jdk11-hadoop3-result'
            unstash 'srctarball-result'
            sh "printenv"
            def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                           "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index dbcb7ec..68a75a9 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -145,8 +145,6 @@
   local repostatus=$1
   local testtype=$2
   local extra=""
-  local branch1jdk8=()
-  local jdk8module=""
   local MODULES=("${CHANGED_MODULES[@]}")
 
   yetus_info "Personality: ${repostatus} ${testtype}"
@@ -173,10 +171,6 @@
   tmpdir=$(realpath target)
   extra="${extra} -Djava.io.tmpdir=${tmpdir} -DHBasePatchProcess"
 
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-    extra="${extra} -Dhttps.protocols=TLSv1.2"
-  fi
-
   # If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
   # the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
   # logic is not both activated within Maven.
@@ -207,21 +201,6 @@
     return
   fi
 
-  # This list should include any modules that require jdk8. Maven should be configured to only
-  # include them when a proper JDK is in use, but that doesn' work if we specifically ask for the
-  # module to build as yetus does if something changes in the module.  Rather than try to
-  # figure out what jdk is in use so we can duplicate the module activation logic, just
-  # build at the top level if anything changes in one of these modules and let maven sort it out.
-  branch1jdk8=(hbase-error-prone hbase-tinylfu-blockcache)
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-    for jdk8module in "${branch1jdk8[@]}"; do
-      if [[ "${MODULES[*]}" =~ ${jdk8module} ]]; then
-        MODULES=(.)
-        break
-      fi
-    done
-  fi
-
   if [[ ${testtype} == spotbugs ]]; then
     # Run spotbugs on each module individually to diff pre-patch and post-patch results and
     # report new warnings for changed modules only.
@@ -241,8 +220,7 @@
     return
   fi
 
-  if [[ ${testtype} == compile ]] && [[ "${SKIP_ERRORPRONE}" != "true" ]] &&
-      [[ "${PATCH_BRANCH}" != branch-1* ]] ; then
+  if [[ ${testtype} == compile ]] && [[ "${SKIP_ERRORPRONE}" != "true" ]]; then
     extra="${extra} -PerrorProne"
   fi
 
@@ -445,11 +423,7 @@
     return 1
   fi
 
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-    pdf_output="book.pdf"
-  else
-    pdf_output="apache_hbase_reference_guide.pdf"
-  fi
+  pdf_output="apache_hbase_reference_guide.pdf"
 
   if [[ ! -f "${PATCH_DIR}/${repostatus}-site/${pdf_output}" ]]; then
     add_vote_table -1 refguide "${repostatus} failed to produce the pdf version of the reference guide."
@@ -601,43 +575,8 @@
 
   # All supported Hadoop versions that we want to test the compilation with
   # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1.4 ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-1.4 rules."
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.7.7"
-    else
-      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-1 ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-1 rules."
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.10.0"
-    else
-      hbase_hadoop2_versions="2.10.0"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-2.0 ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-2.0 rules."
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.6.5 2.7.7 2.8.5"
-    else
-      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-2.1 rules."
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.7.7 2.8.5"
-    else
-      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-2.2 ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-2.2 rules."
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.8.5 2.9.2 2.10.0"
-    else
-      hbase_hadoop2_versions="2.8.5 2.9.2 2.10.0"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-2.* ]]; then
-    yetus_info "Setting Hadoop 2 versions to test based on branch-2.3+ rules."
+  if [[ "${PATCH_BRANCH}" = branch-2.* ]]; then
+    yetus_info "Setting Hadoop 2 versions to test based on branch-2.4+ rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
       hbase_hadoop2_versions="2.10.1"
     else
@@ -647,30 +586,11 @@
     yetus_info "Setting Hadoop 2 versions to null on master/feature branch rules since we do not support hadoop 2 for hbase 3.x any more."
     hbase_hadoop2_versions=""
   fi
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-    yetus_info "Setting Hadoop 3 versions to test based on branch-1.x rules."
-    hbase_hadoop3_versions=""
-  elif [[ "${PATCH_BRANCH}" = branch-2.0 ]] || [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
-    yetus_info "Setting Hadoop 3 versions to test based on branch-2.0/branch-2.1 rules"
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop3_versions="3.0.3 3.1.2"
-    else
-      hbase_hadoop3_versions="3.0.3 3.1.1 3.1.2"
-    fi
-  elif [[ "${PATCH_BRANCH}" = branch-2.2 ]] || [[ "${PATCH_BRANCH}" = branch-2.3 ]]; then
-    yetus_info "Setting Hadoop 3 versions to test based on branch-2.2/branch-2.3 rules"
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop3_versions="3.1.2 3.2.2"
-    else
-      hbase_hadoop3_versions="3.1.1 3.1.2 3.2.0 3.2.1 3.2.2"
-    fi
+  yetus_info "Setting Hadoop 3 versions to test based on branch-2.4+/master/feature branch rules"
+  if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
+    hbase_hadoop3_versions="3.1.2 3.2.2 3.3.1"
   else
-    yetus_info "Setting Hadoop 3 versions to test based on branch-2.4+/master/feature branch rules"
-    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop3_versions="3.1.2 3.2.2 3.3.1"
-    else
-      hbase_hadoop3_versions="3.1.1 3.1.2 3.2.0 3.2.1 3.2.2 3.3.0 3.3.1"
-    fi
+    hbase_hadoop3_versions="3.1.1 3.1.2 3.2.0 3.2.1 3.2.2 3.3.0 3.3.1"
   fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"