PHOENIX-6163 Move CI to ASF Jenkins for connectors

add .asf.yaml
update ASF maven parent to 23
add spotbugs reporting
copy and adapt postcommit job from core
copy and adapt Github PR precommit job from core
diff --git a/.asf.yaml b/.asf.yaml
new file mode 100644
index 0000000..eeed404
--- /dev/null
+++ b/.asf.yaml
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file controls the integration of the Phoenix Connectors subproject 
+# with ASF infrastructure. Refer to
+# https://cwiki.apache.org/confluence/display/INFRA/git+-+.asf.yaml+features
+# for details. Be careful when changing the contents of this file since it
+# may affect many developers of the project and make sure to discuss the
+# changes with dev@ before committing.
+
+notifications:
+    commits:      commits@phoenix.apache.org
+    issues:       issues@phoenix.apache.org
+    pullrequests: issues@phoenix.apache.org
+    jira_options: link label comment
diff --git a/dev/Jenkinsfile b/dev/Jenkinsfile
new file mode 100644
index 0000000..0af7d00
--- /dev/null
+++ b/dev/Jenkinsfile
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+pipeline {
+    agent {
+        dockerfile {
+            dir 'dev/docker'
+            filename 'Dockerfile'
+            label 'Hadoop'
+        }
+    }
+    
+    environment {
+        MAVEN_OPTS = '-Xmx3G'
+    }
+
+    options {
+        buildDiscarder(logRotator(daysToKeepStr: '30'))
+        timestamps()
+    }
+
+    stages {
+
+        stage('BuildAndTest') {
+            options {
+                timeout(time: 3, unit: 'HOURS')
+            }
+            steps {
+                sh """#!/bin/bash
+                    ulimit -a
+                    mvn clean verify -B
+                """
+            }
+            post {
+                always {
+                    archiveArtifacts artifacts: '**/target/surefire-reports/*.txt', allowEmptyArchive: true
+                    archiveArtifacts artifacts: '**/target/surefire-reports/*.dumpstream', allowEmptyArchive: true
+                    archiveArtifacts artifacts: '**/target/failsafe-reports/*.txt', allowEmptyArchive: true
+                    archiveArtifacts artifacts: '**/target/failsafe-reports/*.dumpstream', allowEmptyArchive: true
+                    junit '**/target/surefire-reports/TEST-*.xml'
+                    junit '**/target/failsafe-reports/TEST-*.xml'
+                }
+            }
+        }
+    }
+
+    post {
+
+        always {
+            emailext(
+                subject: "Apache-Phoenix-Connectors | Build ${BUILD_DISPLAY_NAME} ${currentBuild.currentResult}",
+                to: 'commits@phoenix.apache.org',
+                replyTo: 'commits@phoenix.apache.org',
+                mimeType: 'text/html',
+                recipientProviders: [
+                    [$class: "DevelopersRecipientProvider"],
+                    [$class: 'CulpritsRecipientProvider'],
+                    [$class: 'RequesterRecipientProvider']],
+                body: """
+<a href="http://phoenix.apache.org"><img src='http://phoenix.apache.org/images/phoenix-logo-small.png'/></a>
+<br>build ${BUILD_DISPLAY_NAME} status <b>${currentBuild.currentResult}</b><hr/>
+<b>Build ${BUILD_DISPLAY_NAME}</b> ${BUILD_URL}
+<hr/>
+"""
+           )
+        }
+
+        cleanup {
+            deleteDir()
+        }
+    }
+}
\ No newline at end of file
diff --git a/dev/Jenkinsfile.github b/dev/Jenkinsfile.github
new file mode 100644
index 0000000..ad8cb1f
--- /dev/null
+++ b/dev/Jenkinsfile.github
@@ -0,0 +1,174 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+pipeline {
+
+    //FIXME convert this to Matrix job
+    agent {
+        label 'Hadoop'
+    }
+
+    options {
+        // N.B. this is per-branch, which means per PR
+        disableConcurrentBuilds()
+        buildDiscarder(logRotator(numToKeepStr: '15'))
+        timeout (time: 9, unit: 'HOURS')
+        timestamps()
+        skipDefaultCheckout()
+    }
+
+    environment {
+        SRC_REL = 'src'
+        PATCH_REL = 'output'
+        YETUS_REL = 'yetus'
+        DOCKERFILE_REL = "${SRC_REL}/dev/docker/Dockerfile.yetus"
+        YETUS_DRIVER_REL = "${SRC_REL}/dev/jenkins_precommit_github_yetus.sh"
+        // Branch or tag name.  Yetus release tags are 'rel/X.Y.Z'
+        YETUS_VERSION = 'rel/0.12.0'
+        PLUGINS= 'all,-findbugs,-gitlab'
+        //GENERAL_CHECK_PLUGINS = 'all,-compile,-javac,-javadoc,-jira,-shadedjars,-unit'
+        //JDK_SPECIFIC_PLUGINS = 'compile,github,htmlout,javac,javadoc,maven,mvninstall,shadedjars,unit'
+        // output from surefire; sadly the archive function in yetus only works on file names.
+        ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.*.txt,*.dumpstream,*.dump'
+        // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
+        //TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
+        TESTS_FILTER = 'dummy'
+        //EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${CHANGE_TARGET}/lastSuccessfulBuild/artifact/excludes"
+
+        // a global view of paths. parallel stages can land on the same host concurrently, so each
+        // stage works in its own subdirectory. there is an "output" under each of these
+        // directories, which we retrieve after the build is complete.
+        WORKDIR_REL_GENERAL_CHECK = 'yetus-general-check'
+        //WORKDIR_REL_JDK8_HADOOP3_CHECK = 'yetus-jdk8-hadoop3-check'
+        //WORKDIR_REL_JDK11_HADOOP3_CHECK = 'yetus-jdk11-hadoop3-check'
+        GITHUB_USE_TOKEN = 'true'
+    }
+
+    parameters {
+        booleanParam(name: 'DEBUG',
+               defaultValue: false,
+               description: 'Print extra outputs for debugging the jenkins job and yetus')
+    }
+
+    stages {
+        stage ('precommit checks') {
+//Disabled while don't have actual parallel tasks, as this ties up an extra executor
+//            parallel {
+//                stage ('yetus general check') {
+//                    agent {
+//                        node {
+//                            label 'Hadoop'
+//                        }
+//                    }
+                    environment {
+                        // customized per parallel stage
+                        //PLUGINS = "${GENERAL_CHECK_PLUGINS}"
+                        SET_JAVA_HOME = '/usr/lib/jvm/java-8'
+                        WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
+                        // identical for all parallel stages
+                        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
+                        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
+                        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
+                        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
+                        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
+                        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
+                        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
+                    }
+                    steps {
+                        dir("${SOURCEDIR}") {
+                            checkout scm
+                        }
+                        dir("${YETUSDIR}") {
+                            checkout([
+                              $class           : 'GitSCM',
+                              branches         : [[name: "${YETUS_VERSION}"]],
+                              userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
+                            )
+                        }
+                        dir("${WORKDIR}") {
+                            withCredentials([
+                                usernamePassword(
+                                  credentialsId: 'c06659ac-4c77-499f-9b9d-485f1b58792c',
+                                  passwordVariable: 'GITHUB_PASSWORD',
+                                  usernameVariable: 'GITHUB_USER'
+                                )]) {
+                                sh label: 'test-patch', script: '''#!/bin/bash -e
+                                    hostname -a ; pwd ; ls -la
+                                    printenv 2>&1 | sort
+                                    echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
+                                    "${YETUS_DRIVER}"
+                                '''
+                            }
+                        }
+                    }
+                    post {
+                        always {
+                            // Has to be relative to WORKSPACE.
+                            junit testResults: "${WORKDIR_REL}/${PATCH_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
+                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
+                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
+                            publishHTML target: [
+                              allowMissing: true,
+                              keepAll: true,
+                              alwaysLinkToLastBuild: true,
+                              // Has to be relative to WORKSPACE
+                              reportDir: "${WORKDIR_REL}/${PATCH_REL}",
+                              reportFiles: 'report.html',
+                              reportName: 'PR General Check Report'
+                            ]
+                        }
+                        // Jenkins pipeline jobs fill slaves on PRs without this :(
+                        cleanup() {
+                            script {
+                                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
+                                    # See YETUS-764
+                                    if [ -f "${PATCHDIR}/pidfile.txt" ]; then
+                                      echo "test-patch process appears to still be running: killing"
+                                      kill `cat "${PATCHDIR}/pidfile.txt"` || true
+                                      sleep 10
+                                    fi
+                                    if [ -f "${PATCHDIR}/cidfile.txt" ]; then
+                                      echo "test-patch container appears to still be running: killing"
+                                      docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
+                                    fi
+                                    # See HADOOP-13951
+                                    chmod -R u+rxw "${WORKSPACE}"
+                                '''
+                                dir ("${WORKDIR}") {
+                                    deleteDir()
+                                }
+                            }
+                        }
+                    }
+                //}
+            //}
+        }
+    }
+
+    post {
+        // Jenkins pipeline jobs fill slaves on PRs without this :(
+        cleanup() {
+            script {
+                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
+                    # See HADOOP-13951
+                    chmod -R u+rxw "${WORKSPACE}"
+                    '''
+                deleteDir()
+            }
+        }
+    }
+}
diff --git a/dev/cache-apache-project-artifact.sh b/dev/cache-apache-project-artifact.sh
new file mode 100755
index 0000000..8539350
--- /dev/null
+++ b/dev/cache-apache-project-artifact.sh
@@ -0,0 +1,139 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This was lovingly copied from Apache HBase
+
+set -e
+function usage {
+  echo "Usage: ${0} [options] /path/to/download/file.tar.gz download/fragment/eg/project/subdir/some-artifact-version.tar.gz"
+  echo ""
+  echo "    --force                       for a redownload even if /path/to/download/file.tar.gz exists."
+  echo "    --working-dir /path/to/use    Path for writing tempfiles. must exist."
+  echo "                                  defaults to making a directory via mktemp that we clean."
+  echo "    --keys url://to/project/KEYS  where to get KEYS. needed to check signature on download."
+  echo ""
+  exit 1
+}
+# if no args specified, show usage
+if [ $# -lt 2 ]; then
+  usage
+fi
+
+
+# Get arguments
+declare done_if_cached="true"
+declare working_dir
+declare cleanup="true"
+declare keys
+while [ $# -gt 0 ]
+do
+  case "$1" in
+    --force) shift; done_if_cached="false";;
+    --working-dir) shift; working_dir=$1; cleanup="false"; shift;;
+    --keys) shift; keys=$1; shift;;
+    --) shift; break;;
+    -*) usage ;;
+    *)  break;;  # terminate while loop
+  esac
+done
+
+# should still have required args
+if [ $# -lt 2 ]; then
+  usage
+fi
+
+target="$1"
+artifact="$2"
+
+if [ -f "${target}" ] && [ "true" = "${done_if_cached}" ]; then
+  echo "Reusing existing download of '${artifact}'."
+  exit 0
+fi
+
+if [ -z "${working_dir}" ]; then
+  if ! working_dir="$(mktemp -d -t hbase-download-apache-artifact.XXXXXXXX)" ; then
+    echo "Failed to create temporary working directory. Please specify via --working-dir" >&2
+    exit 1
+  fi
+else
+  # absolutes please
+  working_dir="$(cd "$(dirname "${working_dir}")"; pwd)/$(basename "${working_dir}")"
+  if [ ! -d "${working_dir}" ]; then
+    echo "passed working directory '${working_dir}' must already exist." >&2
+    exit 1
+  fi
+fi
+
+function cleanup {
+  if [ -n "${keys}" ]; then
+    echo "Stopping gpg agent daemon"
+    gpgconf --homedir "${working_dir}/.gpg" --kill gpg-agent
+    echo "Stopped gpg agent daemon"
+  fi
+
+  if [ "true" = "${cleanup}" ]; then
+    echo "cleaning up temp space."
+    rm -rf "${working_dir}"
+  fi
+}
+trap cleanup EXIT SIGQUIT
+
+echo "New download of '${artifact}'"
+
+# N.B. this comes first so that if gpg falls over we skip the expensive download.
+if [ -n "${keys}" ]; then
+  if [ ! -d "${working_dir}/.gpg" ]; then
+    rm -rf "${working_dir}/.gpg"
+    mkdir -p "${working_dir}/.gpg"
+    chmod -R 700 "${working_dir}/.gpg"
+  fi
+
+  echo "installing project KEYS"
+  curl -L --fail -o "${working_dir}/KEYS" "${keys}"
+  if ! gpg --homedir "${working_dir}/.gpg" --import "${working_dir}/KEYS" ; then
+    echo "ERROR importing the keys via gpg failed. If the output above mentions this error:" >&2
+    echo "    gpg: can't connect to the agent: File name too long" >&2
+    # we mean to give them the command to run, not to run it.
+    #shellcheck disable=SC2016
+    echo 'then you prolly need to create /var/run/user/$(id -u)' >&2
+    echo "see this thread on gnupg-users: https://s.apache.org/uI7x" >&2
+    exit 2
+  fi
+
+  echo "downloading signature"
+  curl -L --fail -o "${working_dir}/artifact.asc" "https://archive.apache.org/dist/${artifact}.asc"
+fi
+
+echo "downloading artifact"
+if ! curl --dump-header "${working_dir}/artifact_download_headers.txt" -L --fail -o "${working_dir}/artifact" "https://www.apache.org/dyn/closer.lua?filename=${artifact}&action=download" ; then
+  echo "Artifact wasn't in mirror system. falling back to archive.a.o."
+  curl --dump-header "${working_dir}/artifact_fallback_headers.txt" -L --fail -o "${working_dir}/artifact" "https://archive.apache.org/dist/${artifact}"
+fi
+
+if [ -n "${keys}" ]; then
+  echo "verifying artifact signature"
+  gpg --homedir "${working_dir}/.gpg" --verify "${working_dir}/artifact.asc"
+  echo "signature good."
+fi
+
+echo "moving artifact into place at '${target}'"
+# ensure we're on the same filesystem
+mv "${working_dir}/artifact" "${target}.copying"
+# attempt atomic move
+mv "${target}.copying" "${target}"
+echo "all done!"
\ No newline at end of file
diff --git a/dev/docker/Dockerfile b/dev/docker/Dockerfile
new file mode 100644
index 0000000..4d8e33f
--- /dev/null
+++ b/dev/docker/Dockerfile
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM apache/yetus:0.12.0
+
+RUN groupadd --non-unique -g 910 jenkins || true
+RUN useradd -g 910 -u 910 -m jenkins || true
\ No newline at end of file
diff --git a/dev/docker/Dockerfile.yetus b/dev/docker/Dockerfile.yetus
new file mode 100644
index 0000000..a34676a
--- /dev/null
+++ b/dev/docker/Dockerfile.yetus
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM apache/yetus:0.12.0
+
+#####
+# Update SpotBugs
+#####
+RUN rm -rf /opt/spotbugs \
+    && git clone https://github.com/stoty/spotbugs.git \
+    && cd spotbugs \
+    && git checkout PHOENIX-1161-backport \
+    && ./gradlew clean build -x test \
+    && mkdir /opt/spotbugs \
+    && tar -C /opt/spotbugs --strip-components 1 -xpf spotbugs/build/distributions/spotbugs-4.1.2.tgz \
+    && chmod a+rx /opt/spotbugs/bin/* \
+    && cd .. \
+    && rm -rf ./spotbugs
+ENV SPOTBUGS_HOME /opt/spotbugs
+#####
+# Enable spotbugs prefix for maven
+#####
+RUN sed -i -e 's/<\/pluginGroups>/<pluginGroup>com.github.spotbugs<\/pluginGroup><\/pluginGroups>/' /etc/maven/settings.xml
diff --git a/dev/gather_machine_environment.sh b/dev/gather_machine_environment.sh
new file mode 100755
index 0000000..265a328
--- /dev/null
+++ b/dev/gather_machine_environment.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# SHELLDOC-IGNORE
+
+set -e
+function usage {
+  echo "Usage: ${0} /path/for/output/dir"
+  echo ""
+  echo "  Gather info about a build machine that test harnesses should poll before running."
+  echo "  presumes you'll then archive the passed output dir."
+
+  exit 1
+}
+
+if [ "$#" -lt 1 ]; then
+  usage
+fi
+
+
+declare output=$1
+
+if [ ! -d "${output}" ] || [ ! -w "${output}" ]; then
+  echo "Specified output directory must exist and be writable." >&2
+  exit 1
+fi
+
+echo "getting machine specs, find in ${BUILD_URL}/artifact/${output}/"
+echo "JAVA_HOME: ${JAVA_HOME}" >"${output}/java_home" 2>&1 || true
+ls -l "${JAVA_HOME}" >"${output}/java_home_ls" 2>&1 || true
+echo "MAVEN_HOME: ${MAVEN_HOME}" >"${output}/mvn_home" 2>&1 || true
+mvn --offline --version  >"${output}/mvn_version" 2>&1 || true
+cat /proc/cpuinfo >"${output}/cpuinfo" 2>&1 || true
+cat /proc/meminfo >"${output}/meminfo" 2>&1 || true
+cat /proc/diskstats >"${output}/diskstats" 2>&1 || true
+cat /sys/block/sda/stat >"${output}/sys-block-sda-stat" 2>&1 || true
+df -h >"${output}/df-h" 2>&1 || true
+ps -Aww >"${output}/ps-Aww" 2>&1 || true
+ifconfig -a >"${output}/ifconfig-a" 2>&1 || true
+lsblk -ta >"${output}/lsblk-ta" 2>&1 || true
+lsblk -fa >"${output}/lsblk-fa" 2>&1 || true
+ulimit -a >"${output}/ulimit-a" 2>&1 || true
+uptime >"${output}/uptime" 2>&1 || true
diff --git a/dev/jenkins_precommit_github_yetus.sh b/dev/jenkins_precommit_github_yetus.sh
new file mode 100755
index 0000000..b9589f4
--- /dev/null
+++ b/dev/jenkins_precommit_github_yetus.sh
@@ -0,0 +1,151 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+
+# place ourselves in the directory containing the phoenix-connectors and yetus checkouts
+cd "$(dirname "$0")/../.."
+echo "executing from $(pwd)"
+
+if [[ "true" = "${DEBUG}" ]]; then
+  set -x
+  printenv 2>&1 | sort
+fi
+
+declare -i missing_env=0
+declare -a required_envs=(
+  # these ENV variables define the required API with Jenkinsfile_GitHub
+  "ARCHIVE_PATTERN_LIST"
+  "BUILD_URL_ARTIFACTS"
+  "DOCKERFILE"
+  "GITHUB_PASSWORD"
+  "GITHUB_USER"
+  "PATCHDIR"
+  "PLUGINS"
+  "SET_JAVA_HOME"
+  "SOURCEDIR"
+  "TESTS_FILTER"
+  "YETUSDIR"
+)
+# Validate params
+for required_env in "${required_envs[@]}"; do
+  if [ -z "${!required_env}" ]; then
+    echo "[ERROR] Required environment variable '${required_env}' is not set."
+    missing_env=${missing_env}+1
+  fi
+done
+
+if [ ${missing_env} -gt 0 ]; then
+  echo "[ERROR] Please set the required environment variables before invoking. If this error is " \
+       "on Jenkins, then please file a JIRA about the error."
+  exit 1
+fi
+
+# TODO (HBASE-23900): cannot assume test-patch runs directly from sources
+TESTPATCHBIN="${YETUSDIR}/precommit/src/main/shell/test-patch.sh"
+
+# this must be clean for every run
+rm -rf "${PATCHDIR}"
+mkdir -p "${PATCHDIR}"
+
+# Checking on H* machine nonsense
+mkdir "${PATCHDIR}/machine"
+"${SOURCEDIR}/dev/gather_machine_environment.sh" "${PATCHDIR}/machine"
+
+# If CHANGE_URL is set (e.g., Github Branch Source plugin), process it.
+# Otherwise exit, because we don't want Phoenix connectors to do a
+# full build.  We wouldn't normally do this check for smaller
+# projects. :)
+if [[ -z "${CHANGE_URL}" ]]; then
+  echo "Full build skipped" > "${PATCHDIR}/report.html"
+  exit 0
+fi
+# enable debug output for yetus
+if [[ "true" = "${DEBUG}" ]]; then
+  YETUS_ARGS+=("--debug")
+fi
+# If we're doing docker, make sure we don't accidentally pollute the image with a host java path
+if [ -n "${JAVA_HOME}" ]; then
+  unset JAVA_HOME
+fi
+YETUS_ARGS+=("--ignore-unknown-options=true")
+YETUS_ARGS+=("--patch-dir=${PATCHDIR}")
+# where the source is located
+YETUS_ARGS+=("--basedir=${SOURCEDIR}")
+# our project defaults come from a personality file
+# which will get loaded automatically by setting the project name
+YETUS_ARGS+=("--project=phoenix-connectors")
+# lots of different output formats
+YETUS_ARGS+=("--brief-report-file=${PATCHDIR}/brief.txt")
+YETUS_ARGS+=("--console-report-file=${PATCHDIR}/console.txt")
+YETUS_ARGS+=("--html-report-file=${PATCHDIR}/report.html")
+# enable writing back to Github
+if [[ "true" = "${GITHUB_USE_TOKEN}" ]]; then
+  YETUS_ARGS+=("--github-token=${GITHUB_PASSWORD}")
+else
+  YETUS_ARGS+=("--github-user=${GITHUB_USER}")
+  YETUS_ARGS+=("--github-password=${GITHUB_PASSWORD}")
+fi
+# auto-kill any surefire stragglers during unit test runs
+YETUS_ARGS+=("--reapermode=kill")
+# set relatively high limits for ASF machines
+# changing these to higher values may cause problems
+# with other jobs on systemd-enabled machines
+YETUS_ARGS+=("--dockermemlimit=20g")
+# -1 spotbugs issues that show up prior to the patch being applied
+#YETUS_ARGS+=("--spotbugs-strict-precheck")
+# rsync these files back into the archive dir
+YETUS_ARGS+=("--archive-list=${ARCHIVE_PATTERN_LIST}")
+# URL for user-side presentation in reports and such to our artifacts
+YETUS_ARGS+=("--build-url-artifacts=${BUILD_URL_ARTIFACTS}")
+# plugins to enable
+YETUS_ARGS+=("--plugins=${PLUGINS},-findbugs")
+# run in docker mode and specifically point to our
+# Dockerfile since we don't want to use the auto-pulled version.
+YETUS_ARGS+=("--docker")
+YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
+YETUS_ARGS+=("--mvn-custom-repos")
+YETUS_ARGS+=("--java-home=${SET_JAVA_HOME}")
+YETUS_ARGS+=("--whitespace-eol-ignore-list=.*/generated/.*")
+YETUS_ARGS+=("--whitespace-tabs-ignore-list=.*/generated/.*")
+YETUS_ARGS+=("--tests-filter=${TESTS_FILTER}")
+YETUS_ARGS+=("--personality=${SOURCEDIR}/dev/phoenix-connectors-personality.sh")
+#YETUS_ARGS+=("--quick-hadoopcheck")
+#YETUS_ARGS+=("--skip-errorprone")
+# effectively treat dev-support as a custom maven module
+YETUS_ARGS+=("--skip-dirs=dev")
+# For testing with specific hadoop version. Activates corresponding profile in maven runs.
+#if [[ -n "${HADOOP_PROFILE}" ]]; then
+#  # Master has only Hadoop3 support. We don't need to activate any profile.
+#  # The Jenkinsfile should not attempt to run any Hadoop2 tests.
+#  if [[ "${BRANCH_NAME}" =~ branch-2* ]]; then
+#    YETUS_ARGS+=("--hadoop-profile=${HADOOP_PROFILE}")
+#  fi
+#fi
+if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
+  YETUS_ARGS+=("--exclude-tests-url=${EXCLUDE_TESTS_URL}")
+fi
+# help keep the ASF boxes clean
+YETUS_ARGS+=("--sentinel")
+# use emoji vote so it is easier to find the broken line
+YETUS_ARGS+=("--github-use-emoji-vote")
+
+echo "Launching yetus with command line:"
+echo "${TESTPATCHBIN} ${YETUS_ARGS[*]}"
+
+/usr/bin/env bash "${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
diff --git a/dev/phoenix-connectors-personality.sh b/dev/phoenix-connectors-personality.sh
new file mode 100755
index 0000000..a0adab1
--- /dev/null
+++ b/dev/phoenix-connectors-personality.sh
@@ -0,0 +1,367 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Based on hbase-personality.sh of the HBase project
+#
+# You'll need a local installation of
+# [Apache Yetus' precommit checker](http://yetus.apache.org/documentation/0.12.0/#yetus-precommit)
+# to use this personality.
+#
+# Download from: http://yetus.apache.org/downloads/ . You can either grab the source artifact and
+# build from it, or use the convenience binaries provided on that download page.
+#
+# To run against, e.g. PHOENIX-5032 you'd then do
+# ```bash
+# test-patch --personality=dev/phoenix-connectors-personality.sh PHOENIX-5032
+# ```
+#
+# pass the `--sentinel` flag if you want to allow test-patch to destructively alter local working
+# directory / branch in order to have things match what the issue patch requests.
+
+personality_plugins "all"
+
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+    echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
+# work around yetus overwriting JAVA_HOME from our docker image
+#function docker_do_env_adds
+#{
+#  declare k
+#  for k in "${DOCKER_EXTRAENVS[@]}"; do
+#    if [[ "JAVA_HOME" == "${k}" ]]; then
+#      if [ -n "${JAVA_HOME}" ]; then
+#        DOCKER_EXTRAARGS+=("--env=JAVA_HOME=${JAVA_HOME}")
+#      fi
+#    else
+#      DOCKER_EXTRAARGS+=("--env=${k}=${!k}")
+#    fi
+#  done
+#}
+
+
+## @description  Globals specific to this personality
+## @audience     private
+## @stability    evolving
+function personality_globals
+{
+  BUILDTOOL=maven
+  #shellcheck disable=SC2034
+  PROJECT_NAME=phoenix-connectors
+  #shellcheck disable=SC2034
+  PATCH_BRANCH_DEFAULT=master
+  #shellcheck disable=SC2034
+  JIRA_ISSUE_RE='^PHOENIX-[0-9]+$'
+  #shellcheck disable=SC2034
+  GITHUB_REPO="apache/phoenix-connectors"
+
+  # TODO use PATCH_BRANCH to select jdk versions to use.
+
+  # ASF Jenkins workers run up to two jobs per Agent. Docker doesn't do anything with nproc,
+  # any setting is simply set on the docker daemon user, and shared between containers.
+  # Thus, there is no way to protect containers from fork-bombing each other.
+  # Set nprocs higher than the combined process count of two jobs.
+  # see https://docs.docker.com/engine/reference/commandline/run/#set-ulimits-in-container-ulimit
+  # Note that this won't stop a container started after us from resetting the limit to a
+  # lower value, but should help if we are started later.
+  #shellcheck disable=SC2034
+  PROC_LIMIT=30000
+
+  # Set docker container to run with 20g. Default is 4g in yetus.
+  # See HBASE-19902 for how we arrived at 20g.
+  # TODO Doesn't seem to have effect in Yetus 0.12, set in cli instead
+  #shellcheck disable=SC2034
+  DOCKERMEMLIMIT=20g
+}
+
+## @description  Parse extra arguments required by personalities, if any.
+## @audience     private
+## @stability    evolving
+function personality_parse_args
+{
+  declare i
+
+  for i in "$@"; do
+    case ${i} in
+      --exclude-tests-url=*)
+        delete_parameter "${i}"
+        EXCLUDE_TESTS_URL=${i#*=}
+      ;;
+      --include-tests-url=*)
+        delete_parameter "${i}"
+        INCLUDE_TESTS_URL=${i#*=}
+      ;;
+      --hbase-profile=*)
+        delete_parameter "${i}"
+        HBASE_PROFILE=${i#*=}
+      ;;
+      --skip-errorprone)
+        delete_parameter "${i}"
+        SKIP_ERRORPRONE=true
+      ;;
+    esac
+  done
+}
+
+## @description  Queue up modules for this personality
+## @audience     private
+## @stability    evolving
+## @param        repostatus
+## @param        testtype
+function personality_modules
+{
+  local repostatus=$1
+  local testtype=$2
+  local extra=""
+  local jdk8module=""
+  local MODULES=("${CHANGED_MODULES[@]}")
+
+  yetus_info "Personality: ${repostatus} ${testtype}"
+
+  clear_personality_queue
+
+  # TODO: test with threads>0, once the tests are stable
+  extra="--threads=1 -DPhoenixPatchProcess"
+  if [[ "${PATCH_BRANCH}" = 4* ]]; then
+    extra="${extra} -Dhttps.protocols=TLSv1.2"
+  fi
+
+  # If we have HBASE_PROFILE specified pass along
+  # the hadoop.profile system property.
+  if [[ -n "${HBASE_PROFILE}" ]] ; then
+    extra="${extra} -Dhbase.profile=${HBASE_PROFILE}"
+  fi
+
+  # BUILDMODE value is 'full' when there is no patch to be tested, and we are running checks on
+  # full source code instead. In this case, do full compiles, tests, etc instead of per
+  # module.
+  # Used in nightly runs.
+  # If BUILDMODE is 'patch', for unit and compile testtypes, there is no need to run individual
+  # modules if root is included. HBASE-18505
+  if [[ "${BUILDMODE}" == "full" ]] || \
+     { { [[ "${testtype}" == unit ]] || [[ "${testtype}" == compile ]] || [[ "${testtype}" == checkstyle ]]; } && \
+     [[ "${MODULES[*]}" =~ \. ]]; }; then
+    MODULES=(.)
+  fi
+
+  # If the checkstyle configs change, check everything.
+  if [[ "${testtype}" == checkstyle ]] && [[ "${MODULES[*]}" =~ hbase-checkstyle ]]; then
+    MODULES=(.)
+  fi
+
+  if [[ ${testtype} == mvninstall ]]; then
+    # shellcheck disable=SC2086
+    personality_enqueue_module . ${extra}
+    return
+  fi
+
+  if [[ ${testtype} == spotbugs ]]; then
+    # Run spotbugs on each module individually to diff pre-patch and post-patch results and
+    # report new warnings for changed modules only.
+    # For some reason, spotbugs on root is not working, but running on individual modules is
+    # working. For time being, let it run on original list of CHANGED_MODULES. HBASE-19491
+    for module in "${CHANGED_MODULES[@]}"; do
+      # skip spotbugs on any module that lacks content in `src/main/java`
+      if [[ "$(find "${BASEDIR}/${module}" -iname '*.java' -and -ipath '*/src/main/java/*' \
+          -type f | wc -l | tr -d '[:space:]')" -eq 0 ]]; then
+        yetus_debug "no java files found under ${module}/src/main/java. skipping."
+        continue
+      else
+        # shellcheck disable=SC2086
+        personality_enqueue_module ${module} ${extra}
+      fi
+    done
+    return
+  fi
+
+  if [[ ${testtype} == compile ]] && [[ "${SKIP_ERRORPRONE}" != "true" ]] &&
+      [[ "${PATCH_BRANCH}" != branch-1* ]] ; then
+    extra="${extra} -PerrorProne"
+  fi
+
+  # If EXCLUDE_TESTS_URL/INCLUDE_TESTS_URL is set, fetches the url
+  # and sets -Dtest.exclude.pattern/-Dtest to exclude/include the
+  # tests respectively.
+  if [[ ${testtype} == unit ]]; then
+    local tests_arg=""
+    get_include_exclude_tests_arg tests_arg
+    #Phoenix projects traditionally run the full IT suite from Precommit
+    #keep it that way to ease transition
+    extra="verify ${extra} ${tests_arg}"
+
+    # Inject the jenkins build-id for our surefire invocations
+    # Used by zombie detection stuff, even though we're not including that yet.
+    if [ -n "${BUILD_ID}" ]; then
+      extra="${extra} -Dbuild.id=${BUILD_ID}"
+    fi
+
+  fi
+
+  for module in "${MODULES[@]}"; do
+    # shellcheck disable=SC2086
+    personality_enqueue_module ${module} ${extra}
+  done
+}
+
+## @description places where we override the built in assumptions about what tests to run
+## @audience    private
+## @stability   evolving
+## @param       filename of changed file
+function personality_file_tests
+{
+  local filename=$1
+  yetus_debug "Phoenix connectors specific personality_file_tests"
+  # If we change checkstyle configs, run checkstyle
+  if [[ ${filename} =~ checkstyle.*\.xml ]]; then
+    yetus_debug "tests/checkstyle: ${filename}"
+    add_test checkstyle
+  fi
+  # fallback to checking which tests based on what yetus would do by default
+  if declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; then
+    "${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
+  elif declare -f builtin_personality_file_tests >/dev/null; then
+    builtin_personality_file_tests "${filename}"
+  fi
+}
+
+## @description  Uses relevant include/exclude env variable to fetch list of included/excluded
+##               tests and sets given variable to arguments to be passed to maven command.
+## @audience     private
+## @stability    evolving
+## @param        name of variable to set with maven arguments
+function get_include_exclude_tests_arg
+{
+  #Phoenix projects don't support this yet, but should
+  return
+  local  __resultvar=$1
+  yetus_info "EXCLUDE_TESTS_URL=${EXCLUDE_TESTS_URL}"
+  yetus_info "INCLUDE_TESTS_URL=${INCLUDE_TESTS_URL}"
+  if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
+      if wget "${EXCLUDE_TESTS_URL}" -O "excludes"; then
+        excludes=$(cat excludes)
+        yetus_debug "excludes=${excludes}"
+        if [[ -n "${excludes}" ]]; then
+          eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+        fi
+        rm excludes
+      else
+        yetus_error "Wget error $? in fetching excludes file from url" \
+             "${EXCLUDE_TESTS_URL}. Ignoring and proceeding."
+      fi
+  elif [[ -n "$INCLUDE_TESTS_URL" ]]; then
+      if wget "$INCLUDE_TESTS_URL" -O "includes"; then
+        includes=$(cat includes)
+        yetus_debug "includes=${includes}"
+        if [[ -n "${includes}" ]]; then
+          eval "${__resultvar}='-Dtest=${includes}'"
+        fi
+        rm includes
+      else
+        yetus_error "Wget error $? in fetching includes file from url" \
+             "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
+      fi
+  else
+    # Use branch specific exclude list when EXCLUDE_TESTS_URL and INCLUDE_TESTS_URL are empty
+    FLAKY_URL="https://ci-hadoop.apache.org/job/HBase/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/excludes/"
+    if wget "${FLAKY_URL}" -O "excludes"; then
+      excludes=$(cat excludes)
+        yetus_debug "excludes=${excludes}"
+        if [[ -n "${excludes}" ]]; then
+          eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
+        fi
+        rm excludes
+      else
+        yetus_error "Wget error $? in fetching excludes file from url" \
+             "${FLAKY_URL}. Ignoring and proceeding."
+      fi
+  fi
+}
+
+######################################
+
+add_test_type hbaseanti
+
+## @description  hbaseanti file filter
+## @audience     private
+## @stability    evolving
+## @param        filename
+function hbaseanti_filefilter
+{
+  local filename=$1
+
+  if [[ ${filename} =~ \.java$ ]]; then
+    add_test hbaseanti
+  fi
+}
+
+## @description  hbaseanti patch file check
+## @audience     private
+## @stability    evolving
+## @param        filename
+function hbaseanti_patchfile
+{
+  local patchfile=$1
+  local warnings
+  local result
+
+  if [[ "${BUILDMODE}" = full ]]; then
+    return 0
+  fi
+
+  if ! verify_needed_test hbaseanti; then
+    return 0
+  fi
+
+  big_console_header "Checking for known anti-patterns"
+
+  start_clock
+
+  warnings=$(${GREP} -c 'new TreeMap<byte.*()' "${patchfile}")
+  if [[ ${warnings} -gt 0 ]]; then
+    add_vote_table -1 hbaseanti "" "The patch appears to have anti-pattern where BYTES_COMPARATOR was omitted."
+    ((result=result+1))
+  fi
+
+  if [[ ${result} -gt 0 ]]; then
+    return 1
+  fi
+
+  add_vote_table +1 hbaseanti "" "Patch does not have any anti-patterns."
+  return 0
+}
+
+## @description  process the javac output for generating WARNING/ERROR
+## @audience     private
+## @stability    evolving
+## @param        input filename
+## @param        output filename
+# Override the default javac_logfilter so that we can do a sort before outputing the WARNING/ERROR.
+# This is because that the output order of the error prone warnings is not stable, so the diff
+# method will report unexpected errors if we do not sort it. Notice that a simple sort will cause
+# line number being sorted by lexicographical so the output maybe a bit strange to human but it is
+# really hard to sort by file name first and then line number and column number in shell...
+function hbase_javac_logfilter
+{
+  declare input=$1
+  declare output=$2
+
+  ${GREP} -E '\[(ERROR|WARNING)\] /.*\.java:' "${input}" | sort > "${output}"
+}
diff --git a/dev/rebuild_hbase.sh b/dev/rebuild_hbase.sh
new file mode 100755
index 0000000..e4c9211
--- /dev/null
+++ b/dev/rebuild_hbase.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# Rebuilds HBase with -Dhadoop.profile=3.0 locally, to work around PHOENIX-5993
+# Intended mainly for CI jobs, but can simplify manual rebuilds as well.
+
+# The name of the Apache Hbase source file
+HBASE_SOURCE_NAME="hbase-$1-src.tar.gz"
+# The relative path on the ASF mirrors for the Hbase source file
+HBASE_SOURCE_MIRROR_NAME="hbase/$1/$HBASE_SOURCE_NAME"
+
+# Downloads the specified HBase version source, extracts it,
+# then rebuilds and installs the maven artifacts locally with -Dhadoop.profile=3.0
+
+# Bail out early on bad usage: without exit the script would go on to build
+# "hbase--src.tar.gz" with an empty version.
+if [ $# -ne 1 ]; then
+    echo "Supply the HBase version as parameter i.e.: rebuild_hbase.sh 2.2.6" >&2
+    exit 1
+fi
+
+DEV_SUPPORT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+ARTIFACTS_DIR="$DEV_SUPPORT/artifacts"
+WORK_DIR="$DEV_SUPPORT/work"
+
+mkdir -p "$ARTIFACTS_DIR"
+mkdir -p "$WORK_DIR"
+
+$DEV_SUPPORT/cache-apache-project-artifact.sh --keys https://downloads.apache.org/hbase/KEYS \
+    --working-dir "$WORK_DIR" "$ARTIFACTS_DIR/$HBASE_SOURCE_NAME" "$HBASE_SOURCE_MIRROR_NAME"
+
+STARTDIR=$PWD
+cd "$ARTIFACTS_DIR" || exit 1
+tar xfz "$HBASE_SOURCE_NAME"
+cd "hbase-$1" || exit 1
+mvn clean install -Dhadoop.profile=3.0 -DskipTests -B
+cd "${STARTDIR}"
diff --git a/phoenix-kafka-base/pom.xml b/phoenix-kafka-base/pom.xml
index 7fb8a6a..db4aa6e 100644
--- a/phoenix-kafka-base/pom.xml
+++ b/phoenix-kafka-base/pom.xml
@@ -263,28 +263,6 @@
 						</execution>
 					</executions>
 				</plugin>
-				<!-- Add the ant-generated sources to the source path -->
-				<plugin>
-					<groupId>org.apache.maven.plugins</groupId>
-					<artifactId>maven-site-plugin</artifactId>
-					<dependencies>
-						<dependency>
-							<groupId>org.apache.maven.doxia</groupId>
-							<artifactId>doxia-module-markdown</artifactId>
-							<version>1.3</version>
-						</dependency>
-						<dependency>
-							<groupId>lt.velykis.maven.skins</groupId>
-							<artifactId>reflow-velocity-tools</artifactId>
-							<version>1.0.0</version>
-						</dependency>
-						<dependency>
-							<groupId>org.apache.velocity</groupId>
-							<artifactId>velocity</artifactId>
-							<version>1.7</version>
-						</dependency>
-					</dependencies>
-				</plugin>
 
 				<!-- Setup eclipse -->
 				<plugin>
@@ -327,16 +305,4 @@
 		</pluginManagement>
 	</build>
 
-        <reporting>
-            <plugins>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-project-info-reports-plugin</artifactId>
-                </plugin>
-                <plugin>
-                    <groupId>org.codehaus.mojo</groupId>
-                    <artifactId>findbugs-maven-plugin</artifactId>
-                </plugin>
-            </plugins>
-        </reporting>
 </project>
diff --git a/phoenix4-compat/pom.xml b/phoenix4-compat/pom.xml
index 5c6a487..c745eb5 100644
--- a/phoenix4-compat/pom.xml
+++ b/phoenix4-compat/pom.xml
@@ -25,7 +25,7 @@
     <parent>
         <groupId>org.apache</groupId>
         <artifactId>apache</artifactId>
-        <version>14</version>
+        <version>23</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
     <version>6.0.0-SNAPSHOT</version>
@@ -79,16 +79,4 @@
         </dependency>
     </dependencies>
 
-    <reporting>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-project-info-reports-plugin</artifactId>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>findbugs-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </reporting>
 </project>
\ No newline at end of file
diff --git a/phoenix5-compat/pom.xml b/phoenix5-compat/pom.xml
index 4e8737e..3dd05c0 100644
--- a/phoenix5-compat/pom.xml
+++ b/phoenix5-compat/pom.xml
@@ -25,7 +25,7 @@
     <parent>
         <groupId>org.apache</groupId>
         <artifactId>apache</artifactId>
-        <version>14</version>
+        <version>23</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
     <version>6.0.0-SNAPSHOT</version>
@@ -80,16 +80,4 @@
 
     </dependencies>
 
-    <reporting>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-project-info-reports-plugin</artifactId>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>findbugs-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </reporting>
 </project>
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 96cd57a..b4d3320 100644
--- a/pom.xml
+++ b/pom.xml
@@ -31,7 +31,7 @@
   <parent>
     <groupId>org.apache</groupId>
     <artifactId>apache</artifactId>
-    <version>14</version>
+    <version>23</version>
   </parent>
 
   <scm>
@@ -85,6 +85,9 @@
     <maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
     <maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
     <maven-shade-plugin.version>2.4.3</maven-shade-plugin.version>
+    <maven-project-info-reports-plugin.version>3.1.1</maven-project-info-reports-plugin.version>
+    <spotbugs-maven-plugin.version>4.1.3</spotbugs-maven-plugin.version>
+    <spotbugs.version>4.1.3</spotbugs.version>
 
     <maven-dependency-plugin.version>2.1</maven-dependency-plugin.version>
     <maven.assembly.version>2.5.2</maven.assembly.version>
@@ -128,7 +131,11 @@
             </toolchains>
           </configuration>
         </plugin>
-
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-project-info-reports-plugin</artifactId>
+          <version>${maven-project-info-reports-plugin.version}</version>
+        </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-compiler-plugin</artifactId>
@@ -184,6 +191,22 @@
           <!-- Avoid defining exclusions in pluginManagement as they are global.
                We already inherit some from the ASF parent pom. -->
         </plugin>
+        <plugin>
+          <groupId>com.github.spotbugs</groupId>
+          <artifactId>spotbugs-maven-plugin</artifactId>
+          <version>${spotbugs-maven-plugin.version}</version>
+          <dependencies>
+            <dependency>
+              <groupId>com.github.spotbugs</groupId>
+              <artifactId>spotbugs</artifactId>
+              <version>${spotbugs.version}</version>
+            </dependency>
+          </dependencies>
+          <configuration>
+            <effort>Max</effort>
+            <maxHeap>2048</maxHeap>
+          </configuration>
+        </plugin>
         <!-- We put slow-running tests into src/it and run them during the
             integration-test phase using the failsafe plugin. This way
             developers can run unit tests conveniently from the IDE or via
@@ -424,11 +447,6 @@
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <version>3.7.1</version>
-      </plugin>
-      <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>
@@ -442,24 +460,9 @@
             <exclude>dev/release_files/NOTICE</exclude>
             <!-- Exclude data files for examples -->
             <exclude>docs/*.csv</exclude>
-            <exclude>examples/*.csv</exclude>
-            <!-- Exclude SQL files from rat. Sqlline 1.1.9 doesn't work with
-                 comments on the first line of a file. -->
-            <exclude>examples/*.sql</exclude>
-            <exclude>examples/pig/testdata</exclude>
             <!-- precommit? -->
             <exclude>**/patchprocess/**</exclude>
-            <!-- Argparse is bundled to work around system Python version
-                 issues, compatibile with ALv2 -->
-            <exclude>bin/argparse-1.4.0/argparse.py</exclude>
-            <!-- Not our code -->
-            <exclude>python/requests-kerberos/**</exclude>
-            <exclude>python/phoenixdb/phoenixdb/avatica/proto/*</exclude>
-            <exclude>python/phoenixdb/*.rst</exclude>
-            <exclude>python/phoenixdb/ci/**</exclude>
-            <exclude>python/phoenixdb/doc/*.rst</exclude>
-            <exclude>python/phoenixdb/doc/conf.py</exclude>
-            <exclude>python/phoenixdb/doc/Makefile</exclude>
+            <exclude>**/derby.log</exclude>
           </excludes>
         </configuration>
       </plugin>
@@ -849,20 +852,38 @@
         <scala.binary.version>2.10</scala.binary.version>
       </properties>
     </profile>
+    <profile>
+      <id>spotbugs-site</id>
+      <activation>
+        <property>
+            <name>!spotbugs.site</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>com.github.spotbugs</groupId>
+            <artifactId>spotbugs-maven-plugin</artifactId>
+            <configuration>
+              <spotbugsXmlOutput>true</spotbugsXmlOutput>
+              <xmlOutput>true</xmlOutput>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 
   <reporting>
-      <plugins>
-          <plugin>
-              <groupId>org.apache.maven.plugins</groupId>
-              <artifactId>maven-project-info-reports-plugin</artifactId>
-              <version>3.0.0</version>
-          </plugin>
-          <plugin>
-              <groupId>org.codehaus.mojo</groupId>
-              <artifactId>findbugs-maven-plugin</artifactId>
-              <version>3.0.5</version>
-          </plugin>
-      </plugins>
+    <plugins>
+      <plugin>
+        <groupId>com.github.spotbugs</groupId>
+        <artifactId>spotbugs-maven-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+      </plugin>
+    </plugins>
   </reporting>
 </project>