// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
// FIXME: convert this to a Matrix job
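// A rough, illustrative sketch of the Declarative `matrix` form this FIXME refers to;
// the axis name and values below are hypothetical placeholders, not a settled design:
//
//   matrix {
//     axes {
//       axis {
//         name 'JDK'
//         values 'java-8', 'java-11'
//       }
//     }
//     stages {
//       stage('yetus check') {
//         steps {
//           // run the Yetus driver with SET_JAVA_HOME derived from the JDK axis value
//         }
//       }
//     }
//   }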
agent {
label 'Hadoop'
}
options {
// N.B. this is per-branch, which means per PR
disableConcurrentBuilds()
buildDiscarder(logRotator(daysToKeepStr: '15', artifactDaysToKeepStr: '5'))
timestamps()
skipDefaultCheckout()
}
environment {
SRC_REL = 'src'
PATCH_REL = 'output'
YETUS_REL = 'yetus'
DOCKERFILE_REL = "${SRC_REL}/dev/docker/Dockerfile.yetus"
YETUS_DRIVER_REL = "${SRC_REL}/dev/jenkins_precommit_github_yetus.sh"
// Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
YETUS_VERSION = 'rel/0.12.0'
PLUGINS = 'all,-findbugs,-gitlab'
//GENERAL_CHECK_PLUGINS = 'all,-compile,-javac,-javadoc,-jira,-shadedjars,-unit'
//JDK_SPECIFIC_PLUGINS = 'compile,github,htmlout,javac,javadoc,maven,mvninstall,shadedjars,unit'
// Output from surefire; sadly the archive function in Yetus only works on file names.
ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.*.txt,*.dumpstream,*.dump'
// These tests currently have known failures. Once they burn down to 0, remove them from here so that new problems cause a failure.
//TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
TESTS_FILTER = 'dummy'
//EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${CHANGE_TARGET}/lastSuccessfulBuild/artifact/excludes"
// A global view of paths. Parallel stages can land on the same host concurrently, so each
// stage works in its own subdirectory. There is an "output" directory under each of these,
// which we retrieve after the build is complete.
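// For example, the general-check stage works out to the following layout (illustrative,
// derived from the WORKDIR/SOURCEDIR/YETUSDIR/PATCHDIR variables defined further down):
//   ${WORKSPACE}/yetus-general-check/
//     src/     - the checked-out project source (SRC_REL)
//     yetus/   - the Apache Yetus checkout (YETUS_REL)
//     output/  - the Yetus output dir that gets archived (PATCH_REL)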
WORKDIR_REL_GENERAL_CHECK = 'yetus-general-check'
//WORKDIR_REL_JDK8_HADOOP3_CHECK = 'yetus-jdk8-hadoop3-check'
//WORKDIR_REL_JDK11_HADOOP3_CHECK = 'yetus-jdk11-hadoop3-check'
GITHUB_USE_TOKEN = 'true'
}
parameters {
booleanParam(name: 'DEBUG',
defaultValue: false,
description: 'Print extra output for debugging the Jenkins job and Yetus')
}
stages {
stage ('precommit checks') {
// Disabled while we don't have actual parallel tasks, as this ties up an extra executor
// parallel {
// stage ('yetus general check') {
// agent {
// node {
// label 'Hadoop'
// }
// }
options {
timeout (time: 3, unit: 'HOURS')
timestamps()
}
environment {
// customized per parallel stage
//PLUGINS = "${GENERAL_CHECK_PLUGINS}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
// identical for all parallel stages
WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
YETUSDIR = "${WORKDIR}/${YETUS_REL}"
SOURCEDIR = "${WORKDIR}/${SRC_REL}"
PATCHDIR = "${WORKDIR}/${PATCH_REL}"
BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
}
steps {
dir("${SOURCEDIR}") {
checkout scm
}
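// Check out the Apache Yetus release named by YETUS_VERSION for the driver script to use.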
dir("${YETUSDIR}") {
checkout([
$class : 'GitSCM',
branches : [[name: "${YETUS_VERSION}"]],
userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
)
}
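// Run the Yetus driver from inside the per-stage work directory. The GitHub
// credentials are exposed so that Yetus' github plugin can report back on the PR.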
dir("${WORKDIR}") {
withCredentials([
usernamePassword(
credentialsId: 'c06659ac-4c77-499f-9b9d-485f1b58792c',
passwordVariable: 'GITHUB_PASSWORD',
usernameVariable: 'GITHUB_USER'
)]) {
sh label: 'test-patch', script: '''#!/bin/bash -e
hostname -a ; pwd ; ls -la
printenv 2>&1 | sort
echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
"${YETUS_DRIVER}"
'''
}
}
}
post {
always {
// Has to be relative to WORKSPACE.
junit testResults: "${WORKDIR_REL}/${PATCH_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
publishHTML target: [
allowMissing: true,
keepAll: true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "${WORKDIR_REL}/${PATCH_REL}",
reportFiles: 'report.html',
reportName: 'PR General Check Report'
]
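// gzip the (potentially large) surefire output logs, then archive the Yetus output,
// both top-level and nested files, excluding the 'precommit' subdirectory.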
sh "find ${WORKDIR_REL}/${PATCH_REL} -name org.apache\\*.txt -exec gzip {} \\;"
archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
}
// Jenkins pipeline jobs fill up the agents' disks on PRs without this :(
cleanup() {
script {
sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
# See YETUS-764
if [ -f "${PATCHDIR}/pidfile.txt" ]; then
echo "test-patch process appears to still be running: killing"
kill `cat "${PATCHDIR}/pidfile.txt"` || true
sleep 10
fi
if [ -f "${PATCHDIR}/cidfile.txt" ]; then
echo "test-patch container appears to still be running: killing"
docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
fi
# See HADOOP-13951
chmod -R u+rxw "${WORKSPACE}"
'''
dir ("${WORKDIR}") {
deleteDir()
}
}
}
}
//}
//}
}
}
post {
// Jenkins pipeline jobs fill up the agents' disks on PRs without this :(
cleanup() {
script {
sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
# See HADOOP-13951
chmod -R u+rxw "${WORKSPACE}"
'''
deleteDir()
}
}
}
}