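// Branch build pipeline: resolves the branch head SHA, runs quick validation
// jobs, fans out the per-OS build jobs in parallel, and then runs the
// verification jobs (clang analyzer, sharded autest, docs, cache tests,
// coverage) against the same SHA1.
//
// Setting DUMMY to true reroutes every downstream build to Branch_Builds/dummy,
// presumably to allow testing changes to this pipeline without running the real jobs.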
DUMMY = false
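// Find every buildable WorkflowJob named "os-*" under the current JOB_DIR folder
// and build them all in parallel, one stage per job. Note that reading
// Jenkins.instance from a sandboxed pipeline typically requires script approval.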
def doParallelBuilds() {
def jobnames = []
// Detect buildable os- jobs in branch folder
Jenkins.instance.getAllItems(Job.class).each {
if (it.class == org.jenkinsci.plugins.workflow.job.WorkflowJob && it.isBuildable()) {
def jobname = it.fullName
if (0 == jobname.indexOf(env.JOB_DIR + '/os-')) {
jobnames.add(jobname)
}
}
}
def builders = [:]
for (jobname in jobnames) {
def name = jobname
builders[name] = {
stage(name) {
echo name
def result = buildJob(name, name)
if (result == 'FAILURE') {
error("${name} build failed")
}
}
}
}
parallel builders
}
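// Trigger a downstream job with the shared branch parameters and fail the calling
// stage on FAILURE. propagate is false so the result can be inspected here rather
// than aborting the trigger step itself. The ghcontext argument is currently
// unused; it appears to be reserved for naming a GitHub status context.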
String buildJob(String ghcontext, String jobName) {
echo "Build of: " + jobName
if (DUMMY) { jobName = "Branch_Builds/dummy" }
def jobBuild = build(job: jobName, propagate: false,
parameters: [
string(name: 'GITHUB_URL', value: GITHUB_URL),
string(name: 'GITHUB_BRANCH', value: GITHUB_BRANCH),
string(name: 'JOB_DIR', value: JOB_DIR),
string(name: 'SHA1', value: SHA1),
]
)
def result = jobBuild.getResult()
echo "Build of " + jobName + " returned result: " + result
if ('FAILURE' == result) { error("${jobName} failed") }
return result
}
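// Same as buildJob(), but also passes AUTEST_SHARD so the downstream autest job
// can limit itself to the given shard of the test suite.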
String autestJob(String ghcontext, String jobName, String shard) {
echo "Autest Build of: " + jobName + " " + shard
if (DUMMY) { jobName = "Branch_Builds/dummy" }
def jobBuild = build(job: jobName, propagate: false,
parameters: [
string(name: 'GITHUB_URL', value: GITHUB_URL),
string(name: 'GITHUB_BRANCH', value: GITHUB_BRANCH),
string(name: 'JOB_DIR', value: JOB_DIR),
string(name: 'SHA1', value: SHA1),
string(name: 'AUTEST_SHARD', value: shard),
]
)
def result = jobBuild.getResult()
echo "Build of " + jobName + " returned result: " + result
if ('FAILURE' == result) { error("${jobName} failed") }
return result
}
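// Resolve the head commit of a branch without cloning, using
// "git ls-remote -h <url> refs/heads/<branch>".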
def shaForBranch(url, branch) {
def sha1 = sh (
script: "set -- `git ls-remote -h $url refs/heads/$branch`; echo \${1}",
returnStdout: true
).trim()
return sha1
}
pipeline {
agent none
stages {
stage('Initialization') {
agent { label 'master' }
steps {
script {
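// Fill in defaults for anything not supplied as a parameter: the autest shard
// count, JOB_DIR and GITHUB_BRANCH (both derived from JOB_NAME), and the SHA1
// of the branch head being built.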
if (! env.AUTEST_SHARDS) {
env.AUTEST_SHARDS = 4
}
if (! env.JOB_DIR) {
def bparts = env.JOB_NAME.split('/')
bparts = bparts - bparts.last()
env.JOB_DIR = bparts.join('/')
}
if (! env.GITHUB_BRANCH) {
def bparts = env.JOB_NAME.split('/')
if (2 != bparts.length) {
error("Invalid branch name from ${JOB_NAME}")
}
env.GITHUB_BRANCH = bparts[0]
}
if (! env.SHA1) {
env.SHA1 = shaForBranch(env.GITHUB_URL, env.GITHUB_BRANCH)
}
currentBuild.displayName = "#${BUILD_NUMBER} ${GITHUB_BRANCH}"
currentBuild.description = env.SHA1
sh 'printenv'
}
}
}
stage('Validation') {
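// Quick checks that run in parallel before the OS builds: in-tree and
// out-of-tree builds, the RAT (Apache Release Audit Tool) license check, and
// clang-format.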
parallel {
stage('In Tree') {
steps {
script {
buildJob('in_tree', env.JOB_DIR + '/in_tree')
}
}
}
stage('Out Of Tree') {
steps {
script {
buildJob('out_of_tree', env.JOB_DIR + '/out_of_tree')
}
}
}
stage('RAT') {
steps {
script {
buildJob('rat', env.JOB_DIR + '/rat')
}
}
}
stage('clang format') {
steps {
script {
buildJob('clang_format', env.JOB_DIR + '/clang_format')
}
}
}
}
}
stage('Snapshot') {
steps {
script {
//buildJob('snapshot', env.JOB_DIR + '/snapshot')
echo "Skipping snapshot"
}
}
}
stage('OS Builds') {
steps {
script {
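// catchError marks the build and this stage as FAILURE when any OS build fails,
// but still lets the later stages run.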
catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
doParallelBuilds()
}
}
}
}
stage('Verification') {
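// Longer-running checks, all in parallel: static analysis, sharded autest runs,
// the docs build, cache tests, and coverage.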
parallel {
stage('clang analyzer') {
steps {
script {
buildJob('clang-analyzer', env.JOB_DIR + '/clang_analyzer')
}
}
}
stage('autests') {
steps {
script {
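// Split autest across AUTEST_SHARDS parallel downstream builds, each identified
// as "<index>of<total>" with a zero-based index.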
if (env.AUTEST_SHARDS) {
def nshards = env.AUTEST_SHARDS as int
def jobs = [:]
for (int ind = 0 ; ind < nshards ; ind++) {
String shard = ind + "of" + env.AUTEST_SHARDS
jobs[shard] = { autestJob('autest', env.JOB_DIR + '/autest', shard) }
}
parallel jobs
} else {
buildJob('autest', env.JOB_DIR + '/autest')
}
}
}
}
stage('docs') {
steps {
script {
buildJob('docs', env.JOB_DIR + '/docs')
}
}
}
stage('cache_tests') {
steps {
script {
buildJob('cache-tests', env.JOB_DIR + '/cache-tests')
}
}
}
stage('coverage') {
steps {
script {
buildJob('coverage', env.JOB_DIR + '/coverage')
}
}
}
}
}
}
}