/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import CommonJobProperties as commonJobProperties
// This job runs the Beam performance tests on PerfKit Benchmarker.
job('beam_PerformanceTests_Spark'){
  // Set default Beam job properties.
  commonJobProperties.setTopLevelMainJobProperties(delegate)
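
  // Allow the job to be triggered on a pull request by commenting the
  // phrase 'Run Spark Performance Test'.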
  commonJobProperties.enablePhraseTriggeringFromPullRequest(
      delegate,
      'Spark Performance Test',
      'Run Spark Performance Test')

  // Run the job in postcommit every 6 hours; don't trigger it on every
  // push, and don't email individual committers.
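  // ('H */6 * * *' is Jenkins cron syntax: 'H' hashes the start minute per
  // job so periodic builds are spread out rather than all firing at once.)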
  commonJobProperties.setAutoJob(
      delegate,
      'H */6 * * *')

  def argMap = [
      benchmarks: 'dpb_wordcount_benchmark',
      // There are currently problems uploading to Dataproc, so we use a file
      // already present on the machines as input.
      dpb_wordcount_input: '/etc/hosts',
      config_override: 'dpb_wordcount_benchmark.dpb_service.service_type=dataproc',
      bigquery_table: 'beam_performance.spark_pkp_results'
  ]

  commonJobProperties.buildPerformanceTest(delegate, argMap)
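
  // For reference, a rough sketch of the PerfKit Benchmarker invocation the
  // argMap above corresponds to. The exact command is assembled by
  // buildPerformanceTest, so treat this as illustrative, not authoritative:
  //
  //   python pkb.py \
  //       --benchmarks=dpb_wordcount_benchmark \
  //       --dpb_wordcount_input=/etc/hosts \
  //       --config_override=dpb_wordcount_benchmark.dpb_service.service_type=dataproc \
  //       --bigquery_table=beam_performance.spark_pkp_results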
}