/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
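// groovy.json.JsonOutput is used below to encode each task's pipeline options as JSON.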
import groovy.json.JsonOutput
plugins {
  id 'org.apache.beam.module'
}
applyJavaNature(
  exportJavadoc: false,
  publish: false,
  disableLintWarnings: ['requires-transitive-automatic', 'requires-automatic'],
  enableSpotbugs: false,
  requireJavaVersion: JavaVersion.VERSION_11
)
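// provideIntegrationTestingDependencies() and enableJavaPerformanceTesting()
// come from the Beam module plugin applied above and wire up the shared
// integration- and performance-testing support for this module.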
provideIntegrationTestingDependencies()
enableJavaPerformanceTesting()
description = "Apache Beam :: SDKs :: Java :: Testing :: JPMS Tests"
ext.summary = "E2E test for Java 9 modules"
/*
 * Map of integration test task names to the runner class each task runs against.
 */
def testRunnerClass = [
  directRunnerIntegrationTest: "org.apache.beam.runners.direct.DirectRunner",
  flinkRunnerIntegrationTest: "org.apache.beam.runners.flink.TestFlinkRunner",
  dataflowRunnerIntegrationTest: "org.apache.beam.runners.dataflow.TestDataflowRunner",
  sparkRunnerIntegrationTest: "org.apache.beam.runners.spark.TestSparkRunner",
]
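// Give each runner its own dependency configuration so that every integration
// test task below gets a classpath containing exactly one runner.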
configurations {
  baseIntegrationTest
  directRunnerIntegrationTest.extendsFrom(baseIntegrationTest)
  flinkRunnerIntegrationTest.extendsFrom(baseIntegrationTest)
  dataflowRunnerIntegrationTest.extendsFrom(baseIntegrationTest)
  sparkRunnerIntegrationTest.extendsFrom(baseIntegrationTest)
}
dependencies {
  implementation project(path: ":sdks:java:core", configuration: "shadow")
  implementation project(path: ":sdks:java:extensions:google-cloud-platform-core")
  permitUnusedDeclared project(":sdks:java:extensions:google-cloud-platform-core") // for autoservice filesystems
  testImplementation library.java.junit
  testImplementation library.java.hamcrest
  baseIntegrationTest project(path: ":sdks:java:testing:jpms-tests", configuration: "testRuntimeMigration")
  directRunnerIntegrationTest project(":runners:direct-java")
  flinkRunnerIntegrationTest project(":runners:flink:${project.ext.latestFlinkVersion}")
  dataflowRunnerIntegrationTest project(":runners:google-cloud-dataflow-java")
  dataflowRunnerIntegrationTest project(":sdks:java:harness") // for staging locally built harness jar
  sparkRunnerIntegrationTest project(":runners:spark:3")
}
/*
 * Create an integration test task for each runner in testRunnerClass; each
 * task runs the WordCount integration tests against that runner.
 */
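// Defaults for the GCP-related pipeline options; each can be overridden on the
// command line, e.g. -PgcpProject=my-project (an illustrative project name).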
def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
def gcpRegion = project.findProperty('gcpRegion') ?: 'us-central1'
def gcsTempRoot = project.findProperty('gcsTempRoot') ?: 'gs://temp-storage-for-end-to-end-tests/'
for (String runner : testRunnerClass.keySet()) {
  tasks.create(name: runner, type: Test) {
    def testPipelineOptions = [
      "--project=${gcpProject}",
      "--tempRoot=${gcsTempRoot}",
      "--runner=" + testRunnerClass[runner],
    ]
    if ("dataflowRunnerIntegrationTest".equals(runner)) {
      // Dataflow additionally needs a region and, to exercise the locally
      // built harness jar, the use_staged_dataflow_worker_jar experiment.
      testPipelineOptions.add("--region=${gcpRegion}")
      testPipelineOptions.add("--experiments=use_staged_dataflow_worker_jar")
    }
    classpath = configurations."${runner}"
    include "**/*IT.class"
    maxParallelForks 4
    // TestPipeline reads its options from this system property as a JSON array.
    systemProperty "beamTestPipelineOptions", JsonOutput.toJson(testPipelineOptions)
  }
}
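// Example invocations (project and bucket names are illustrative):
//   ./gradlew :sdks:java:testing:jpms-tests:directRunnerIntegrationTest
//   ./gradlew :sdks:java:testing:jpms-tests:dataflowRunnerIntegrationTest \
//       -PgcpProject=my-project -PgcsTempRoot=gs://my-bucket/tmp/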
// Activate module support
// https://docs.gradle.org/current/samples/sample_java_modules_multi_project.html
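// With inferModulePath enabled, Gradle compiles against module-info.java and
// puts dependencies that declare a module on the module path instead of the
// classpath.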
plugins.withType(JavaPlugin).configureEach {
  java {
    modularity.inferModulePath = true
  }
}