/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import groovy.json.JsonOutput
plugins { id 'org.apache.beam.module' }
applyJavaNature(
automaticModuleName: 'org.apache.beam.runners.dataflow',
classesTriggerCheckerBugs: [
'PrimitiveParDoSingleFactory': 'https://github.com/typetools/checker-framework/issues/3791',
],
)
description = "Apache Beam :: Runners :: Google Cloud Dataflow"
/*
* We manually specify these evaluationDependsOn calls to ensure that the following
* projects are evaluated before this project, because we reference their properties
* (such as "sourceSets.test.output") directly.
*/
evaluationDependsOn(":sdks:java:io:google-cloud-platform")
evaluationDependsOn(":sdks:java:core")
evaluationDependsOn(":examples:java")
evaluationDependsOn(":runners:google-cloud-dataflow-java:worker:legacy-worker")
evaluationDependsOn(":runners:google-cloud-dataflow-java:worker")
evaluationDependsOn(":sdks:java:container")
processResources {
filter org.apache.tools.ant.filters.ReplaceTokens, tokens: [
'dataflow.legacy_environment_major_version' : '8',
'dataflow.fnapi_environment_major_version' : '8',
'dataflow.container_version' : 'beam-2.26.0'
]
}
// Exclude tests that need a runner
test {
systemProperty "beamTestPipelineOptions", ""
systemProperty "beamUseDummyRunner", "true"
useJUnit {
excludeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
}
}
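// A typical local invocation (a sketch; assumes the Gradle wrapper at the repository root):
//   ./gradlew :runners:google-cloud-dataflow-java:test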
configurations {
validatesRunner
coreSDKJavaIntegrationTest
examplesJavaIntegrationTest
googleCloudPlatformIntegrationTest
}
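// These configurations provide isolated classpaths for the validatesRunner and *IntegrationTest
// tasks defined below (see the 'classpath = configurations.*' assignments in those tasks).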
dependencies {
compile enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
compile library.java.vendored_guava_26_0_jre
compile project(path: ":model:pipeline", configuration: "shadow")
compile project(path: ":sdks:java:core", configuration: "shadow")
compile project(":sdks:java:extensions:google-cloud-platform-core")
compile project(":sdks:java:io:google-cloud-platform")
compile project(":runners:core-construction-java")
compile library.java.avro
compile library.java.bigdataoss_util
compile library.java.google_api_client
compile library.java.google_api_services_clouddebugger
compile library.java.google_api_services_dataflow
compile library.java.google_api_services_storage
compile library.java.google_auth_library_credentials
compile library.java.google_auth_library_oauth2_http
compile library.java.google_http_client
compile library.java.google_http_client_jackson2
compile library.java.jackson_annotations
compile library.java.jackson_core
compile library.java.jackson_databind
compile library.java.joda_time
compile library.java.slf4j_api
compile library.java.vendored_grpc_1_26_0
testCompile library.java.hamcrest_core
testCompile library.java.guava_testlib
testCompile library.java.junit
testCompile project(path: ":sdks:java:io:google-cloud-platform", configuration: "testRuntime")
testCompile project(path: ":sdks:java:core", configuration: "shadowTest")
testCompile project(path: ":sdks:java:extensions:google-cloud-platform-core", configuration: "testRuntime")
testCompile library.java.google_cloud_dataflow_java_proto_library_all
testCompile library.java.jackson_dataformat_yaml
testCompile library.java.mockito_core
testCompile library.java.proto_google_cloud_datastore_v1
testCompile library.java.slf4j_jdk14
validatesRunner project(path: ":sdks:java:core", configuration: "shadowTest")
validatesRunner project(project.path)
validatesRunner project(path: project.path, configuration: "testRuntime")
validatesRunner library.java.hamcrest_core
validatesRunner library.java.hamcrest_library
coreSDKJavaIntegrationTest project(project.path)
coreSDKJavaIntegrationTest project(path: ":sdks:java:core", configuration: "shadowTest")
examplesJavaIntegrationTest project(project.path)
examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntime")
googleCloudPlatformIntegrationTest project(project.path)
googleCloudPlatformIntegrationTest project(path: ":sdks:java:io:google-cloud-platform", configuration: "testRuntime")
}
def dataflowProject = project.findProperty('dataflowProject') ?: 'apache-beam-testing'
def dataflowRegion = project.findProperty('dataflowRegion') ?: 'us-central1'
def dataflowValidatesTempRoot = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-validates-runner-tests'
def dataflowPostCommitTempRoot = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-end-to-end-tests'
def dataflowPostCommitTempRootKms = project.findProperty('dataflowTempRootKms') ?: 'gs://temp-storage-for-end-to-end-tests-cmek'
def dataflowUploadTemp = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-upload-tests'
def testFilesToStage = project.findProperty('filesToStage') ?: 'test.txt'
def dataflowLegacyWorkerJar = project.findProperty('dataflowWorkerJar') ?: project(":runners:google-cloud-dataflow-java:worker:legacy-worker").shadowJar.archivePath
def dataflowFnApiWorkerJar = project.findProperty('dataflowWorkerJar') ?: project(":runners:google-cloud-dataflow-java:worker").shadowJar.archivePath
def dataflowKmsKey = project.findProperty('dataflowKmsKey') ?: "projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test"
def dockerImageRoot = project.findProperty('dockerImageRoot') ?: "us.gcr.io/${dataflowProject}/java-postcommit-it"
def dockerImageContainer = "${dockerImageRoot}/java"
def dockerTag = new Date().format('yyyyMMddHHmmss')
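// Each of the defaults above can be overridden on the command line, e.g. (hypothetical values):
//   ./gradlew :runners:google-cloud-dataflow-java:validatesRunner \
//     -PdataflowProject=my-project -PdataflowRegion=us-west1 -PdataflowTempRoot=gs://my-bucket/tmp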
// If -PuseExecutableStage is set, the use_executable_stage_bundle_execution experiment will be enabled.
def fnapiExperiments = project.hasProperty('useExecutableStage') ? 'beam_fn_api,beam_fn_api_use_deprecated_read,use_executable_stage_bundle_execution' : "beam_fn_api,beam_fn_api_use_deprecated_read"
ext.dockerImageName = "${dockerImageContainer}:${dockerTag}"
def legacyPipelineOptions = [
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowValidatesTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
]
def fnApiPipelineOptions = [
"--dataflowWorkerJar=${dataflowFnApiWorkerJar}",
"--workerHarnessContainerImage=${dockerImageContainer}:${dockerTag}",
"--experiments=${fnapiExperiments}",
]
def runnerV2PipelineOptions = [
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowValidatesTempRoot}",
"--workerHarnessContainerImage=${dockerImageContainer}:${dockerTag}",
"--experiments=beam_fn_api,use_unified_worker",
]
def commonLegacyExcludeCategories = [
'org.apache.beam.sdk.testing.LargeKeys$Above10MB',
'org.apache.beam.sdk.testing.UsesAttemptedMetrics',
'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms',
'org.apache.beam.sdk.testing.UsesDistributionMetrics',
'org.apache.beam.sdk.testing.UsesGaugeMetrics',
'org.apache.beam.sdk.testing.UsesSetState',
'org.apache.beam.sdk.testing.UsesMapState',
'org.apache.beam.sdk.testing.UsesOrderedListState',
'org.apache.beam.sdk.testing.UsesSplittableParDoWithWindowedSideInputs',
'org.apache.beam.sdk.testing.UsesTestStream',
'org.apache.beam.sdk.testing.UsesParDoLifecycle',
'org.apache.beam.sdk.testing.UsesMetricsPusher',
'org.apache.beam.sdk.testing.UsesBundleFinalizer',
]
// For the following test tasks that use the legacy worker, set workerHarnessContainerImage to
// empty so that Dataflow picks up the non-versioned container image, which handles a staged worker jar.
def createLegacyWorkerValidatesRunnerTest = { Map args ->
def name = args.name
def pipelineOptions = args.pipelineOptions ?: legacyPipelineOptions
def excludedTests = args.excludedTests ?: []
def excludedCategories = args.excludedCategories ?: []
return tasks.create(name: name, type: Test, group: "Verification") {
dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson(pipelineOptions)
// Increase test parallelism up to the number of Gradle workers. By default this is equal
// to the number of CPU cores, but can be increased by setting --max-workers=N.
maxParallelForks Integer.MAX_VALUE
classpath = configurations.validatesRunner
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
files(project(project.path).sourceSets.test.output.classesDirs)
useJUnit {
includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
commonLegacyExcludeCategories.each {
excludeCategories it
}
excludedCategories.each {
excludeCategories it
}
filter {
excludedTests.each {
excludeTestsMatching it
}
}
}
}
}
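// Same shape as the legacy factory above, but for Runner V2: there is no staged worker jar to
// depend on, and runnerV2PipelineOptions is the default set of pipeline options.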
def createRunnerV2ValidatesRunnerTest = { Map args ->
def name = args.name
def pipelineOptions = args.pipelineOptions ?: runnerV2PipelineOptions
def excludedTests = args.excludedTests ?: []
def excludedCategories = args.excludedCategories ?: []
return tasks.create(name: name, type: Test, group: "Verification") {
systemProperty "beamTestPipelineOptions", JsonOutput.toJson(pipelineOptions)
// Increase test parallelism up to the number of Gradle workers. By default this is equal
// to the number of CPU cores, but can be increased by setting --max-workers=N.
maxParallelForks Integer.MAX_VALUE
classpath = configurations.validatesRunner
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
files(project(project.path).sourceSets.test.output.classesDirs)
useJUnit {
includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
excludedCategories.each {
excludeCategories it
}
filter {
excludedTests.each {
excludeTestsMatching it
}
}
}
}
}
// Push docker images to a container registry for use within tests.
// NB: Tasks which consume docker images from the registry should depend on this
// task directly ('dependsOn buildAndPushDockerContainer'). This ensures the correct
// task ordering such that the registry doesn't get cleaned up prior to task completion.
task buildAndPushDockerContainer() {
dependsOn ":sdks:java:container:docker"
finalizedBy 'cleanUpDockerImages'
def defaultDockerImageName = containerImageName(
name: project.docker_image_default_repo_prefix + "java_sdk",
root: "apache",
tag: project.sdk_version)
doLast {
exec {
commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerImageName}"
}
exec {
commandLine "gcloud", "docker", "--", "push", "${dockerImageContainer}"
}
}
}
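// A manual equivalent of the two exec steps above (a sketch; the tag is generated at build time
// and the image root defaults to us.gcr.io/<dataflowProject>/java-postcommit-it):
//   docker tag apache/beam_java_sdk:<sdk_version> <dockerImageRoot>/java:<timestamp>
//   gcloud docker -- push <dockerImageRoot>/java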
// Clean up built images
task cleanUpDockerImages() {
doLast {
exec {
commandLine "docker", "rmi", "${dockerImageName}"
}
exec {
commandLine "gcloud", "--quiet", "container", "images", "delete", "--force-delete-tags", "${dockerImageName}"
}
}
}
afterEvaluate {
// Ensure all tasks which use published docker images run before they are cleaned up
tasks.each { t ->
if (t.dependsOn.contains(buildAndPushDockerContainer)) {
cleanUpDockerImages.mustRunAfter t
}
}
}
task printFnApiPipelineOptions {
group = "Help"
description = "Prints to the console extra pipeline options needed to run a Dataflow pipeline using portability"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
dependsOn buildAndPushDockerContainer
doLast {
println "To run a Dataflow job with portability, add the following pipeline options to your command-line:"
println fnApiPipelineOptions.join(' ')
}
}
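// e.g. ./gradlew :runners:google-cloud-dataflow-java:printFnApiPipelineOptions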
task validatesRunner {
group = "Verification"
description "Validates Dataflow runner"
dependsOn(createLegacyWorkerValidatesRunnerTest(name: 'validatesRunnerLegacyWorkerTest'))
}
task validatesRunnerStreaming {
group = "Verification"
description "Validates Dataflow runner forcing streaming mode"
dependsOn(createLegacyWorkerValidatesRunnerTest(
name: 'validatesRunnerLegacyWorkerTestStreaming',
pipelineOptions: legacyPipelineOptions + ['--streaming=true'],
excludedCategories: [
'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
'org.apache.beam.sdk.testing.UsesStrictTimerOrdering'
],
excludedTests: [
'org.apache.beam.sdk.metrics.MetricsTest$CommittedMetricTests'
]
))
}
task validatesRunnerV2 {
group = "Verification"
description = "Runs the ValidatesRunner tests on Dataflow Runner V2"
dependsOn buildAndPushDockerContainer
dependsOn(createRunnerV2ValidatesRunnerTest(
name: 'validatesRunnerV2Test',
excludedCategories: [
// TODO(BEAM-11130): support OrderedListState in Google Cloud Dataflow Runner V2
'org.apache.beam.sdk.testing.UsesOrderedListState',
'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
],
excludedTests: [
'org.apache.beam.sdk.transforms.windowing.WindowingTest.testNonPartitioningWindowing',
'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsKeyedCollection',
'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindows',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testBundleFinalizationOccursOnUnboundedSplittableDoFn',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testOutputAfterCheckpointBounded',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testOutputAfterCheckpointUnbounded',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexBasicBounded',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexBasicUnbounded',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexWindowedTimestampedBounded',
'org.apache.beam.sdk.transforms.SplittableDoFnTest.testWindowedSideInputWithCheckpointsBounded',
'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming',
'org.apache.beam.sdk.transforms.ParDoTest$BundleFinalizationTests.testBundleFinalization',
'org.apache.beam.sdk.transforms.ParDoTest$BundleFinalizationTests.testBundleFinalizationWithSideInputs',
'org.apache.beam.sdk.transforms.ParDoTest$BundleFinalizationTests.testBundleFinalizationWithState',
'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleBounded',
'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleUnbounded',
'org.apache.beam.sdk.transforms.ParDoTest$StateCoderInferenceTests.testMapStateCoderInference',
'org.apache.beam.sdk.transforms.ParDoTest$StateCoderInferenceTests.testSetStateCoderInference',
'org.apache.beam.sdk.transforms.ParDoTest$StateTests.testMapState',
'org.apache.beam.sdk.transforms.ParDoTest$StateTests.testSetState',
'org.apache.beam.sdk.transforms.ParDoTest$StateTests.testValueStateTaggedOutput',
'org.apache.beam.sdk.transforms.ParDoTest$TimerFamilyTests.testTimerFamilyProcessingTime',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.duplicateTimerSetting',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerCanBeReset',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerOrdering',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerOrderingWithCreate',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampWithProcessingTime',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testProcessingTimeTimerCanBeReset',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOther',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOtherWithCreateAsInput',
'org.apache.beam.sdk.transforms.ParDoSchemaTest.testMapStateSchemaInference',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testFnCallSequenceStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElementStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetup',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetupStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful',
'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testCombiningAccumulatingProcessingTime',
'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys100MB',
'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys10MB',
'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2',
'org.apache.beam.sdk.testing.TestStreamTest.testMultiStage',
'org.apache.beam.sdk.metrics.MetricsTest$AttemptedMetricTests.testAllAttemptedMetrics',
'org.apache.beam.sdk.metrics.MetricsTest$AttemptedMetricTests.testAttemptedGaugeMetrics',
'org.apache.beam.sdk.metrics.MetricsTest$CommittedMetricTests.testAllCommittedMetrics',
'org.apache.beam.sdk.metrics.MetricsTest$CommittedMetricTests.testCommittedGaugeMetrics',
]
))
}
task validatesRunnerV2Streaming {
group = "Verification"
description = "Runs the ValidatesRunner tests on Dataflow Runner V2 forcing streaming mode"
dependsOn buildAndPushDockerContainer
dependsOn(createRunnerV2ValidatesRunnerTest(
name: 'validatesRunnerV2TestStreaming',
pipelineOptions: runnerV2PipelineOptions + ['--streaming=true'],
excludedCategories: [
// TODO(BEAM-11130): support OrderedListState in Google Cloud Dataflow Runner V2
'org.apache.beam.sdk.testing.UsesOrderedListState',
'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
'org.apache.beam.sdk.testing.UsesMetricsPusher',
],
excludedTests: [
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerAlignBounded',
'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindows',
'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsKeyedCollection',
'org.apache.beam.examples.complete.TopWikipediaSessionsTest.testComputeTopUsers',
'org.apache.beam.sdk.transforms.windowing.WindowingTest.testNonPartitioningWindowing',
'org.apache.beam.sdk.io.AvroIOTest.testWriteWindowed',
'org.apache.beam.sdk.io.AvroIOTest.testWindowedAvroIOWriteViaSink',
'org.apache.beam.sdk.extensions.sql.BeamSqlDslAggregationTest.testTriggeredTumble',
'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming',
'org.apache.beam.sdk.testing.TestStreamTest.testProcessingTimeTrigger',
'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleBounded',
'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleUnbounded',
'org.apache.beam.runners.dataflow.DataflowRunnerTest.testBatchGroupIntoBatchesOverride',
'org.apache.beam.sdk.transforms.GroupByKeyTest.testCombiningAccumulatingProcessingTime',
// TODO(BEAM-8543): streaming timers are not strictly time ordered
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerOrderingWithCreate',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOther',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOtherWithCreateAsInput',
'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerAlignAfterGcTimeUnbounded',
'org.apache.beam.sdk.metrics.MetricsTest$CommittedMetricTests.testCommittedCounterMetrics',
'org.apache.beam.sdk.testing.TestStreamTest.testMultipleStreams',
'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSameFixedWindows',
'org.apache.beam.sdk.transforms.WaitTest.testWaitWithDifferentFixedWindows',
'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSignalInSlidingWindows',
'org.apache.beam.sdk.transforms.WaitTest.testWaitInGlobalWindow',
'org.apache.beam.sdk.transforms.WaitTest.testWaitBoundedInDefaultWindow',
'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSomeSignalWindowsEmpty',
// TODO(BEAM-3245): respect ParDo lifecycle.
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElementStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetup',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetupStateful',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle',
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful',
'org.apache.beam.sdk.io.CountingSourceTest.testBoundedSourceSplits',
]
))
}
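// Copy the GCP IO test resources into this project's test resources directory so that the
// integration tests below can load them from their own classpath.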
task copyGoogleCloudPlatformTestResources(type: Copy) {
from project(':sdks:java:io:google-cloud-platform').fileTree("src/test/resources")
into "$buildDir/resources/test/"
}
task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test, dependsOn: copyGoogleCloudPlatformTestResources) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
])
include '**/*IT.class'
exclude '**/BigQueryIOReadIT.class'
exclude '**/BigQueryIOStorageReadTableRowIT.class'
exclude '**/PubsubReadIT.class'
exclude '**/FhirIOReadIT.class'
maxParallelForks 4
classpath = configurations.googleCloudPlatformIntegrationTest
testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs)
useJUnit {
excludeCategories "org.apache.beam.sdk.testing.UsesKms"
}
}
task googleCloudPlatformLegacyWorkerKmsIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRootKms}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
"--dataflowKmsKey=${dataflowKmsKey}",
])
include '**/*IT.class'
exclude '**/BigQueryKmsKeyIT.class' // Only needs to run on direct runner.
maxParallelForks 4
classpath = configurations.googleCloudPlatformIntegrationTest
testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs)
useJUnit {
includeCategories "org.apache.beam.sdk.testing.UsesKms"
}
}
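// The KMS suite above runs against the CMEK-enabled temp root; a different key can be supplied
// with -PdataflowKmsKey (see the dataflowKmsKey default above).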
task googleCloudPlatformFnApiWorkerIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
dependsOn buildAndPushDockerContainer
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}"] + fnApiPipelineOptions
)
include '**/*IT.class'
exclude '**/BigQueryIOReadIT.class'
exclude '**/BigQueryIOStorageQueryIT.class'
exclude '**/BigQueryIOStorageReadIT.class'
exclude '**/BigQueryIOStorageReadTableRowIT.class'
exclude '**/PubsubReadIT.class'
exclude '**/SpannerReadIT.class'
exclude '**/BigtableReadIT.class'
exclude '**/V1ReadIT.class'
exclude '**/SpannerWriteIT.class'
exclude '**/BigQueryNestedRecordsIT.class'
exclude '**/SplitQueryFnIT.class'
exclude '**/*KmsKeyIT.class'
maxParallelForks 4
classpath = configurations.googleCloudPlatformIntegrationTest
testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs)
useJUnit { }
}
task examplesJavaLegacyWorkerIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
])
// The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so
// this postCommit integration test excludes them.
include '**/*IT.class'
exclude '**/WordCountIT.class'
exclude '**/WindowedWordCountIT.class'
maxParallelForks 4
classpath = configurations.examplesJavaIntegrationTest
testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
useJUnit { }
}
// For the fn-api worker, run only the ITs that currently pass.
// More ITs should be supported in the future.
task examplesJavaFnApiWorkerIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
dependsOn buildAndPushDockerContainer
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}"] + fnApiPipelineOptions
)
// The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so
// this postCommit integration test excludes them.
include '**/*IT.class'
exclude '**/WordCountIT.class'
exclude '**/WindowedWordCountIT.class'
exclude '**/TopWikipediaSessionsIT.class'
exclude '**/TfIdfIT.class'
exclude '**/AutoCompleteIT.class'
exclude '**/TrafficMaxLaneFlowIT.class'
exclude '**/TrafficRoutesIT.class'
maxParallelForks 4
classpath = configurations.examplesJavaIntegrationTest
testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
useJUnit { }
}
task coreSDKJavaLegacyWorkerIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}",
"--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
"--workerHarnessContainerImage=",
])
include '**/*IT.class'
// TODO(BEAM-4684): Support @RequiresStableInput on Dataflow in a more intelligent way
exclude '**/RequiresStableInputIT.class'
maxParallelForks 4
classpath = configurations.coreSDKJavaIntegrationTest
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs)
useJUnit { }
}
task coreSDKJavaFnApiWorkerIntegrationTest(type: Test) {
group = "Verification"
dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
dependsOn buildAndPushDockerContainer
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
"--runner=TestDataflowRunner",
"--project=${dataflowProject}",
"--region=${dataflowRegion}",
"--tempRoot=${dataflowPostCommitTempRoot}"] + fnApiPipelineOptions
)
include '**/*IT.class'
// TODO(BEAM-4684): Support @RequiresStableInput on Dataflow in a more intelligent way
exclude '**/RequiresStableInputIT.class'
maxParallelForks 4
classpath = configurations.coreSDKJavaIntegrationTest
testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs)
useJUnit { }
}
task postCommit {
group = "Verification"
description = "Various integration tests using the Dataflow runner."
dependsOn googleCloudPlatformLegacyWorkerIntegrationTest
dependsOn googleCloudPlatformLegacyWorkerKmsIntegrationTest
dependsOn examplesJavaLegacyWorkerIntegrationTest
dependsOn coreSDKJavaLegacyWorkerIntegrationTest
}
task postCommitPortabilityApi {
group = "Verification"
description = "Various integration tests using the Dataflow FnApi runner."
dependsOn buildAndPushDockerContainer
dependsOn googleCloudPlatformFnApiWorkerIntegrationTest
dependsOn examplesJavaFnApiWorkerIntegrationTest
dependsOn coreSDKJavaFnApiWorkerIntegrationTest
}
def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
def gcpRegion = project.findProperty('gcpRegion') ?: 'us-central1'
def gcsBucket = project.findProperty('gcsBucket') ?: 'temp-storage-for-release-validation-tests/nightly-snapshot-validation'
def bqDataset = project.findProperty('bqDataset') ?: 'beam_postrelease_mobile_gaming'
def pubsubTopic = project.findProperty('pubsubTopic') ?: 'java_mobile_gaming_topic'
// Generates :runners:google-cloud-dataflow-java:runQuickstartJavaDataflow
createJavaExamplesArchetypeValidationTask(type: 'Quickstart',
runner: 'Dataflow',
gcpProject: gcpProject,
gcpRegion: gcpRegion,
gcsBucket: gcsBucket)
// Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflow
createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
runner: 'Dataflow',
gcpProject: gcpProject,
gcpRegion: gcpRegion,
gcsBucket: gcsBucket,
bqDataset: bqDataset,
pubsubTopic: pubsubTopic)
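// Example invocation (hypothetical project/bucket/dataset/topic values):
//   ./gradlew :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflow \
//     -PgcpProject=my-project -PgcsBucket=my-bucket/path -PbqDataset=my_dataset -PpubsubTopic=my_topic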
// Standalone task for testing GCS upload; use with -PfilesToStage and -PdataflowTempRoot.
task GCSUpload(type: JavaExec) {
main = 'org.apache.beam.runners.dataflow.util.GCSUploadMain'
classpath = sourceSets.test.runtimeClasspath
args "--stagingLocation=${dataflowUploadTemp}/staging",
"--filesToStage=${testFilesToStage}"
}
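// Example (hypothetical file and bucket):
//   ./gradlew :runners:google-cloud-dataflow-java:GCSUpload \
//     -PfilesToStage=/tmp/file-to-stage.txt -PdataflowTempRoot=gs://my-bucket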