| /* |
| * Licensed to the Apache Software Foundation (ASF) under one |
| * or more contributor license agreements. See the NOTICE file |
| * distributed with this work for additional information |
| * regarding copyright ownership. The ASF licenses this file |
| * to you under the Apache License, Version 2.0 (the |
| * License); you may not use this file except in compliance |
| * with the License. You may obtain a copy of the License at |
| * |
| * http://www.apache.org/licenses/LICENSE-2.0 |
| * |
| * Unless required by applicable law or agreed to in writing, software |
| * distributed under the License is distributed on an AS IS BASIS, |
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| * See the License for the specific language governing permissions and |
| * limitations under the License. |
| */ |
| |
| import groovy.json.JsonOutput |
| |
| plugins { id 'org.apache.beam.module' } |
| // Beam's shared build-convention plugin; applyJavaNature (provided by it, |
| // defined outside this file) configures this module as a Java project. |
| applyJavaNature( |
| automaticModuleName: 'org.apache.beam.runners.dataflow', |
| // Classes that trigger a known checker-framework bug; the linked issue |
| // tracks when the workaround can be removed. |
| classesTriggerCheckerBugs: [ |
| 'PrimitiveParDoSingleFactory': 'https://github.com/typetools/checker-framework/issues/3791', |
| ], |
| ) |
| |
| description = "Apache Beam :: Runners :: Google Cloud Dataflow" |
| |
| /* |
| * We need to rely on manually specifying these evaluationDependsOn to ensure that |
| * the following projects are evaluated before we evaluate this project. This is because |
| * we are attempting to reference parameters such as "sourceSets.test.output" directly. |
| */ |
| evaluationDependsOn(":sdks:java:io:google-cloud-platform") |
| evaluationDependsOn(":sdks:java:core") |
| evaluationDependsOn(":examples:java") |
| evaluationDependsOn(":runners:google-cloud-dataflow-java:worker:legacy-worker") |
| evaluationDependsOn(":runners:google-cloud-dataflow-java:worker") |
| evaluationDependsOn(":sdks:java:container:java8") |
| evaluationDependsOn(":sdks:java:container:java11") |
| |
| // Substitute @token@ placeholders in resource files with the concrete |
| // Dataflow environment/container versions using Ant's ReplaceTokens filter. |
| processResources { |
| filter org.apache.tools.ant.filters.ReplaceTokens, tokens: [ |
| 'dataflow.legacy_environment_major_version' : '8', |
| 'dataflow.fnapi_environment_major_version' : '8', |
| 'dataflow.legacy_container_version' : 'beam-2.33.0', |
| 'dataflow.fnapi_container_version' : 'beam-2.33.0', |
| 'dataflow.container_base_repository' : 'gcr.io/cloud-dataflow/v1beta3', |
| ] |
| } |
| |
| // Exclude tests that need a runner |
| test { |
| // Empty pipeline options plus a dummy runner so unit tests never launch |
| // real Dataflow jobs. |
| systemProperty "beamTestPipelineOptions", "" |
| systemProperty "beamUseDummyRunner", "true" |
| useJUnit { |
| excludeCategories 'org.apache.beam.sdk.testing.ValidatesRunner' |
| } |
| } |
| |
| // Custom dependency configurations, one per integration-test classpath |
| // assembled in the dependencies block below. |
| configurations { |
| validatesRunner |
| coreSDKJavaIntegrationTest |
| examplesJavaIntegrationTest |
| googleCloudPlatformIntegrationTest |
| } |
| |
| dependencies { |
| // Pin GCP library versions via the enforced BOM platform. |
| compile enforcedPlatform(library.java.google_cloud_platform_libraries_bom) |
| permitUnusedDeclared enforcedPlatform(library.java.google_cloud_platform_libraries_bom) |
| compile library.java.vendored_guava_26_0_jre |
| compile project(path: ":model:pipeline", configuration: "shadow") |
| compile project(path: ":sdks:java:core", configuration: "shadow") |
| compile project(":sdks:java:extensions:google-cloud-platform-core") |
| compile project(":sdks:java:io:kafka") |
| compile project(":sdks:java:io:google-cloud-platform") |
| compile project(":runners:core-construction-java") |
| compile library.java.avro |
| compile library.java.bigdataoss_util |
| compile library.java.commons_codec |
| compile library.java.flogger_system_backend // Avoids conflicts with bigdataoss_util (BEAM-11010) |
| permitUnusedDeclared library.java.flogger_system_backend |
| compile library.java.google_api_client |
| compile library.java.google_api_services_clouddebugger |
| compile library.java.google_api_services_dataflow |
| compile library.java.google_api_services_storage |
| permitUnusedDeclared library.java.google_api_services_storage // BEAM-11761 |
| compile library.java.google_auth_library_credentials |
| compile library.java.google_auth_library_oauth2_http |
| compile library.java.google_http_client |
| compile library.java.google_http_client_jackson2 |
| permitUnusedDeclared library.java.google_http_client_jackson2 // BEAM-11761 |
| compile library.java.hamcrest |
| compile library.java.jackson_annotations |
| compile library.java.jackson_core |
| compile library.java.jackson_databind |
| compile library.java.joda_time |
| compile library.java.slf4j_api |
| compile library.java.vendored_grpc_1_36_0 |
| // Test-only dependencies. |
| testCompile library.java.guava_testlib |
| testCompile library.java.junit |
| testCompile project(path: ":sdks:java:io:google-cloud-platform", configuration: "testRuntime") |
| testCompile project(path: ":sdks:java:core", configuration: "shadowTest") |
| testCompile project(path: ":sdks:java:extensions:google-cloud-platform-core", configuration: "testRuntime") |
| testCompile project(path: ":runners:core-construction-java", configuration: "testRuntime") |
| testCompile library.java.google_cloud_dataflow_java_proto_library_all |
| testCompile library.java.jackson_dataformat_yaml |
| testCompile library.java.mockito_core |
| // Classpaths for the custom integration-test configurations declared above. |
| validatesRunner project(path: ":sdks:java:core", configuration: "shadowTest") |
| validatesRunner project(project.path) |
| validatesRunner project(path: project.path, configuration: "testRuntime") |
| validatesRunner library.java.hamcrest_core |
| validatesRunner library.java.hamcrest_library |
| coreSDKJavaIntegrationTest project(project.path) |
| coreSDKJavaIntegrationTest project(path: ":sdks:java:core", configuration: "shadowTest") |
| examplesJavaIntegrationTest project(project.path) |
| examplesJavaIntegrationTest project(path: ":examples:java", configuration: "testRuntime") |
| googleCloudPlatformIntegrationTest project(project.path) |
| googleCloudPlatformIntegrationTest project(path: ":sdks:java:io:google-cloud-platform", configuration: "testRuntime") |
| } |
| |
| // Test configuration values, overridable on the command line with -P<name>; |
| // each falls back to the apache-beam-testing defaults. |
| def dataflowProject = project.findProperty('dataflowProject') ?: 'apache-beam-testing' |
| def dataflowRegion = project.findProperty('dataflowRegion') ?: 'us-central1' |
| def dataflowValidatesTempRoot = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-validates-runner-tests' |
| def dataflowPostCommitTempRoot = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-end-to-end-tests' |
| def dataflowPostCommitTempRootKms = project.findProperty('dataflowTempRootKms') ?: 'gs://temp-storage-for-end-to-end-tests-cmek' |
| // NOTE(review): this reads the same 'dataflowTempRoot' property as the two |
| // roots above (only the default differs) — confirm that is intended. |
| def dataflowUploadTemp = project.findProperty('dataflowTempRoot') ?: 'gs://temp-storage-for-upload-tests' |
| def testFilesToStage = project.findProperty('filesToStage') ?: 'test.txt' |
| def dataflowLegacyWorkerJar = project.findProperty('dataflowWorkerJar') ?: project(":runners:google-cloud-dataflow-java:worker:legacy-worker").shadowJar.archivePath |
| def dataflowKmsKey = project.findProperty('dataflowKmsKey') ?: "projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test" |
| |
| // Docker image coordinates for runner-v2 tests; the tag is a build timestamp |
| // so concurrent builds do not clobber each other's images. |
| def dockerImageRoot = project.findProperty('dockerImageRoot') ?: "us.gcr.io/${dataflowProject}/java-postcommit-it" |
| def dockerImageContainer = "${dockerImageRoot}/java" |
| def dockerTag = new Date().format('yyyyMMddHHmmss') |
| // Exposed as an extension property for tasks below (tag + cleanup). |
| ext.dockerImageName = "${dockerImageContainer}:${dockerTag}" |
| |
| // Default pipeline options for tests running on the legacy (non-portable) |
| // Dataflow worker; the staged worker jar replaces the container image. |
| def legacyPipelineOptions = [ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowValidatesTempRoot}", |
| "--dataflowWorkerJar=${dataflowLegacyWorkerJar}", |
| "--workerHarnessContainerImage=", |
| ] |
| |
| // Default pipeline options for tests running on Dataflow Runner V2 with the |
| // freshly pushed SDK container image. |
| def runnerV2PipelineOptions = [ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowValidatesTempRoot}", |
| "--sdkContainerImage=${dockerImageContainer}:${dockerTag}", |
| // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved. |
| "--experiments=use_unified_worker,use_runner_v2,shuffle_mode=appliance", |
| ] |
| |
| // JUnit categories excluded from every legacy-worker ValidatesRunner task. |
| def commonLegacyExcludeCategories = [ |
| 'org.apache.beam.sdk.testing.LargeKeys$Above10MB', |
| 'org.apache.beam.sdk.testing.UsesAttemptedMetrics', |
| 'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms', |
| 'org.apache.beam.sdk.testing.UsesPythonExpansionService', |
| 'org.apache.beam.sdk.testing.UsesDistributionMetrics', |
| 'org.apache.beam.sdk.testing.UsesGaugeMetrics', |
| 'org.apache.beam.sdk.testing.UsesSplittableParDoWithWindowedSideInputs', |
| 'org.apache.beam.sdk.testing.UsesTestStream', |
| 'org.apache.beam.sdk.testing.UsesParDoLifecycle', |
| 'org.apache.beam.sdk.testing.UsesMetricsPusher', |
| 'org.apache.beam.sdk.testing.UsesBundleFinalizer', |
| ] |
| |
| // JUnit categories excluded from every runner-v2 ValidatesRunner task. |
| def commonRunnerV2ExcludeCategories = [ |
| 'org.apache.beam.sdk.testing.UsesGaugeMetrics', |
| 'org.apache.beam.sdk.testing.UsesSetState', |
| 'org.apache.beam.sdk.testing.UsesMapState', |
| 'org.apache.beam.sdk.testing.UsesMetricsPusher', |
| 'org.apache.beam.sdk.testing.UsesOrderedListState', |
| 'org.apache.beam.sdk.testing.UsesTestStream', |
| 'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime', |
| 'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput', |
| ] |
| |
| // For the following test tasks using legacy worker, set workerHarnessContainerImage to empty to |
| // make Dataflow pick up the non-versioned container image, which handles a staged worker jar. |
| // Factory closure that creates a ValidatesRunner Test task against the legacy |
| // worker. args map keys: name (required task name), pipelineOptions, |
| // excludedTests (test-name patterns), excludedCategories (JUnit categories); |
| // the last three fall back to the defaults defined above / empty lists. |
| def createLegacyWorkerValidatesRunnerTest = { Map args -> |
| def name = args.name |
| def pipelineOptions = args.pipelineOptions ?: legacyPipelineOptions |
| def excludedTests = args.excludedTests ?: [] |
| def excludedCategories = args.excludedCategories ?: [] |
| |
| return tasks.create(name: name, type: Test, group: "Verification") { |
| dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar" |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson(pipelineOptions) |
| |
| // Increase test parallelism up to the number of Gradle workers. By default this is equal |
| // to the number of CPU cores, but can be increased by setting --max-workers=N. |
| maxParallelForks Integer.MAX_VALUE |
| classpath = configurations.validatesRunner |
| // Run the SDK-core test suite plus this project's own tests. |
| testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) + |
| files(project(project.path).sourceSets.test.output.classesDirs) |
| useJUnit { |
| includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner' |
| commonLegacyExcludeCategories.each { |
| excludeCategories it |
| } |
| excludedCategories.each { |
| excludeCategories it |
| } |
| filter { |
| excludedTests.each { |
| excludeTestsMatching it |
| } |
| } |
| } |
| } |
| } |
| |
| // Factory closure mirroring createLegacyWorkerValidatesRunnerTest but for |
| // Dataflow Runner V2: depends on the pushed docker container instead of the |
| // legacy worker jar, and uses the runner-v2 defaults. Same args map shape. |
| def createRunnerV2ValidatesRunnerTest = { Map args -> |
| def name = args.name |
| def pipelineOptions = args.pipelineOptions ?: runnerV2PipelineOptions |
| def excludedTests = args.excludedTests ?: [] |
| def excludedCategories = args.excludedCategories ?: [] |
| |
| return tasks.create(name: name, type: Test, group: "Verification") { |
| // Defined later in this file; resolved when the closure is invoked. |
| dependsOn buildAndPushDockerContainer |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson(pipelineOptions) |
| |
| // Increase test parallelism up to the number of Gradle workers. By default this is equal |
| // to the number of CPU cores, but can be increased by setting --max-workers=N. |
| maxParallelForks Integer.MAX_VALUE |
| classpath = configurations.validatesRunner |
| testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) + |
| files(project(project.path).sourceSets.test.output.classesDirs) |
| useJUnit { |
| includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner' |
| commonRunnerV2ExcludeCategories.each { |
| excludeCategories it |
| } |
| excludedCategories.each { |
| excludeCategories it |
| } |
| filter { |
| excludedTests.each { |
| excludeTestsMatching it |
| } |
| } |
| } |
| } |
| } |
| |
| // Push docker images to a container registry for use within tests. |
| // NB: Tasks which consume docker images from the registry should depend on this |
| // task directly ('dependsOn buildAndPushDockerContainer'). This ensures the correct |
| // task ordering such that the registry doesn't get cleaned up prior to task completion. |
| task buildAndPushDockerContainer() { |
| // Pick the SDK container matching the JVM under test (-PcompileAndRunTestsWithJava11). |
| def javaVer = project.hasProperty('compileAndRunTestsWithJava11') ? "java11" : "java8" |
| dependsOn ":sdks:java:container:${javaVer}:docker" |
| def defaultDockerImageName = containerImageName( |
| name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk", |
| root: "apache", |
| tag: project.sdk_version) |
| doLast { |
| // Re-tag the locally built SDK image with the timestamped test name, then push. |
| exec { |
| commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerImageName}" |
| } |
| exec { |
| commandLine "gcloud", "docker", "--", "push", "${dockerImageName}" |
| } |
| } |
| } |
| |
| // Clean up built images |
| task cleanUpDockerImages() { |
| doLast { |
| // Remove the local tag, untag in the registry, then sweep untagged digests. |
| exec { |
| commandLine "docker", "rmi", "--force", "${dockerImageName}" |
| } |
| exec { |
| commandLine "gcloud", "--quiet", "container", "images", "untag", "${dockerImageName}" |
| } |
| exec { |
| commandLine "./scripts/cleanup_untagged_gcr_images.sh", "${dockerImageContainer}" |
| } |
| } |
| } |
| |
| afterEvaluate { |
| // Ensure all tasks which use published docker images run before they are cleaned up |
| // (printRunnerV2PipelineOptions is exempt: it tells the user to clean up manually). |
| tasks.each { t -> |
| if (t.dependsOn.contains(buildAndPushDockerContainer) && !t.name.equalsIgnoreCase('printRunnerV2PipelineOptions')) { |
| t.finalizedBy cleanUpDockerImages |
| } |
| } |
| } |
| |
| // Pushes a runner-v2 container and prints the pipeline options needed to use |
| // it; the user is responsible for deleting the image afterwards. |
| task printRunnerV2PipelineOptions { |
| dependsOn buildAndPushDockerContainer |
| |
| doLast { |
| println "To run a Dataflow job with runner V2, add the following pipeline options to your command-line:" |
| println runnerV2PipelineOptions.join(' ') |
| println "Please delete your image upon completion with the following command:" |
| println "docker rmi ${dockerImageName}; gcloud container images delete --force-delete-tags ${dockerImageName}" |
| } |
| } |
| |
| task validatesRunner { |
| group = "Verification" |
| description "Validates Dataflow runner" |
| dependsOn(createLegacyWorkerValidatesRunnerTest(name: 'validatesRunnerLegacyWorkerTest')) |
| } |
| |
| task validatesRunnerStreaming { |
| group = "Verification" |
| description "Validates Dataflow runner forcing streaming mode" |
| dependsOn(createLegacyWorkerValidatesRunnerTest( |
| name: 'validatesRunnerLegacyWorkerTestStreaming', |
| pipelineOptions: legacyPipelineOptions + ['--streaming=true'], |
| // Additional categories not supported when forced into streaming mode. |
| excludedCategories: [ |
| 'org.apache.beam.sdk.testing.UsesCommittedMetrics', |
| 'org.apache.beam.sdk.testing.UsesMapState', |
| 'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput', |
| 'org.apache.beam.sdk.testing.UsesSetState', |
| ], |
| )) |
| } |
| |
| // Anchor task used as the cross-language test "job server": it only ensures |
| // the SDK container image is built and pushed. |
| task setupXVR { |
| dependsOn buildAndPushDockerContainer |
| } |
| |
| // Generates the cross-language (XVR) validates-runner tasks; the helper is |
| // provided by the Beam build plugin, outside this file. |
| createCrossLanguageValidatesRunnerTask( |
| startJobServer: setupXVR, |
| cleanupJobServer: cleanUpDockerImages, |
| classpath: configurations.validatesRunner, |
| numParallelTests: Integer.MAX_VALUE, |
| needsSdkLocation: true, |
| pythonPipelineOptions: [ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--sdk_harness_container_image_overrides=.*java.*,${dockerImageContainer}:${dockerTag}", |
| "--experiments=use_runner_v2", |
| // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved |
| "--experiments=shuffle_mode=appliance", |
| ], |
| javaPipelineOptions: [ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowValidatesTempRoot}", |
| "--sdkHarnessContainerImageOverrides=.*java.*,${dockerImageContainer}:${dockerTag}", |
| // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved. |
| "--experiments=use_runner_v2,shuffle_mode=appliance", |
| ], |
| pytestOptions: [ |
| "--capture=no", |
| "--numprocesses=8", |
| "--timeout=4500", |
| "--log-cli-level=INFO", |
| ] |
| ) |
| |
| task validatesRunnerV2 { |
| group = "Verification" |
| description = "Runs the ValidatesRunner tests on Dataflow Runner V2" |
| // Exclusions below are runner-v2 gaps/known failures on top of |
| // commonRunnerV2ExcludeCategories. |
| dependsOn(createRunnerV2ValidatesRunnerTest( |
| name: 'validatesRunnerV2Test', |
| excludedCategories: [ |
| 'org.apache.beam.sdk.testing.UsesOnWindowExpiration', |
| 'org.apache.beam.sdk.testing.UsesStatefulParDo', |
| 'org.apache.beam.sdk.testing.UsesTimersInParDo', |
| 'org.apache.beam.sdk.testing.UsesUnboundedPCollections', |
| 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo', |
| ], |
| excludedTests: [ |
| 'org.apache.beam.sdk.transforms.windowing.WindowingTest.testNonPartitioningWindowing', |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsKeyedCollection', |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindows', |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsWithoutCustomWindowTypes', |
| |
| 'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming', |
| |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testFnCallSequenceStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElementStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetup', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetupStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful', |
| |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testCombiningAccumulatingProcessingTime', |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys100MB', |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys10MB', |
| // TODO(BEAM-12353): Identify whether it's bug or a feature gap. |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner', |
| |
| 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2', |
| ] |
| )) |
| } |
| |
| task validatesRunnerV2Streaming { |
| group = "Verification" |
| description = "Runs the ValidatesRunner tests on Dataflow Runner V2 forcing streaming mode" |
| // Exclusions below are streaming-specific known failures/gaps on top of |
| // commonRunnerV2ExcludeCategories. |
| dependsOn(createRunnerV2ValidatesRunnerTest( |
| name: 'validatesRunnerV2TestStreaming', |
| pipelineOptions: runnerV2PipelineOptions + ['--streaming=true'], |
| excludedCategories: [ |
| 'org.apache.beam.sdk.testing.LargeKeys$Above10KB', |
| 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo', |
| 'org.apache.beam.sdk.testing.UsesCommittedMetrics', |
| 'org.apache.beam.sdk.testing.UsesTestStream', |
| ], |
| excludedTests: [ |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindows', |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsKeyedCollection', |
| 'org.apache.beam.sdk.transforms.windowing.WindowTest.testMergingCustomWindowsWithoutCustomWindowTypes', |
| 'org.apache.beam.examples.complete.TopWikipediaSessionsTest.testComputeTopUsers', |
| 'org.apache.beam.sdk.transforms.windowing.WindowingTest.testNonPartitioningWindowing', |
| |
| 'org.apache.beam.sdk.io.AvroIOTest.testWriteWindowed', |
| 'org.apache.beam.sdk.io.AvroIOTest.testWindowedAvroIOWriteViaSink', |
| 'org.apache.beam.sdk.extensions.sql.BeamSqlDslAggregationTest.testTriggeredTumble', |
| 'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming', |
| |
| 'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleBounded', |
| 'org.apache.beam.sdk.transforms.ParDoTest$OnWindowExpirationTests.testOnWindowExpirationSimpleUnbounded', |
| |
| // TODO(BEAM-11858) reading a side input twice fails |
| 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testSameSideInputReadTwice', |
| 'org.apache.beam.sdk.transforms.CombineFnsTest.testComposedCombineWithContext', |
| 'org.apache.beam.sdk.transforms.CombineTest$CombineWithContextTests.testSimpleCombineWithContextEmpty', |
| 'org.apache.beam.sdk.transforms.CombineTest$CombineWithContextTests.testSimpleCombineWithContext', |
| 'org.apache.beam.sdk.transforms.CombineTest$WindowingTests.testFixedWindowsCombineWithContext', |
| 'org.apache.beam.sdk.transforms.CombineTest$WindowingTests.testSessionsCombineWithContext', |
| 'org.apache.beam.sdk.transforms.CombineTest$WindowingTests.testSlidingWindowsCombineWithContext', |
| |
| 'org.apache.beam.runners.dataflow.DataflowRunnerTest.testBatchGroupIntoBatchesOverride', |
| |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest.testCombiningAccumulatingProcessingTime', |
| |
| // TODO(BEAM-11306): Pipeline is hanging for these 3 tests. |
| 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexBasicUnbounded', |
| 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testOutputAfterCheckpointUnbounded', |
| 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testBundleFinalizationOccursOnUnboundedSplittableDoFn', |
| |
| // TODO(BEAM-8543): streaming timers are not strictly time ordered |
| 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerOrderingWithCreate', |
| 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOther', |
| 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testTwoTimersSettingEachOtherWithCreateAsInput', |
| |
| 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testEventTimeTimerAlignAfterGcTimeUnbounded', |
| |
| 'org.apache.beam.sdk.metrics.MetricsTest$CommittedMetricTests.testCommittedCounterMetrics', |
| |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSameFixedWindows', |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitWithDifferentFixedWindows', |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSignalInSlidingWindows', |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitInGlobalWindow', |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitBoundedInDefaultWindow', |
| 'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSomeSignalWindowsEmpty', |
| |
| // TODO(BEAM-3245): respect ParDo lifecycle. |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElementStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetup', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInSetupStateful', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle', |
| 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful', |
| |
| // TODO(BEAM-11917) Empty flatten fails in streaming |
| "org.apache.beam.sdk.transforms.FlattenTest.testEmptyFlattenAsSideInput", |
| "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmptyThenParDo", |
| "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmpty", |
| |
| 'org.apache.beam.sdk.io.CountingSourceTest.testBoundedSourceSplits', |
| |
| // TODO(BEAM-12353): Identify whether it's bug or a feature gap. |
| 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner', |
| ] |
| )) |
| } |
| |
| // Copy GCP IO test resources into this project's test resource dir so the |
| // borrowed test classes can find them on the classpath. |
| task copyGoogleCloudPlatformTestResources(type: Copy) { |
| from project(':sdks:java:io:google-cloud-platform').fileTree("src/test/resources") |
| into "$buildDir/resources/test/" |
| } |
| |
| // Runs the GCP IO integration tests (non-KMS) on the legacy Dataflow worker. |
| task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test, dependsOn: copyGoogleCloudPlatformTestResources) { |
| group = "Verification" |
| dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar" |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson([ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowPostCommitTempRoot}", |
| "--dataflowWorkerJar=${dataflowLegacyWorkerJar}", |
| "--workerHarnessContainerImage=", |
| ]) |
| |
| include '**/*IT.class' |
| exclude '**/BigQueryIOReadIT.class' |
| exclude '**/BigQueryIOStorageReadTableRowIT.class' |
| exclude '**/PubsubReadIT.class' |
| exclude '**/FhirIOReadIT.class' |
| maxParallelForks 4 |
| classpath = configurations.googleCloudPlatformIntegrationTest |
| testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs) |
| useJUnit { |
| // KMS tests run in the dedicated Kms task below. |
| excludeCategories "org.apache.beam.sdk.testing.UsesKms" |
| } |
| } |
| |
| // Runs only the KMS-categorized GCP IO integration tests on the legacy |
| // worker, using a CMEK temp root and a test KMS key. |
| task googleCloudPlatformLegacyWorkerKmsIntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar" |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson([ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowPostCommitTempRootKms}", |
| "--dataflowWorkerJar=${dataflowLegacyWorkerJar}", |
| "--workerHarnessContainerImage=", |
| "--dataflowKmsKey=${dataflowKmsKey}", |
| ]) |
| |
| include '**/*IT.class' |
| exclude '**/BigQueryKmsKeyIT.class' // Only needs to run on direct runner. |
| maxParallelForks 4 |
| classpath = configurations.googleCloudPlatformIntegrationTest |
| testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs) |
| useJUnit { |
| includeCategories "org.apache.beam.sdk.testing.UsesKms" |
| } |
| } |
| |
| // Runs the GCP IO integration tests on Dataflow Runner V2 with the pushed |
| // SDK container image. |
| task googleCloudPlatformRunnerV2IntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn buildAndPushDockerContainer |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson(runnerV2PipelineOptions) |
| |
| include '**/*IT.class' |
| exclude '**/BigQueryIOStorageReadTableRowIT.class' |
| exclude '**/SpannerWriteIT.class' |
| exclude '**/*KmsKeyIT.class' |
| exclude '**/FhirIOReadIT.class' |
| exclude '**/FhirIOWriteIT.class' |
| exclude '**/FhirIOLROIT.class' |
| exclude '**/FhirIOSearchIT.class' |
| |
| maxParallelForks 4 |
| classpath = configurations.googleCloudPlatformIntegrationTest |
| testClassesDirs = files(project(":sdks:java:io:google-cloud-platform").sourceSets.test.output.classesDirs) |
| useJUnit { } |
| } |
| |
| // Runs the examples/java integration tests on the legacy Dataflow worker. |
| task examplesJavaLegacyWorkerIntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar" |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson([ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowPostCommitTempRoot}", |
| "--dataflowWorkerJar=${dataflowLegacyWorkerJar}", |
| "--workerHarnessContainerImage=", |
| ]) |
| |
| // The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so |
| // this postCommit integration test excludes them. |
| include '**/*IT.class' |
| exclude '**/WordCountIT.class' |
| exclude '**/WindowedWordCountIT.class' |
| maxParallelForks 4 |
| classpath = configurations.examplesJavaIntegrationTest |
| testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs) |
| useJUnit { } |
| } |
| |
| // Runs the examples/java integration tests on Dataflow Runner V2. |
| task examplesJavaRunnerV2IntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn buildAndPushDockerContainer |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson(runnerV2PipelineOptions) |
| |
| // The examples/java preCommit task already covers running WordCountIT/WindowedWordCountIT so |
| // this postCommit integration test excludes them. |
| include '**/*IT.class' |
| exclude '**/WordCountIT.class' |
| exclude '**/WindowedWordCountIT.class' |
| exclude '**/TopWikipediaSessionsIT.class' |
| exclude '**/AutoCompleteIT.class' |
| // TODO(BEAM-11201): test times out. |
| exclude '**/FhirIOReadIT.class' |
| |
| maxParallelForks 4 |
| classpath = configurations.examplesJavaIntegrationTest |
| testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs) |
| useJUnit { } |
| } |
| |
| // Runs the SDK-core integration tests on the legacy Dataflow worker. |
| task coreSDKJavaLegacyWorkerIntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn ":runners:google-cloud-dataflow-java:worker:legacy-worker:shadowJar" |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson([ |
| "--runner=TestDataflowRunner", |
| "--project=${dataflowProject}", |
| "--region=${dataflowRegion}", |
| "--tempRoot=${dataflowPostCommitTempRoot}", |
| "--dataflowWorkerJar=${dataflowLegacyWorkerJar}", |
| "--workerHarnessContainerImage=", |
| ]) |
| |
| include '**/*IT.class' |
| // TODO[Beam-4684]: Support @RequiresStableInput on Dataflow in a more intelligent way |
| exclude '**/RequiresStableInputIT.class' |
| maxParallelForks 4 |
| classpath = configurations.coreSDKJavaIntegrationTest |
| testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) |
| useJUnit { } |
| } |
| |
| // Runs the SDK-core integration tests on Dataflow Runner V2. |
| task coreSDKJavaRunnerV2IntegrationTest(type: Test) { |
| group = "Verification" |
| dependsOn buildAndPushDockerContainer |
| |
| systemProperty "beamTestPipelineOptions", JsonOutput.toJson(runnerV2PipelineOptions) |
| |
| include '**/*IT.class' |
| // TODO[Beam-4684]: Support @RequiresStableInput on Dataflow in a more intelligent way |
| exclude '**/RequiresStableInputIT.class' |
| maxParallelForks 4 |
| classpath = configurations.coreSDKJavaIntegrationTest |
| testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) |
| useJUnit { } |
| } |
| |
| // Aggregate task wiring together all legacy-worker integration test tasks. |
| task postCommit { |
| group = "Verification" |
| description = "Various integration tests using the Dataflow runner." |
| dependsOn googleCloudPlatformLegacyWorkerIntegrationTest, |
| googleCloudPlatformLegacyWorkerKmsIntegrationTest, |
| examplesJavaLegacyWorkerIntegrationTest, |
| coreSDKJavaLegacyWorkerIntegrationTest |
| } |
| |
| // Aggregate task wiring together all Runner V2 integration test tasks. |
| task postCommitRunnerV2 { |
| group = "Verification" |
| description = "Various integration tests using the Dataflow runner V2." |
| dependsOn googleCloudPlatformRunnerV2IntegrationTest, |
| examplesJavaRunnerV2IntegrationTest, |
| coreSDKJavaRunnerV2IntegrationTest |
| } |
| |
| // Defaults for the archetype/quickstart validation tasks generated below; |
| // each is overridable with -P<name>. |
| def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing' |
| def gcpRegion = project.findProperty('gcpRegion') ?: 'us-central1' |
| def gcsBucket = project.findProperty('gcsBucket') ?: 'temp-storage-for-release-validation-tests/nightly-snapshot-validation' |
| def bqDataset = project.findProperty('bqDataset') ?: 'beam_postrelease_mobile_gaming' |
| def pubsubTopic = project.findProperty('pubsubTopic') ?: 'java_mobile_gaming_topic' |
| |
| // Generates :runners:google-cloud-dataflow-java:runQuickstartJavaDataflow |
| // (helper provided by the Beam build plugin, outside this file). |
| createJavaExamplesArchetypeValidationTask(type: 'Quickstart', |
| runner: 'Dataflow', |
| gcpProject: gcpProject, |
| gcpRegion: gcpRegion, |
| gcsBucket: gcsBucket) |
| |
| // Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflow |
| createJavaExamplesArchetypeValidationTask(type: 'MobileGaming', |
| runner: 'Dataflow', |
| gcpProject: gcpProject, |
| gcpRegion: gcpRegion, |
| gcsBucket: gcsBucket, |
| bqDataset: bqDataset, |
| pubsubTopic: pubsubTopic) |
| |
| // Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflowBom |
| createJavaExamplesArchetypeValidationTask(type: 'MobileGaming', |
| runner: 'DataflowBom', |
| gcpProject: gcpProject, |
| gcpRegion: gcpRegion, |
| gcsBucket: gcsBucket, |
| bqDataset: bqDataset, |
| pubsubTopic: pubsubTopic) |
| |
| // Standalone task for testing GCS upload, use with -PfilesToStage and -PdataflowTempRoot. |
| task GCSUpload(type: JavaExec) { |
| main = 'org.apache.beam.runners.dataflow.util.GCSUploadMain' |
| classpath = sourceSets.test.runtimeClasspath |
| args "--stagingLocation=${dataflowUploadTemp}/staging", |
| "--filesToStage=${testFilesToStage}" |
| } |