/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Main Flink runner build file, shared by all of the Flink runner's version-specific build targets.
* The file must be parameterized with the Flink version and the source/override directories.
*
* See the build.gradle files of the individual Flink version projects for an example of how to use this script.
*/
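/*
* Illustrative sketch only: a version-specific build.gradle is expected to define the properties
* read below (flink_version, the *_source_overrides / *_resources_overrides lists, and optionally
* archives_base_name) and then apply this script. The concrete values and the relative script path
* shown here are assumptions for illustration, not taken from the real per-version build files.
*
*   project.ext {
*     flink_version = '1.9.1'
*     main_source_overrides = ['./src/main/java']
*     main_resources_overrides = []
*     test_source_overrides = ['./src/test/java']
*     test_resources_overrides = []
*     archives_base_name = 'beam-runners-flink-1.9'
*   }
*   apply from: '../flink_runner.gradle'
*/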
import groovy.json.JsonOutput
apply plugin: 'org.apache.beam.module'
applyJavaNature(
  automaticModuleName: 'org.apache.beam.runners.flink',
  archivesBaseName: (project.hasProperty('archives_base_name') ? archives_base_name : archivesBaseName)
)
description = "Apache Beam :: Runners :: Flink $flink_version"
/*
* We manually specify these evaluationDependsOn entries to ensure that the following projects
* are evaluated before this project, because we reference their "sourceSets.test.output" directly.
*/
evaluationDependsOn(":sdks:java:core")
evaluationDependsOn(":runners:core-java")
/*
* Copy & merge source overrides into build directory.
*/
def sourceOverridesBase = "${project.buildDir}/source-overrides/src"
def copySourceOverrides = tasks.register('copySourceOverrides', Copy) {
  it.from main_source_overrides
  it.into "${sourceOverridesBase}/main/java"
  it.duplicatesStrategy DuplicatesStrategy.INCLUDE
}
compileJava.dependsOn copySourceOverrides
def copyResourcesOverrides = tasks.register('copyResourcesOverrides', Copy) {
  it.from main_resources_overrides
  it.into "${sourceOverridesBase}/main/resources"
  it.duplicatesStrategy DuplicatesStrategy.INCLUDE
}
compileJava.dependsOn copyResourcesOverrides
def copyTestSourceOverrides = tasks.register('copyTestSourceOverrides', Copy) {
  it.from test_source_overrides
  it.into "${sourceOverridesBase}/test/java"
  it.duplicatesStrategy DuplicatesStrategy.INCLUDE
}
compileTestJava.dependsOn copyTestSourceOverrides
def copyTestResourcesOverrides = tasks.register('copyTestResourcesOverrides', Copy) {
  it.from test_resources_overrides
  it.into "${sourceOverridesBase}/test/resources"
  it.duplicatesStrategy DuplicatesStrategy.INCLUDE
}
compileTestJava.dependsOn copyTestResourcesOverrides
/*
* We have to explicitly set all directories here to make sure each
* version of Flink has the correct overrides set.
*/
def sourceBase = "${project.projectDir}/../src"
sourceSets {
  main {
    java {
      srcDirs = ["${sourceBase}/main/java", "${sourceOverridesBase}/main/java"]
    }
    resources {
      srcDirs = ["${sourceBase}/main/resources", "${sourceOverridesBase}/main/resources"]
    }
  }
  test {
    java {
      srcDirs = ["${sourceBase}/test/java", "${sourceOverridesBase}/test/java"]
    }
    resources {
      srcDirs = ["${sourceBase}/test/resources", "${sourceOverridesBase}/test/resources"]
    }
  }
}
/*
* By default, Spotless operates only on this project's own files.
* BeamModulePlugin sets a custom configuration, which we override here so that
* all of the Flink runner's source directories are scanned.
*/
spotless {
  java {
    target target + project.fileTree(project.projectDir.parentFile) { include 'src/*/java/**/*.java' }
  }
}
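// Note: with the widened target above, the regular Spotless tasks (spotlessCheck / spotlessApply)
// also cover the shared source tree under ../src, not only files inside this project directory.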
test {
  systemProperty "log4j.configuration", "log4j-test.properties"
  // systemProperty "org.slf4j.simpleLogger.defaultLogLevel", "debug"
  jvmArgs "-XX:-UseGCOverheadLimit"
  if (System.getProperty("beamSurefireArgline")) {
    jvmArgs System.getProperty("beamSurefireArgline")
  }
  // TODO Running the tests for all Flink versions in parallel can be too harsh on Jenkins memory.
  // Run them serially for now to avoid "Exit code 137", i.e. the Jenkins host killing the Gradle test process.
  if (project.path == ":runners:flink:1.8") {
    mustRunAfter(":runners:flink:1.7:test")
  } else if (project.path == ":runners:flink:1.9") {
    mustRunAfter(":runners:flink:1.7:test")
    mustRunAfter(":runners:flink:1.8:test")
  }
}
configurations {
  validatesRunner
}
dependencies {
  compileOnly project(":sdks:java:build-tools")
  compile library.java.vendored_guava_26_0_jre
  compile project(path: ":sdks:java:core", configuration: "shadow")
  compile project(":runners:core-java")
  compile project(":runners:core-construction-java")
  compile project(":runners:java-fn-execution")
  compile project(":sdks:java:extensions:google-cloud-platform-core")
  compile library.java.vendored_grpc_1_21_0
  compile library.java.jackson_annotations
  compile library.java.slf4j_api
  compile library.java.joda_time
  compile library.java.commons_compress
  compile library.java.args4j
  compile "org.apache.flink:flink-clients_2.11:$flink_version"
  compile "org.apache.flink:flink-core:$flink_version"
  compile "org.apache.flink:flink-metrics-core:$flink_version"
  compile "org.apache.flink:flink-java:$flink_version"
  compile "org.apache.flink:flink-runtime_2.11:$flink_version"
  compile "org.apache.flink:flink-streaming-java_2.11:$flink_version"
  testCompile project(path: ":sdks:java:core", configuration: "shadowTest")
  // FlinkStateInternalsTest extends abstract StateInternalsTest
  testCompile project(path: ":runners:core-java", configuration: "testRuntime")
  testCompile library.java.commons_lang3
  testCompile library.java.hamcrest_core
  testCompile library.java.junit
  testCompile library.java.mockito_core
  testCompile library.java.powermock
  testCompile library.java.google_api_services_bigquery
  testCompile project(":sdks:java:io:google-cloud-platform")
  testCompile library.java.jackson_dataformat_yaml
  testCompile "org.apache.flink:flink-core:$flink_version:tests"
  testCompile "org.apache.flink:flink-runtime_2.11:$flink_version:tests"
  testCompile "org.apache.flink:flink-streaming-java_2.11:$flink_version:tests"
  testCompile "org.apache.flink:flink-test-utils_2.11:$flink_version"
  testCompile project(":sdks:java:harness")
  testRuntimeOnly library.java.slf4j_simple
  validatesRunner project(path: ":sdks:java:core", configuration: "shadowTest")
  validatesRunner project(path: ":runners:core-java", configuration: "testRuntime")
  validatesRunner project(project.path)
}
class ValidatesRunnerConfig {
  String name
  boolean streaming
}
def createValidatesRunnerTask(Map m) {
  def config = m as ValidatesRunnerConfig
  tasks.register(config.name, Test) {
    group = "Verification"
    def runnerType = config.streaming ? "streaming" : "batch"
    description = "Validates the ${runnerType} runner"
    def pipelineOptions = JsonOutput.toJson(
        ["--runner=TestFlinkRunner",
         "--streaming=${config.streaming}",
         "--parallelism=2",
        ])
    systemProperty "beamTestPipelineOptions", pipelineOptions
    classpath = configurations.validatesRunner
    testClassesDirs = files(
        project(":sdks:java:core").sourceSets.test.output.classesDirs,
        project(":runners:core-java").sourceSets.test.output.classesDirs)
    // maxParallelForks decreased from 4 in order to avoid OOM errors
    maxParallelForks 2
    useJUnit {
      includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
      excludeCategories 'org.apache.beam.sdk.testing.FlattenWithHeterogeneousCoders'
      excludeCategories 'org.apache.beam.sdk.testing.LargeKeys$Above100MB'
      excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
      if (config.streaming) {
        excludeCategories 'org.apache.beam.sdk.testing.UsesImpulse'
        excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithMultipleStages' // BEAM-8598
        excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime'
      } else {
        excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
        excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
      }
    }
  }
}
createValidatesRunnerTask(name: "validatesRunnerBatch", streaming: false)
createValidatesRunnerTask(name: "validatesRunnerStreaming", streaming: true)
tasks.register('validatesRunner') {
  group = 'Verification'
  description "Validates Flink runner"
  dependsOn validatesRunnerBatch
  dependsOn validatesRunnerStreaming
}
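// Illustrative invocation (using the 1.9 build target referenced above as an example):
//   ./gradlew :runners:flink:1.9:validatesRunner
// runs both the batch and streaming ValidatesRunner suites against that Flink version.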
// Generates :runners:flink:1.9:runQuickstartJavaFlinkLocal
createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'FlinkLocal')