/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.beam.gradle.BeamModulePlugin
/**
 * Flink Runner JobServer build file shared by all of its build targets.
 *
 * See build.gradle files for an example of how to use this script.
 */
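// As an illustration of that contract, a consuming build.gradle would set the
// source-directory properties this script expects (main_source_dirs,
// main_resources_dirs, test_source_dirs, test_resources_dirs) before applying
// it. This is only a sketch; the paths and the script location are assumptions,
// not taken from any particular Flink version's build:
//
//   def basePath = '..'
//   project.ext {
//     main_source_dirs = ["$basePath/src/main/java"]          // assumed layout
//     main_resources_dirs = ["$basePath/src/main/resources"]  // assumed layout
//     test_source_dirs = ["$basePath/src/test/java"]          // assumed layout
//     test_resources_dirs = ["$basePath/src/test/resources"]  // assumed layout
//   }
//   apply from: "$basePath/flink_job_server.gradle"           // assumed script path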
apply plugin: 'org.apache.beam.module'
apply plugin: 'application'
// We need to set mainClassName before applying the shadow plugin.
mainClassName = "org.apache.beam.runners.flink.FlinkJobServerDriver"
archivesBaseName = project.hasProperty('archives_base_name') ? archives_base_name : archivesBaseName
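// Note: the shadowClosure below appends all "reference.conf" resources into a
// single entry in the shadow jar. Presumably this preserves the Typesafe Config
// defaults of Akka-based Flink dependencies after shading, since only one
// jar's copy of the file would survive otherwise.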
applyJavaNature(
  validateShadowJar: false,
  exportJavadoc: false,
  shadowClosure: {
    append "reference.conf"
  },
)
// Resolve the Flink project name (and version) the job-server is based on
def flinkRunnerProject = "${project.path.replace(":job-server", "")}"
description = project(flinkRunnerProject).description + " :: Job Server"
/*
 * We have to explicitly set all directories here to make sure each
 * version of Flink has the correct overrides set.
 */
sourceSets {
  main {
    java {
      srcDirs = main_source_dirs
    }
    resources {
      srcDirs = main_resources_dirs
    }
  }
  test {
    java {
      srcDirs = test_source_dirs
    }
    resources {
      srcDirs = test_resources_dirs
    }
  }
}
configurations {
  validatesPortableRunner
}
configurations.all {
  // Replace commons-logging with the jcl-over-slf4j bridge.
  exclude group: "commons-logging", module: "commons-logging"
}
dependencies {
  compile project(path: flinkRunnerProject, configuration: "shadow")
  compile group: "org.slf4j", name: "jcl-over-slf4j", version: dependencies.create(project.library.java.slf4j_api).getVersion()
  validatesPortableRunner project(path: flinkRunnerProject, configuration: "shadowTest")
  validatesPortableRunner project(path: ":sdks:java:core", configuration: "shadowTest")
  validatesPortableRunner project(path: ":runners:core-java", configuration: "shadowTest")
  validatesPortableRunner project(path: ":runners:reference:java", configuration: "shadowTest")
  compile project(path: ":sdks:java:extensions:google-cloud-platform-core", configuration: "shadow")
  compile library.java.slf4j_simple
  // TODO: Enable the AWS and HDFS file systems.
  // For resolving external transform requests
  compile project(path: ":sdks:java:io:kafka", configuration: "shadow")
}
// NOTE: runShadow must be used in order to run the job server. The standard run
// task will not work because the flink runner classes only exist in the shadow
// jar.
runShadow {
  args = []
  if (project.hasProperty('jobHost'))
    args += ["--job-host=${project.property('jobHost')}"]
  if (project.hasProperty('artifactsDir'))
    args += ["--artifacts-dir=${project.property('artifactsDir')}"]
  if (project.hasProperty('cleanArtifactsPerJob'))
    args += ["--clean-artifacts-per-job=${project.property('cleanArtifactsPerJob')}"]
  if (project.hasProperty('flinkMasterUrl'))
    args += ["--flink-master-url=${project.property('flinkMasterUrl')}"]
  if (project.hasProperty('flinkConfDir'))
    args += ["--flink-conf-dir=${project.property('flinkConfDir')}"]
  // Enable remote debugging.
  jvmArgs = ["-Xdebug", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"]
  if (project.hasProperty("logLevel"))
    jvmArgs += ["-Dorg.slf4j.simpleLogger.defaultLogLevel=${project.property('logLevel')}"]
}
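// As a usage sketch, a local run of the job server might look like the
// following; the project path is an illustrative assumption, while the
// -P property names come from the runShadow block above:
//
//   ./gradlew :runners:flink:1.5:job-server:runShadow \
//       -PjobHost=localhost \
//       -PflinkMasterUrl=localhost:8081 \
//       -PlogLevel=debug
//
// While the server runs, a remote debugger can attach to the JDWP socket on
// port 5005 opened by the jvmArgs above.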
def portableValidatesRunnerTask(String name, Boolean streaming) {
  def pipelineOptions = [
      // Limit resource consumption via parallelism
      "--parallelism=2",
      "--shutdownSourcesOnFinalWatermark",
  ]
  if (streaming) {
    pipelineOptions += "--streaming"
  }
  createPortableValidatesRunnerTask(
      name: "validatesPortableRunner${name}",
      jobServerDriver: "org.apache.beam.runners.flink.FlinkJobServerDriver",
      jobServerConfig: "--job-host=localhost,--job-port=0,--artifact-port=0",
      testClasspathConfiguration: configurations.validatesPortableRunner,
      numParallelTests: 1,
      pipelineOpts: pipelineOptions,
      environment: BeamModulePlugin.PortableValidatesRunnerConfiguration.Environment.EMBEDDED,
      testCategories: {
        includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
        excludeCategories 'org.apache.beam.sdk.testing.FlattenWithHeterogeneousCoders'
        excludeCategories 'org.apache.beam.sdk.testing.LargeKeys$Above100MB'
        excludeCategories 'org.apache.beam.sdk.testing.UsesAttemptedMetrics'
        excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
        excludeCategories 'org.apache.beam.sdk.testing.UsesCounterMetrics'
        excludeCategories 'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms'
        excludeCategories 'org.apache.beam.sdk.testing.UsesCustomWindowMerging'
        excludeCategories 'org.apache.beam.sdk.testing.UsesDistributionMetrics'
        excludeCategories 'org.apache.beam.sdk.testing.UsesFailureMessage'
        excludeCategories 'org.apache.beam.sdk.testing.UsesGaugeMetrics'
        excludeCategories 'org.apache.beam.sdk.testing.UsesParDoLifecycle'
        excludeCategories 'org.apache.beam.sdk.testing.UsesMapState'
        excludeCategories 'org.apache.beam.sdk.testing.UsesSetState'
        if (streaming) {
          excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime'
        } else {
          excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
        }
        // SplittableDoFn tests
        excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
        excludeCategories 'org.apache.beam.sdk.testing.UsesSplittableParDoWithWindowedSideInputs'
        excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
      },
  )
}
project.ext.validatesPortableRunnerBatch = portableValidatesRunnerTask("Batch", false)
project.ext.validatesPortableRunnerStreaming = portableValidatesRunnerTask("Streaming", true)
task validatesPortableRunner() {
  dependsOn validatesPortableRunnerBatch
  dependsOn validatesPortableRunnerStreaming
}
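// For example (project path again an illustrative assumption), the aggregate
// task or a single variant can be invoked directly:
//
//   ./gradlew :runners:flink:1.5:job-server:validatesPortableRunner
//   ./gradlew :runners:flink:1.5:job-server:validatesPortableRunnerStreaming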
project.ext.validatesCrossLanguageTransforms =
    createPortableValidatesRunnerTask(
        name: "validatesCrossLanguageTransforms",
        jobServerDriver: "org.apache.beam.runners.flink.FlinkJobServerDriver",
        jobServerConfig: "--clean-artifacts-per-job,--job-host=localhost,--job-port=0,--artifact-port=0,--expansion-port=0",
        testClasspathConfiguration: configurations.validatesPortableRunner,
        numParallelTests: 1,
        pipelineOpts: [
            // Limit resource consumption via parallelism
            "--parallelism=2",
            "--shutdownSourcesOnFinalWatermark",
        ],
        testCategories: {
          // Only include cross-language transform tests, to avoid retesting
          // everything in the Docker environment.
          includeCategories 'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms'
        },
    )
project.evaluationDependsOn ':sdks:python'
validatesCrossLanguageTransforms.dependsOn ':sdks:python:setupVirtualenv'
validatesCrossLanguageTransforms.systemProperty "pythonTestExpansionCommand", ". ${project(':sdks:python').envdir}/bin/activate && pip install -e ${project(':sdks:python').projectDir}[test] && python -m apache_beam.runners.portability.expansion_service_test"
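// As a final usage sketch (project path an illustrative assumption), running
//
//   ./gradlew :runners:flink:1.5:job-server:validatesCrossLanguageTransforms
//
// first builds the Python virtualenv via :sdks:python:setupVirtualenv, then
// launches the Python expansion service with the command configured above.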