blob: 3594a753e34ed5a9a578723a0858f6aa1e5ff21f [file] [log] [blame]
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// There is no actual src dir under legacy-worker/ since legacy-worker is built against the
// same sourceSet as fn-api-worker. The reason why legacy-worker has a separated dir is:
// 1. Have one location for editing worker code which is built and tested in two
// different ways.
// 2. Avoid having artifacts such as test results and jars overwrite each other.
// 3. Better compatibility with Intellij since Intellij requires separate content roots.
// Apply the Beam module plugin; it supplies applyJavaNature, DEFAULT_SHADOW_CLOSURE
// and the 'library.java.*' dependency map used throughout this script
// (see BeamModulePlugin — TODO confirm exact provenance of DEFAULT_SHADOW_CLOSURE).
plugins { id 'org.apache.beam.module' }
archivesBaseName = 'beam-runners-google-cloud-dataflow-java-legacy-worker'
// Set a specific version of 'com.google.apis:google-api-services-dataflow'
// by adding -Pdataflow.version=<version> in Gradle command. Otherwise,
// 'google_clients_version' defined in BeamModulePlugin will be used as default.
// Name of the Gradle project property that overrides the Dataflow API client version.
def DATAFLOW_VERSION = "dataflow.version"
// Get full dependency of 'com.google.apis:google-api-services-dataflow'.
// If the property is present, build the coordinate string from it; otherwise fall
// back to the version pinned in the shared 'library' map.
def google_api_services_dataflow = project.hasProperty(DATAFLOW_VERSION) ? "com.google.apis:google-api-services-dataflow:" + getProperty(DATAFLOW_VERSION) : library.java.google_api_services_dataflow
// Builds the relocated (shaded) package prefix used with the shadow plugin.
// Example: getWorkerRelocatedPath("com.") ->
//   "org.apache.beam.runners.dataflow.worker.repackaged.com."
def getWorkerRelocatedPath = { String suffix ->
  "org.apache.beam.runners.dataflow.worker.repackaged.${suffix}".toString()
}
// Following listed dependencies will be shaded only in fnapi worker, not legacy
// worker.
// In this (legacy) build they are added in 'provided' scope below, i.e. they are
// expected to be on the classpath at runtime via the user's pipeline rather than
// bundled into this jar.
def sdk_provided_dependencies = [
google_api_services_dataflow,
library.java.avro,
library.java.google_api_client,
library.java.google_http_client,
library.java.google_http_client_jackson,
library.java.jackson_annotations,
library.java.jackson_core,
library.java.jackson_databind,
library.java.joda_time,
library.java.slf4j_api,
library.java.vendored_grpc_1_13_1,
]
// Beam sibling projects whose shadow artifacts are added in 'provided' scope in
// the dependencies block below; a user's pipeline is expected to supply these
// (and their transitive dependencies) at job submission time.
def sdk_provided_project_dependencies = [
":model:pipeline",
":runners:google-cloud-dataflow-java",
":sdks:java:core",
":sdks:java:extensions:google-cloud-platform-core",
":sdks:java:io:google-cloud-platform",
]
// Exclude unneeded dependencies when building jar
// NOTE(review): this variable is never referenced anywhere else in this file —
// presumably applyJavaNature or a sibling build script is meant to consume it,
// or it is dead code left from a refactor. TODO confirm and either wire it up
// or delete it.
def excluded_dependencies = [
"com.google.auto.service:auto-service", // Provided scope added from applyJavaNature
"com.google.auto.value:auto-value", // Provided scope added from applyJavaNature
"org.codehaus.jackson:jackson-core-asl", // Exclude an old version of jackson-core-asl introduced by google-http-client-jackson
"org.objenesis:objenesis", // Transitive dependency introduced from Beam
"org.tukaani:xz", // Transitive dependency introduced from Beam
library.java.commons_compress, // Transitive dependency introduced from Beam
library.java.error_prone_annotations, // Provided scope added in worker
library.java.hamcrest_core, // Test only
library.java.hamcrest_library, // Test only
library.java.junit, // Test only
]
// Configure standard Beam Java build conventions plus the shadow (fat-jar)
// packaging for the worker. shadowJarValidationExcludes lists class paths that
// are allowed to appear un-relocated in the shaded jar; everything bundled via
// the shadowClosure below is otherwise relocated under the
// org.apache.beam.runners.dataflow.worker.repackaged.* namespace.
applyJavaNature(
publish: false,
exportJavadoc: false,
enableSpotbugs: false /* TODO(BEAM-5658): enable spotbugs */,
shadowJarValidationExcludes: [
"org/apache/beam/runners/dataflow/worker/**",
"org/apache/beam/repackaged/beam_runners_google_cloud_dataflow_java_legacy_worker/**",
// TODO(BEAM-6137): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace
"com/google/cloud/dataflow/worker/DataflowRunnerHarness.class",
// TODO(BEAM-6136): Enable relocation for conscrypt
"org/conscrypt/**",
],
// DEFAULT_SHADOW_CLOSURE is presumably supplied by the Beam module plugin —
// TODO confirm; the closure below is appended to it.
shadowClosure: DEFAULT_SHADOW_CLOSURE << {
// Each included dependency must also include all of its necessary transitive dependencies
// or have them provided by the users pipeline during job submission. Typically a users
// pipeline includes :runners:google-cloud-dataflow-java and its transitive dependencies
// so those dependencies don't need to be shaded (bundled and relocated) away. All other
// dependencies needed to run the worker must be shaded (bundled and relocated) to prevent
// ClassNotFound and/or MethodNotFound errors during pipeline execution.
//
// Each included dependency should have a matching relocation rule below that ensures
// that the shaded jar is correctly built.
dependencies {
include(project(path: ":model:fn-execution", configuration: "shadow"))
}
relocate("org.apache.beam.model.fnexecution.v1", getWorkerRelocatedPath("org.apache.beam.model.fnexecution.v1"))
dependencies {
include(project(path: ":runners:core-construction-java", configuration: "shadow"))
include(project(path: ":runners:core-java", configuration: "shadow"))
}
relocate("org.apache.beam.runners.core", getWorkerRelocatedPath("org.apache.beam.runners.core"))
relocate("org.apache.beam.repackaged.beam_runners_core_construction_java", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_core_construction_java"))
relocate("org.apache.beam.repackaged.beam_runners_core_java", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_core_java"))
dependencies {
include(project(path: ":runners:java-fn-execution", configuration: "shadow"))
}
relocate("org.apache.beam.runners.fnexecution", getWorkerRelocatedPath("org.apache.beam.runners.fnexecution"))
relocate("org.apache.beam.repackaged.beam_runners_java_fn_execution", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_java_fn_execution"))
dependencies {
include(project(path: ":sdks:java:fn-execution", configuration: "shadow"))
}
relocate("org.apache.beam.sdk.fn", getWorkerRelocatedPath("org.apache.beam.sdk.fn"))
relocate("org.apache.beam.repackaged.beam_sdks_java_fn_execution", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_sdks_java_fn_execution"))
// TODO(BEAM-6136): Enable relocation for conscrypt
// Conscrypt is bundled but deliberately NOT relocated (matching the
// shadowJarValidationExcludes entry above), likely because of its native
// (JNI) component — TODO confirm.
dependencies {
include(dependency("org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"))
}
dependencies {
// We have to include jetty-server/jetty-servlet and all of its transitive dependencies
// which includes several org.eclipse.jetty artifacts + servlet-api
include(dependency("org.eclipse.jetty:.*:9.2.10.v20150310"))
include(dependency("javax.servlet:javax.servlet-api:3.1.0"))
}
relocate("org.eclipse.jetty", getWorkerRelocatedPath("org.eclipse.jetty"))
relocate("javax.servlet", getWorkerRelocatedPath("javax.servlet"))
// We don't relocate windmill since it is already underneath the org.apache.beam.runners.dataflow.worker namespace and never
// expect a user pipeline to include it. There is also a JNI component that windmill server relies on which makes
// arbitrary relocation more difficult.
dependencies {
include(project(path: ":runners:google-cloud-dataflow-java:worker:windmill", configuration: "shadow"))
}
// Include original source files extracted under
// '$buildDir/original_sources_to_package' to jar
from "$buildDir/original_sources_to_package"
exclude "META-INF/LICENSE.txt"
exclude "about.html"
})
/******************************************************************************/
// Configure the worker root project
configurations {
// Custom configuration; no consumer is visible in this file — presumably
// another build script resolves it to obtain the worker sources. TODO confirm.
sourceFile
// Ban these dependencies from all configurations
all {
// Ban the usage of AppleJavaExtensions in spotbugs.
exclude group: "com.apple", module: "AppleJavaExtensions"
}
}
// This project has no src/ of its own (see header comment): it compiles the
// shared worker sourceSets from :runners:google-cloud-dataflow-java:worker.
// Force that project to be evaluated first so its sourceSets exist here.
evaluationDependsOn(":runners:google-cloud-dataflow-java:worker")
compileJava {
source project(":runners:google-cloud-dataflow-java:worker").sourceSets.main.java.srcDirs
}
compileTestJava {
source project(":runners:google-cloud-dataflow-java:worker").sourceSets.test.java.srcDirs
}
dependencies {
// Note that any dependency that is modified here should also be modified within
// runners/google-cloud-dataflow-java/worker/build.gradle using the rules provided
// there.
// 'provided': available at compile time, expected on the runtime classpath via
// the user's pipeline, and not bundled into the shaded jar.
sdk_provided_dependencies.each {
provided(it)
}
sdk_provided_project_dependencies.each {
provided project(path: it, configuration: "shadow")
}
// 'compile' project dependencies below are bundled and relocated by the
// shadowClosure above; each must have a matching include/relocate rule there.
compile project(path: ":model:fn-execution", configuration: "shadow")
compile project(path: ":runners:core-construction-java", configuration: "shadow")
compile project(path: ":runners:core-java", configuration: "shadow")
compile project(path: ":runners:java-fn-execution", configuration: "shadow")
compile project(path: ":sdks:java:fn-execution", configuration: "shadow")
compile project(path: ":runners:google-cloud-dataflow-java:worker:windmill", configuration: "shadow")
// Vendored guava is already relocated, so it goes straight into the shadow jar.
shadow library.java.vendored_guava_20_0
// Conscrypt and jetty versions here must stay in sync with the include()
// patterns in the shadowClosure above.
compile "org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"
compile "org.eclipse.jetty:jetty-server:9.2.10.v20150310"
compile "org.eclipse.jetty:jetty-servlet:9.2.10.v20150310"
provided library.java.error_prone_annotations
runtime library.java.slf4j_jdk14
// Any test dependency which intersects with our relocation rules above needs to be relocated
// as well and placed within the testCompile configuration. Otherwise we can place it within
// the shadowTest configuration.
testCompile project(path: ":runners:core-java", configuration: "shadowTest")
shadowTest library.java.guava_testlib
shadowTest project(path: ":sdks:java:extensions:google-cloud-platform-core", configuration: "shadowTest")
shadowTest project(path: ":runners:direct-java", configuration: "shadow")
shadowTest project(path: ":sdks:java:harness", configuration: "shadowTest")
shadowTest project(path: ":sdks:java:core", configuration: "shadowTest")
shadowTest library.java.hamcrest_core
shadowTest library.java.hamcrest_library
shadowTest library.java.junit
shadowTest library.java.mockito_core
}
// TODO(BEAM-5657): checkstyle task should be enabled in the future.
checkstyleMain.enabled = false
checkstyleTest.enabled = false
// TODO(BEAM-5659): javadoc task should be enabled in the future.
javadoc.enabled = false