| /* |
| * Licensed to the Apache Software Foundation (ASF) under one |
| * or more contributor license agreements. See the NOTICE file |
| * distributed with this work for additional information |
| * regarding copyright ownership. The ASF licenses this file |
| * to you under the Apache License, Version 2.0 (the |
 * "License"); you may not use this file except in compliance
| * with the License. You may obtain a copy of the License at |
| * |
| * http://www.apache.org/licenses/LICENSE-2.0 |
| * |
| * Unless required by applicable law or agreed to in writing, software |
 * distributed under the License is distributed on an "AS IS" BASIS,
| * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| * See the License for the specific language governing permissions and |
| * limitations under the License. |
| */ |
| |
| import groovy.json.JsonOutput |
| |
| plugins { id 'org.apache.beam.module' } |
| applyJavaNature(automaticModuleName: 'org.apache.beam.runners.spark') |
| |
| description = "Apache Beam :: Runners :: Spark" |
| |
| /* |
| * We need to rely on manually specifying these evaluationDependsOn to ensure that |
| * the following projects are evaluated before we evaluate this project. This is because |
| * we are attempting to reference the "sourceSets.test.output" directly. |
| */ |
| evaluationDependsOn(":sdks:java:core") |
| evaluationDependsOn(":sdks:java:io:hadoop-format") |
| |
| configurations { |
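  // Carries the full classpath used by the validatesRunner* test tasks below.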
| validatesRunner |
| } |
| |
| test { |
| systemProperty "beam.spark.test.reuseSparkContext", "true" |
| systemProperty "spark.ui.enabled", "false" |
| systemProperty "spark.ui.showConsoleProgress", "false" |
| systemProperty "beamTestPipelineOptions", """[ |
| "--runner=TestSparkRunner", |
| "--streaming=false", |
| "--enableSparkMetricSinks=true" |
| ]""" |
| // Only one SparkContext may be running in a JVM (SPARK-2243) |
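  // forkEvery 1 gives every test class a fresh JVM; maxParallelForks still allows up to four of those JVMs at a time.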
| forkEvery 1 |
| maxParallelForks 4 |
| useJUnit { |
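    // Streaming and checkpoint-recovery tests run in the dedicated validatesRunner* tasks below instead.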
| excludeCategories "org.apache.beam.runners.spark.StreamingTest" |
| excludeCategories "org.apache.beam.runners.spark.UsesCheckpointRecovery" |
| } |
| } |
| |
| dependencies { |
| compile project(path: ":model:pipeline", configuration: "shadow") |
| compile project(path: ":sdks:java:core", configuration: "shadow") |
| compile project(":runners:core-construction-java") |
| compile project(":runners:core-java") |
| compile project(":runners:java-fn-execution") |
| compile project(":sdks:java:extensions:google-cloud-platform-core") |
| compile library.java.jackson_annotations |
| compile library.java.slf4j_api |
| compile library.java.joda_time |
| compile library.java.args4j |
| provided library.java.spark_core |
| provided library.java.spark_streaming |
| provided library.java.spark_network_common |
| provided library.java.hadoop_common |
| provided library.java.commons_lang3 |
| provided library.java.commons_io_2x |
| provided library.java.hamcrest_core |
| provided library.java.hamcrest_library |
| provided "com.esotericsoftware.kryo:kryo:2.21" |
| runtimeOnly library.java.jackson_module_scala |
| runtimeOnly "org.scala-lang:scala-library:2.11.8" |
| testCompile project(":sdks:java:io:kafka") |
| testCompile project(path: ":sdks:java:core", configuration: "shadowTest") |
| // SparkStateInternalsTest extends abstract StateInternalsTest |
| testCompile project(path: ":runners:core-java", configuration: "testRuntime") |
| testCompile project(":sdks:java:harness") |
| testCompile library.java.avro |
| testCompile library.java.kafka |
| testCompile library.java.kafka_clients |
| testCompile library.java.junit |
| testCompile library.java.mockito_core |
| testCompile library.java.jackson_dataformat_yaml |
| testCompile "org.apache.zookeeper:zookeeper:3.4.11" |
| validatesRunner project(path: ":sdks:java:core", configuration: "shadowTest") |
| validatesRunner project(":sdks:java:io:hadoop-format") |
| validatesRunner project(":sdks:java:io:hadoop-format").sourceSets.test.output |
| validatesRunner project(path: ":examples:java", configuration: "testRuntime") |
| validatesRunner project(path: project.path, configuration: "testRuntime") |
| validatesRunner project(project.path) |
| validatesRunner project(path: project.path, configuration: "provided") |
| } |
| |
// Testing the Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
[configurations.testRuntimeClasspath, configurations.validatesRunner].each {
  it.exclude group: "org.slf4j", module: "slf4j-jdk14"
}
| |
| task validatesRunnerBatch(type: Test) { |
| group = "Verification" |
| def pipelineOptions = JsonOutput.toJson([ |
| "--runner=TestSparkRunner", |
| "--streaming=false", |
| "--enableSparkMetricSinks=false", |
| ]) |
| systemProperty "beamTestPipelineOptions", pipelineOptions |
| systemProperty "beam.spark.test.reuseSparkContext", "true" |
| systemProperty "spark.ui.enabled", "false" |
| systemProperty "spark.ui.showConsoleProgress", "false" |
| |
| classpath = configurations.validatesRunner |
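  // Scan the core SDK's test classes (where the ValidatesRunner-categorized tests live) in addition to this project's own.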
| testClassesDirs += files(project(":sdks:java:core").sourceSets.test.output.classesDirs) |
| testClassesDirs += files(project.sourceSets.test.output.classesDirs) |
| |
| // Only one SparkContext may be running in a JVM (SPARK-2243) |
| forkEvery 1 |
| maxParallelForks 4 |
| useJUnit { |
| includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner' |
| includeCategories 'org.apache.beam.runners.spark.UsesCheckpointRecovery' |
| excludeCategories 'org.apache.beam.sdk.testing.UsesCustomWindowMerging' |
| // Unbounded |
| excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedPCollections' |
| excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream' |
| // Metrics |
| excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics' |
| // SDF |
| excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo' |
| // Portability |
| excludeCategories 'org.apache.beam.sdk.testing.UsesImpulse' |
| excludeCategories 'org.apache.beam.sdk.testing.UsesCrossLanguageTransforms' |
| } |
| jvmArgs '-Xmx3g' |
| } |
| |
| task validatesRunnerStreaming(type: Test) { |
| group = "Verification" |
| def pipelineOptions = JsonOutput.toJson([ |
| "--runner=TestSparkRunner", |
| "--forceStreaming=true", |
| "--enableSparkMetricSinks=true", |
| ]) |
| systemProperty "beamTestPipelineOptions", pipelineOptions |
| |
| classpath = configurations.validatesRunner |
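  // Unlike the batch task, only this project's own test classes are scanned here.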
| testClassesDirs += files(project.sourceSets.test.output.classesDirs) |
| |
| // Only one SparkContext may be running in a JVM (SPARK-2243) |
| forkEvery 1 |
| maxParallelForks 4 |
| useJUnit { |
| includeCategories 'org.apache.beam.runners.spark.StreamingTest' |
| } |
| } |
| |
| task validatesRunner { |
| group = "Verification" |
| description "Validates Spark runner" |
| dependsOn validatesRunnerBatch |
| dependsOn validatesRunnerStreaming |
| } |
| |
| // Generates :runners:spark:runQuickstartJavaSpark |
| createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'Spark') |