/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
def basePath = '..'
/* All properties required for loading the Spark build script */
project.ext {
  // Spark 3 version as defined in BeamModulePlugin
  spark_version = spark3_version
  spark_scala_version = '2.12'
  copySourceBase = false // disabled to use Spark 3 as primary dev version
  archives_base_name = 'beam-runners-spark-3'
}
// Load the main build script which contains all build logic.
apply from: "$basePath/spark_runner.gradle"
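
// A minimal sketch of the consuming side: spark_runner.gradle is expected to
// pick up the settings above through the project's extra properties. The
// shared script is not shown here, so the exact shape below is an assumption
// based on the ext block above:
//
//   def sparkVersion = project.ext.spark_version        // e.g. spark3_version
//   def scalaVersion = project.ext.spark_scala_version  // '2.12'
//   archivesBaseName = project.ext.archives_base_name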
// Generates the runQuickstartJavaSpark task (only a single Spark version can be supported at a time)
createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'Spark')
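// The generated quickstart task can then be invoked directly, e.g. (the
// :runners:spark:3 project path is an assumption based on archives_base_name):
//   ./gradlew :runners:spark:3:runQuickstartJavaSpark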
// Additional supported Spark versions (used in compatibility tests)
def sparkVersions = [
    "35": "3.5.5",
    "34": "3.4.4",
    "33": "3.3.4",
    // "32": "3.2.4", // tests on 3.2 fail due to an incompatibility with SLF4J 2
    "31": "3.1.3",
]
sparkVersions.each { kv ->
  configurations.create("sparkVersion$kv.key")
  configurations."sparkVersion$kv.key" {
    resolutionStrategy {
      spark.components.each { component -> force "$component:$kv.value" }
    }
  }
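
  // Gradle's built-in dependencyInsight task can be used to confirm that the
  // forced version wins on a given configuration, e.g. (project path assumed):
  //   ./gradlew :runners:spark:3:dependencyInsight \
  //       --configuration sparkVersion33 --dependency spark-core_2.12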
  dependencies {
    // Spark versions prior to 3.4.0 are compiled against SLF4J 1.x. The
    // `org.apache.spark.internal.Logging.isLog4j12()` function references an
    // SLF4J 1.x binding class (org.slf4j.impl.StaticLoggerBinder) which is
    // no longer available in SLF4J 2.x. This results in a
    // `java.lang.NoClassDefFoundError`.
    //
    // The workaround is to provide an SLF4J 1.x binding module from outside
    // the `org.slf4j` group. Module `org.apache.logging.log4j:log4j-slf4j-impl`
    // is one such example: it provides a compatible SLF4J 1.x binding
    // regardless of the SLF4J upgrade. Binding/provider modules under the
    // `org.slf4j` group (e.g., slf4j-simple, slf4j-reload4j) are upgraded
    // together with SLF4J itself, and therefore no longer contain the 1.x
    // binding classes.
    //
    // Note that Spark 3.1.x uses `ch.qos.logback:logback-classic` and is
    // unaffected by the SLF4J upgrade. Spark 3.3.x already uses
    // `log4j-slf4j-impl`, so it is also unaffected.
    if ("$kv.key" == "32") {
      "sparkVersion$kv.key" library.java.log4j2_slf4j_impl
    }
    spark.components.each { component -> "sparkVersion$kv.key" "$component:$kv.value" }
  }
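
  // Illustrative helper, not part of the upstream build: dumps the Spark
  // artifacts that actually resolve on this configuration so the forced
  // versions can be double-checked. The task name is hypothetical, and the
  // configuration is only resolved when the task is explicitly invoked.
  tasks.register("sparkVersion${kv.key}ResolvedVersions") {
    doLast {
      configurations."sparkVersion$kv.key".resolvedConfiguration.resolvedArtifacts
          .findAll { it.moduleVersion.id.group == 'org.apache.spark' }
          .each { println it.moduleVersion.id }
    }
  }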
tasks.register("sparkVersion${kv.key}Test", Test) {
group = "Verification"
description = "Verifies code compatibility with Spark $kv.value"
classpath = configurations."sparkVersion$kv.key" + sourceSets.test.runtimeClasspath
systemProperties test.systemProperties
include "**/*.class"
maxParallelForks 4
}
}
tasks.register("sparkVersionsTest") {
group = "Verification"
dependsOn sparkVersions.collect{k,v -> "sparkVersion${k}Test"}
}
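
// Typical invocations (the :runners:spark:3 project path is an assumption):
//   ./gradlew :runners:spark:3:sparkVersion35Test   // run against Spark 3.5.5 only
//   ./gradlew :runners:spark:3:sparkVersionsTest    // run against all versions above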