/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.stream.Collectors
plugins { id 'org.apache.beam.module' }
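// Apply Beam's shared Java build conventions and set the artifact's automatic JPMS module name.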
applyJavaNature(
  automaticModuleName: 'org.apache.beam.sdk.io.hdfs')
description = "Apache Beam :: SDKs :: Java :: IO :: Hadoop File System"
ext.summary = "Library to read and write Hadoop/HDFS file formats from Beam."
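// Hadoop versions covered by the cross-version tests; the key is a shorthand used
// in the generated configuration and task names (e.g. hadoopVersion285Test).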
def hadoopVersions = [
"285": "2.8.5",
"292": "2.9.2",
"2102": "2.10.2",
"324": "3.2.4",
]
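// Create one dedicated dependency configuration per Hadoop version, e.g. "hadoopVersion285".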
hadoopVersions.each {kv -> configurations.create("hadoopVersion$kv.key")}
dependencies {
  implementation library.java.vendored_guava_32_1_2_jre
  implementation project(path: ":sdks:java:core", configuration: "shadow")
  implementation library.java.jackson_core
  implementation library.java.jackson_databind
  implementation library.java.slf4j_api
  compileOnly library.java.hadoop_client
  permitUnusedDeclared library.java.hadoop_client
  compileOnly library.java.hadoop_common
  compileOnly library.java.hadoop_mapreduce_client_core
  permitUnusedDeclared library.java.hadoop_mapreduce_client_core
  testImplementation project(path: ":sdks:java:core", configuration: "shadowTest")
  testImplementation library.java.mockito_core
  testImplementation library.java.junit
  testImplementation library.java.hadoop_minicluster
  testImplementation library.java.hadoop_hdfs_tests
  testRuntimeOnly library.java.slf4j_jdk14
  testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow")
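  // Populate each per-version configuration with the matching Hadoop artifacts.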
  hadoopVersions.each { kv ->
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-client:$kv.value"
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-common:$kv.value"
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-mapreduce-client-core:$kv.value"
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-minicluster:$kv.value"
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-hdfs:$kv.value:tests"
    "hadoopVersion$kv.key" library.java.slf4j_jdk14
  }
}
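// Pin the Hadoop artifacts in each per-version configuration so transitive
// dependencies cannot pull in a different Hadoop release.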
hadoopVersions.each { kv ->
  configurations."hadoopVersion$kv.key" {
    resolutionStrategy {
      force "org.apache.hadoop:hadoop-client:$kv.value"
      force "org.apache.hadoop:hadoop-common:$kv.value"
      force "org.apache.hadoop:hadoop-mapreduce-client-core:$kv.value"
      force "org.apache.hadoop:hadoop-minicluster:$kv.value"
      force "org.apache.hadoop:hadoop-hdfs:$kv.value:tests"
      force "org.apache.hadoop:hadoop-hdfs-client:$kv.value:tests"
      force library.java.slf4j_jdk14
    }
  }
}
// Hadoop dependencies require an old version of Guava (BEAM-11626)
configurations.all (Configuration it) -> {
  // error-prone requires a newer Guava; don't override it for annotation processing
  // https://github.com/google/error-prone/issues/2745
  if (it.name == "annotationProcessor" || it.name == "testAnnotationProcessor") {
    return
  }
  resolutionStrategy {
    force 'com.google.guava:guava:25.1-jre'
  }
}
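// Aggregate task that runs the Hadoop file system tests against every Hadoop version listed above.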
task hadoopVersionsTest(group: "Verification") {
description = "Runs Hadoop file system tests with different Hadoop versions"
def taskNames = hadoopVersions.keySet().stream()
.map{num -> "hadoopVersion${num}Test"}
.collect(Collectors.toList())
dependsOn taskNames
}
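// Generate one Test task per Hadoop version; each puts the matching pinned
// configuration ahead of the regular test runtime classpath.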
hadoopVersions.each { kv ->
task "hadoopVersion${kv.key}Test"(type: Test, group: "Verification") {
description = "Runs Hadoop file system tests with Hadoop version $kv.value"
classpath = configurations."hadoopVersion$kv.key" + sourceSets.test.runtimeClasspath
outputs.upToDateWhen { false }
include '**/*Test.class'
}
}
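// Example invocation (assuming this module's conventional Gradle path in the Beam repository):
//   ./gradlew :sdks:java:io:hadoop-file-system:hadoopVersionsTest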