/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
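
// scalaVersion selects the Scala build suffix for the flink-table-planner artifact used
// below; pass -DscalaVersion=2.12 on the command line to override the default
// (defaultScalaVersion is expected to be defined in gradle.properties).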
String flinkMajorVersion = '1.18'
String scalaVersion = System.getProperty("scalaVersion") != null ? System.getProperty("scalaVersion") : System.getProperty("defaultScalaVersion")
project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
dependencies {
implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow')
api project(':iceberg-api')
implementation project(':iceberg-common')
implementation project(':iceberg-core')
api project(':iceberg-data')
implementation project(':iceberg-orc')
implementation project(':iceberg-parquet')
implementation project(':iceberg-hive-metastore')
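
    // Flink and Hadoop dependencies are compileOnly because the deployment
    // environment (the Flink cluster and its Hadoop classpath) provides them at runtime.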
    compileOnly libs.flink118.avro
    // for dropwizard histogram metrics implementation
    compileOnly libs.flink118.metrics.dropwizard
    compileOnly libs.flink118.streaming.java
    compileOnly "${libs.flink118.streaming.java.get().module}:${libs.flink118.streaming.java.get().getVersion()}:tests"
    compileOnly libs.flink118.table.api.java.bridge
    compileOnly "org.apache.flink:flink-table-planner_${scalaVersion}:${libs.versions.flink118.get()}"
    compileOnly libs.flink118.connector.base
    compileOnly libs.flink118.connector.files

    compileOnly libs.hadoop2.hdfs
    compileOnly libs.hadoop2.common
    compileOnly(libs.hadoop2.minicluster) {
      exclude group: 'org.apache.avro', module: 'avro'
    }

    implementation(libs.parquet.avro) {
      exclude group: 'org.apache.avro', module: 'avro'
      // already shaded by Parquet
      exclude group: 'it.unimi.dsi'
      exclude group: 'org.codehaus.jackson'
    }

    compileOnly libs.avro.avro
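
    // orc-core's 'nohive' classifier bundles relocated copies of its Hive storage-api
    // classes, which avoids clashes with the Hive artifacts used in the tests below.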
implementation("${libs.orc.core.get().module}:${libs.versions.orc.get()}:nohive") {
exclude group: 'org.apache.hadoop'
exclude group: 'commons-lang'
// These artifacts are shaded and included in the orc-core fat jar
exclude group: 'com.google.protobuf', module: 'protobuf-java'
exclude group: 'org.apache.hive', module: 'hive-storage-api'
exclude group: 'org.slf4j'
}
testImplementation libs.flink118.connector.test.utils
testImplementation libs.flink118.core
testImplementation libs.flink118.runtime
testImplementation(libs.flink118.test.utilsjunit) {
exclude group: 'junit'
}
testImplementation(libs.flink118.test.utils) {
exclude group: "org.apache.curator", module: 'curator-test'
exclude group: 'junit'
}
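
    // test fixtures from other Iceberg modules, exported via their 'testArtifacts' configuration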
    testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts')
    testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts')
    testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts')
    testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts')

    // By default, hive-exec is a fat/uber jar and it exports a guava library
    // that's really old. We use the core classifier to be able to override our guava
    // version. Luckily, hive-exec seems to work okay so far with this version of guava.
    // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
      exclude group: 'org.apache.avro', module: 'avro'
      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      exclude group: 'org.pentaho' // missing dependency
      exclude group: 'org.apache.hive', module: 'hive-llap-tez'
      exclude group: 'org.apache.logging.log4j'
      exclude group: 'com.google.protobuf', module: 'protobuf-java'
      exclude group: 'org.apache.calcite'
      exclude group: 'org.apache.calcite.avatica'
      exclude group: 'com.google.code.findbugs', module: 'jsr305'
    }

    testImplementation(libs.hive2.metastore) {
      exclude group: 'org.apache.avro', module: 'avro'
      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      exclude group: 'org.pentaho' // missing dependency
      exclude group: 'org.apache.hbase'
      exclude group: 'org.apache.logging.log4j'
      exclude group: 'co.cask.tephra'
      exclude group: 'com.google.code.findbugs', module: 'jsr305'
      exclude group: 'org.eclipse.jetty.aggregate', module: 'jetty-all'
      exclude group: 'org.eclipse.jetty.orbit', module: 'javax.servlet'
      exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
      exclude group: 'com.tdunning', module: 'json'
      exclude group: 'javax.transaction', module: 'transaction-api'
      exclude group: 'com.zaxxer', module: 'HikariCP'
      exclude group: 'org.slf4j'
    }

    testImplementation libs.awaitility
    testImplementation libs.assertj.core
  }

  test {
    useJUnitPlatform()
  }
}
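
// The runtime module repackages the connector and its dependencies into a single
// shaded jar that can be dropped into a Flink distribution (for example under lib/).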
project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
apply plugin: 'com.github.johnrengelman.shadow'
tasks.jar.dependsOn tasks.shadowJar
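
  // The shaded (fat) jar is this module's real output; the plain jar task is disabled
  // at the bottom of this block. The 'integration' source set below compiles tests that
  // run against the shaded jar rather than against the module classpath.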
  sourceSets {
    integration {
      java.srcDir "$projectDir/src/integration/java"
      resources.srcDir "$projectDir/src/integration/resources"
    }
  }

  configurations {
    implementation {
      // included in Flink
      exclude group: 'org.slf4j'
      exclude group: 'org.apache.commons'
      exclude group: 'commons-pool'
      exclude group: 'commons-codec'
      exclude group: 'org.xerial.snappy'
      exclude group: 'javax.xml.bind'
      exclude group: 'javax.annotation'
    }
  }

  dependencies {
    implementation(project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}")) {
      exclude group: 'org.apache.flink'
    }
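
    // bundle Iceberg's cloud and catalog integrations so the single runtime jar
    // works with S3, ADLS, OSS, GCS, and Nessie without additional jars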
    implementation project(':iceberg-aws')
    implementation project(':iceberg-azure')
    implementation(project(':iceberg-aliyun')) {
      exclude group: 'edu.umd.cs.findbugs', module: 'findbugs'
      exclude group: 'org.apache.httpcomponents', module: 'httpclient'
      exclude group: 'commons-logging', module: 'commons-logging'
    }
    implementation project(':iceberg-gcp')
    implementation(project(':iceberg-nessie')) {
      exclude group: 'com.google.code.findbugs', module: 'jsr305'
    }
    // for dropwizard histogram metrics implementation
    implementation libs.flink118.metrics.dropwizard

    // for integration testing with the flink-runtime jar;
    // all of these dependencies are required because the integration test extends FlinkTestBase
    integrationCompileOnly project(':iceberg-api')
    integrationImplementation libs.junit.vintage.engine
    integrationImplementation libs.assertj.core
    integrationImplementation project(path: ":iceberg-flink:iceberg-flink-${flinkMajorVersion}", configuration: "testArtifacts")
    integrationImplementation project(path: ':iceberg-api', configuration: 'testArtifacts')
    integrationImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts')
    integrationImplementation(libs.flink118.test.utils) {
      exclude group: "org.apache.curator", module: 'curator-test'
      exclude group: 'junit'
    }
    integrationImplementation libs.flink118.table.api.java.bridge
    integrationImplementation "org.apache.flink:flink-table-planner_${scalaVersion}:${libs.versions.flink118.get()}"
    integrationImplementation libs.hadoop2.common
    integrationImplementation libs.hadoop2.hdfs
    integrationImplementation(libs.hadoop2.minicluster) {
      exclude group: 'org.apache.avro', module: 'avro'
    }
    integrationImplementation(libs.hive2.metastore) {
      exclude group: 'org.apache.avro', module: 'avro'
      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      exclude group: 'org.pentaho' // missing dependency
      exclude group: 'org.apache.hbase'
      exclude group: 'org.apache.logging.log4j'
      exclude group: 'co.cask.tephra'
      exclude group: 'com.google.code.findbugs', module: 'jsr305'
      exclude group: 'org.eclipse.jetty.aggregate', module: 'jetty-all'
      exclude group: 'org.eclipse.jetty.orbit', module: 'javax.servlet'
      exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
      exclude group: 'com.tdunning', module: 'json'
      exclude group: 'javax.transaction', module: 'transaction-api'
      exclude group: 'com.zaxxer', module: 'HikariCP'
      exclude group: 'org.slf4j'
    }
    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
      exclude group: 'org.apache.avro', module: 'avro'
      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      exclude group: 'org.pentaho' // missing dependency
      exclude group: 'org.apache.hive', module: 'hive-llap-tez'
      exclude group: 'org.apache.logging.log4j'
      exclude group: 'com.google.protobuf', module: 'protobuf-java'
      exclude group: 'org.apache.calcite'
      exclude group: 'org.apache.calcite.avatica'
      exclude group: 'com.google.code.findbugs', module: 'jsr305'
    }
  }

  shadowJar {
    configurations = [project.configurations.runtimeClasspath]
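    // zip64 allows the fat jar to exceed the 65,535-entry limit of the classic zip format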
    zip64 true

    // include the LICENSE and NOTICE files for the shaded Jar
    from(projectDir) {
      include 'LICENSE'
      include 'NOTICE'
    }

    // Relocate dependencies to avoid conflicts
    relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
    relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
    relocate 'com.google.errorprone', 'org.apache.iceberg.shaded.com.google.errorprone'
    relocate 'com.google.flatbuffers', 'org.apache.iceberg.shaded.com.google.flatbuffers'
    relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
    relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
    relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
    relocate 'shaded.parquet', 'org.apache.iceberg.shaded.org.apache.parquet.shaded'
    relocate 'org.apache.orc', 'org.apache.iceberg.shaded.org.apache.orc'
    relocate 'io.airlift', 'org.apache.iceberg.shaded.io.airlift'
    relocate 'org.threeten.extra', 'org.apache.iceberg.shaded.org.threeten.extra'
    relocate 'org.apache.hc.client5', 'org.apache.iceberg.shaded.org.apache.hc.client5'
    relocate 'org.apache.hc.core5', 'org.apache.iceberg.shaded.org.apache.hc.core5'
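
    // drop the default '-all' classifier so the shaded jar becomes the main artifact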
    archiveClassifier.set(null)
  }

  task integrationTest(type: Test) {
    description = "Test Flink Runtime Jar against Flink ${flinkMajorVersion}"
    group = "verification"
    jvmArgs += project.property('extraJvmArgs')
    testClassesDirs = sourceSets.integration.output.classesDirs
    classpath = sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path)
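    // track the shaded jar as a task input so the test re-runs whenever the jar is rebuilt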
    inputs.file(shadowJar.archiveFile.get().asFile.path)
  }
  integrationTest.dependsOn shadowJar
  check.dependsOn integrationTest
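
  // a sketch of how to invoke these tests, assuming the repo's usual flinkVersions
  // property for selecting Flink builds:
  //   ./gradlew -DflinkVersions=1.18 :iceberg-flink:iceberg-flink-runtime-1.18:integrationTest

  // the plain jar is disabled: the shaded jar produced above is this module's artifact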
  jar {
    enabled = false
  }
}