/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import groovy.transform.Memoized
buildscript {
repositories {
jcenter()
gradlePluginPortal()
maven { url "http://palantir.bintray.com/releases" }
maven { url "https://plugins.gradle.org/m2/" }
}
dependencies {
classpath 'com.github.jengelman.gradle.plugins:shadow:5.0.0'
classpath 'com.palantir.baseline:gradle-baseline-java:0.55.0'
classpath 'com.diffplug.spotless:spotless-plugin-gradle:3.14.0'
classpath 'gradle.plugin.org.inferred:gradle-processors:2.1.0'
classpath 'me.champeau.gradle:jmh-gradle-plugin:0.4.8'
}
}
plugins {
id 'com.palantir.git-version' version '0.9.1'
id 'nebula.dependency-recommender' version '9.0.2'
id 'nebula.dependency-lock' version '9.0.0'
}
if (JavaVersion.current() == JavaVersion.VERSION_1_8) {
project.ext.jdkVersion = '8'
} else if (JavaVersion.current() == JavaVersion.VERSION_11) {
project.ext.jdkVersion = '11'
} else {
throw new GradleException("This build must be run with JDK 8 or 11")
}
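// Gradle itself runs on the selected JDK; to build with a specific one, point JAVA_HOME
// at it before invoking the wrapper, e.g. (illustrative): JAVA_HOME=/path/to/jdk8 ./gradlew build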
dependencyRecommendations {
propertiesFile file: file('versions.props')
}
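// versions.props supplies the recommended versions for the version-less dependencies
// declared below; entries map group:module (globs allowed) to a version, e.g. (illustrative):
//   org.slf4j:* = 1.7.25
//   org.apache.avro:avro = 1.9.2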
allprojects {
group = "org.apache.iceberg"
version = getProjectVersion()
repositories {
maven { url "http://palantir.bintray.com/releases" }
mavenCentral()
mavenLocal()
}
}
subprojects {
apply plugin: 'nebula.dependency-recommender'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'java'
dependencyLock {
includeTransitives = true
}
dependencyResolutionVerifierExtension {
shouldFailTheBuild = false
}
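// dependency locks can be regenerated with the nebula.dependency-lock tasks,
// e.g. (illustrative): ./gradlew generateLock saveLock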
configurations {
testCompile.extendsFrom compileOnly
compileClasspath {
// do not exclude Guava so the bundle project can reference classes.
if (project.name != 'iceberg-bundled-guava') {
exclude group: 'com.google.guava', module: 'guava'
}
// contains a copy of Guava
exclude group: 'org.apache.spark', module: 'spark-network-common_2.12'
}
all {
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.mortbay.jetty'
resolutionStrategy {
force 'com.fasterxml.jackson.module:jackson-module-scala_2.11:2.10.2'
force 'com.fasterxml.jackson.module:jackson-module-scala_2.12:2.10.2'
force 'com.fasterxml.jackson.module:jackson-module-paranamer:2.10.2'
}
}
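// configuration used to share test jars between subprojects; consumers reference it
// via project(path: ..., configuration: 'testArtifacts') below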
testArtifacts
}
compileJava {
options.encoding = "UTF-8"
}
compileTestJava {
options.encoding = "UTF-8"
}
ext {
jmhVersion = '1.21'
}
sourceCompatibility = '1.8'
targetCompatibility = '1.8'
dependencies {
compile 'org.slf4j:slf4j-api'
compile 'com.github.stephenc.findbugs:findbugs-annotations'
testCompile 'junit:junit'
testCompile 'org.slf4j:slf4j-simple'
testCompile 'org.mockito:mockito-core'
}
test {
testLogging {
events "failed"
exceptionFormat "full"
}
}
}
project(':iceberg-bundled-guava') {
apply plugin: 'com.github.johnrengelman.shadow'
tasks.jar.dependsOn tasks.shadowJar
dependencies {
compileOnly('com.google.guava:guava') {
// may be LGPL - use ALv2 findbugs-annotations instead
exclude group: 'com.google.code.findbugs'
exclude group: 'com.google.errorprone'
exclude group: 'com.google.j2objc'
}
}
shadowJar {
classifier null
configurations = [project.configurations.compileOnly]
zip64 true
// include the LICENSE and NOTICE files for the shaded Jar
from(projectDir) {
include 'LICENSE'
include 'NOTICE'
}
dependencies {
exclude(dependency('com.github.stephenc.findbugs:findbugs-annotations'))
exclude(dependency('org.slf4j:slf4j-api'))
exclude(dependency('org.checkerframework:checker-qual'))
}
relocate 'com.google.common', 'org.apache.iceberg.relocated.com.google.common'
minimize()
}
jar {
classifier 'empty'
}
}
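// downstream projects depend on the shaded output (configuration: 'shadow') and reference
// Guava under the relocated package, e.g. (illustrative):
// org.apache.iceberg.relocated.com.google.common.collect.ImmutableList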
project(':iceberg-api') {
dependencies {
compile project(path: ':iceberg-bundled-guava', configuration: 'shadow')
testCompile "org.apache.avro:avro"
testCompile 'joda-time:joda-time'
}
}
project(':iceberg-common') {
dependencies {
compile project(path: ':iceberg-bundled-guava', configuration: 'shadow')
}
}
project(':iceberg-core') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile("org.apache.avro:avro") {
exclude group: 'org.tukaani' // xz compression is not supported
}
compile "com.fasterxml.jackson.core:jackson-databind"
compile "com.fasterxml.jackson.core:jackson-core"
compile "com.github.ben-manes.caffeine:caffeine"
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}
}
project(':iceberg-data') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-core')
compileOnly project(':iceberg-parquet')
compileOnly project(':iceberg-orc')
compileOnly("org.apache.hadoop:hadoop-common") {
exclude group: 'commons-beanutils'
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
testCompile("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-core', configuration: 'testArtifacts')
}
test {
// Only for TestSplitScan as of Gradle 5.0+
maxHeapSize '1500m'
}
}
project(':iceberg-flink') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compileOnly "org.apache.flink:flink-streaming-java_2.12"
compileOnly "org.apache.flink:flink-streaming-java_2.12::tests"
compileOnly "org.apache.flink:flink-table-api-java-bridge_2.12"
compileOnly "org.apache.flink:flink-table-planner-blink_2.12"
compileOnly "org.apache.flink:flink-table-planner_2.12"
compileOnly "org.apache.hadoop:hadoop-hdfs"
compileOnly "org.apache.hadoop:hadoop-common"
compileOnly("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile "org.apache.flink:flink-core"
testCompile "org.apache.flink:flink-runtime_2.12"
testCompile "org.apache.flink:flink-test-utils-junit"
testCompile("org.apache.flink:flink-test-utils_2.12") {
exclude group: "org.apache.curator", module: 'curator-test'
}
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-data', configuration: 'testArtifacts')
}
}
project(':iceberg-hive') {
dependencies {
compile project(':iceberg-core')
compileOnly "org.apache.avro:avro"
compileOnly("org.apache.hive:hive-metastore") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
exclude group: 'org.apache.hbase'
exclude group: 'org.apache.logging.log4j'
exclude group: 'co.cask.tephra'
exclude group: 'com.google.code.findbugs', module: 'jsr305'
exclude group: 'org.eclipse.jetty.aggregate', module: 'jetty-all'
exclude group: 'org.eclipse.jetty.orbit', module: 'javax.servlet'
exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
exclude group: 'com.tdunning', module: 'json'
exclude group: 'javax.transaction', module: 'transaction-api'
exclude group: 'com.zaxxer', module: 'HikariCP'
}
// By default, hive-exec is a fat/uber jar that exports a very old Guava. We use the
// core classifier so that our Guava version can take precedence. Luckily, hive-exec
// seems to work okay so far with this version of Guava.
// See https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
testCompile("org.apache.hive:hive-exec::core") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
exclude group: 'org.apache.hive', module: 'hive-llap-tez'
exclude group: 'org.apache.logging.log4j'
exclude group: 'com.google.protobuf', module: 'protobuf-java'
exclude group: 'org.apache.calcite'
exclude group: 'org.apache.calcite.avatica'
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}
testCompile("org.apache.hive:hive-metastore") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.pentaho' // missing dependency
exclude group: 'org.apache.hbase'
exclude group: 'org.apache.logging.log4j'
exclude group: 'co.cask.tephra'
exclude group: 'com.google.code.findbugs', module: 'jsr305'
exclude group: 'org.eclipse.jetty.aggregate', module: 'jetty-all'
exclude group: 'org.eclipse.jetty.orbit', module: 'javax.servlet'
exclude group: 'org.apache.parquet', module: 'parquet-hadoop-bundle'
exclude group: 'com.tdunning', module: 'json'
exclude group: 'javax.transaction', module: 'transaction-api'
exclude group: 'com.zaxxer', module: 'HikariCP'
}
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}
}
project(':iceberg-mr') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-core')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-data')
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
}
compileOnly("org.apache.hive:hive-serde")
testCompile project(path: ':iceberg-data', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-core', configuration: 'testArtifacts')
}
}
project(':iceberg-orc') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-core')
compile("org.apache.orc:orc-core::nohive") {
exclude group: 'org.apache.hadoop'
exclude group: 'commons-lang'
// These artifacts are shaded and included in the orc-core fat jar
exclude group: 'com.google.protobuf', module: 'protobuf-java'
exclude group: 'org.apache.hive', module: 'hive-storage-api'
}
compileOnly("org.apache.hadoop:hadoop-common") {
exclude group: 'commons-beanutils'
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}
}
project(':iceberg-parquet') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-core')
compile("org.apache.parquet:parquet-avro") {
exclude group: 'org.apache.avro', module: 'avro'
// already shaded by Parquet
exclude group: 'it.unimi.dsi'
exclude group: 'org.codehaus.jackson'
}
compileOnly "org.apache.avro:avro"
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-core', configuration: 'testArtifacts')
}
}
project(':iceberg-arrow') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-parquet')
compile("org.apache.arrow:arrow-vector") {
exclude group: 'io.netty', module: 'netty-buffer'
exclude group: 'io.netty', module: 'netty-common'
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}
compile("org.apache.arrow:arrow-memory") {
exclude group: 'io.netty', module: 'netty-common'
exclude group: 'com.google.code.findbugs', module: 'jsr305'
}
}
}
project(':iceberg-spark') {
configurations.all {
resolutionStrategy {
// Spark 2.4.4 works only with the DataNucleus versions below; the versions pulled in
// by Hive 2.3.6 cause many unexpected issues, so force the versions that shipped with
// Hive 1.2.1.
force 'org.datanucleus:datanucleus-api-jdo:3.2.6'
force 'org.datanucleus:datanucleus-core:3.2.10'
force 'org.datanucleus:datanucleus-rdbms:3.2.9'
}
}
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-arrow')
compile project(':iceberg-hive')
compileOnly "org.apache.avro:avro"
compileOnly("org.apache.spark:spark-hive_2.11") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-hive', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-data', configuration: 'testArtifacts')
}
test {
// For vectorized reads
// Allow unsafe memory access to avoid Arrow's costly per-access bounds checking
systemProperty("arrow.enable_unsafe_memory_access", "true")
// Disable the expensive null check on every get(index) call;
// Iceberg manages nullability itself instead of relying on Arrow.
systemProperty("arrow.enable_null_check_for_get", "false")
// Vectorized reads need more memory
maxHeapSize '2500m'
}
}
if (jdkVersion == '8') {
apply from: 'jmh.gradle'
project(':iceberg-spark2') {
configurations.all {
resolutionStrategy {
// Spark 2.4.4 works only with the DataNucleus versions below; the versions pulled in
// by Hive 2.3.6 cause many unexpected issues, so force the versions that shipped with
// Hive 1.2.1.
force 'org.datanucleus:datanucleus-api-jdo:3.2.6'
force 'org.datanucleus:datanucleus-core:3.2.10'
force 'org.datanucleus:datanucleus-rdbms:3.2.9'
}
}
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-arrow')
compile project(':iceberg-hive')
compile project(':iceberg-spark')
compileOnly "org.apache.avro:avro"
compileOnly("org.apache.spark:spark-hive_2.11") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-spark', configuration: 'testArtifacts')
testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-hive', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}
test {
// For vectorized reads
// Allow unsafe memory access to avoid Arrow's costly per-access bounds checking
systemProperty("arrow.enable_unsafe_memory_access", "true")
// Disable the expensive null check on every get(index) call;
// Iceberg manages nullability itself instead of relying on Arrow.
systemProperty("arrow.enable_null_check_for_get", "false")
// Vectorized reads need more memory
maxHeapSize '2500m'
}
}
// the runtime jar is a self-contained artifact for testing in a notebook
project(':iceberg-spark-runtime') {
apply plugin: 'com.github.johnrengelman.shadow'
tasks.jar.dependsOn tasks.shadowJar
configurations {
compile {
exclude group: 'org.apache.spark'
// included in Spark
exclude group: 'org.slf4j'
exclude group: 'org.apache.commons'
exclude group: 'commons-pool'
exclude group: 'commons-codec'
exclude group: 'org.xerial.snappy'
exclude group: 'javax.xml.bind'
exclude group: 'javax.annotation'
}
}
dependencies {
compile project(':iceberg-spark2')
compile 'org.apache.spark:spark-hive_2.11'
}
shadowJar {
configurations = [project.configurations.compile]
zip64 true
// include the LICENSE and NOTICE files for the shaded Jar
from(projectDir) {
include 'LICENSE'
include 'NOTICE'
}
// Relocate dependencies to avoid conflicts
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
relocate 'avro.shaded', 'org.apache.iceberg.shaded.org.apache.avro.shaded'
relocate 'com.thoughtworks.paranamer', 'org.apache.iceberg.shaded.com.thoughtworks.paranamer'
relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
relocate 'shaded.parquet', 'org.apache.iceberg.shaded.org.apache.parquet.shaded'
// relocate Avro's jackson dependency to share parquet-jackson locations
relocate 'org.codehaus.jackson', 'org.apache.iceberg.shaded.org.apache.parquet.shaded.org.codehaus.jackson'
relocate 'org.apache.orc', 'org.apache.iceberg.shaded.org.apache.orc'
relocate 'io.airlift', 'org.apache.iceberg.shaded.io.airlift'
// relocate Arrow and related deps to shade Iceberg specific version
relocate 'io.netty.buffer', 'org.apache.iceberg.shaded.io.netty.buffer'
relocate 'org.apache.arrow', 'org.apache.iceberg.shaded.org.apache.arrow'
relocate 'com.carrotsearch', 'org.apache.iceberg.shaded.com.carrotsearch'
relocate 'org.threeten.extra', 'org.apache.iceberg.shaded.org.threeten.extra'
classifier null
}
jar {
classifier = 'empty'
}
}
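// the shaded runtime jar is produced by the shadow plugin, e.g. (illustrative):
//   ./gradlew :iceberg-spark-runtime:shadowJar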
}
project(':iceberg-spark3') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-data')
compile project(':iceberg-orc')
compile project(':iceberg-parquet')
compile project(':iceberg-arrow')
compile project(':iceberg-hive')
compile project(':iceberg-spark')
compileOnly "org.apache.avro:avro"
compileOnly("org.apache.spark:spark-hive_2.12") {
exclude group: 'org.apache.avro', module: 'avro'
exclude group: 'org.apache.arrow'
}
testCompile project(path: ':iceberg-spark', configuration: 'testArtifacts')
testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile project(path: ':iceberg-hive', configuration: 'testArtifacts')
testCompile project(path: ':iceberg-api', configuration: 'testArtifacts')
}
test {
// For vectorized reads
// Allow unsafe memory access to avoid Arrow's costly per-access bounds checking
systemProperty("arrow.enable_unsafe_memory_access", "true")
// Disable the expensive null check on every get(index) call;
// Iceberg manages nullability itself instead of relying on Arrow.
systemProperty("arrow.enable_null_check_for_get", "false")
// Vectorized reads need more memory
maxHeapSize '2500m'
}
}
project(':iceberg-spark3-runtime') {
apply plugin: 'com.github.johnrengelman.shadow'
tasks.jar.dependsOn tasks.shadowJar
configurations {
compile {
exclude group: 'org.apache.spark'
// included in Spark
exclude group: 'org.slf4j'
exclude group: 'org.apache.commons'
exclude group: 'commons-pool'
exclude group: 'commons-codec'
exclude group: 'org.xerial.snappy'
exclude group: 'javax.xml.bind'
exclude group: 'javax.annotation'
}
}
dependencies {
compile project(':iceberg-spark3')
compile 'org.apache.spark:spark-hive_2.12'
}
shadowJar {
configurations = [project.configurations.compile]
zip64 true
// include the LICENSE and NOTICE files for the shaded Jar
from(projectDir) {
include 'LICENSE'
include 'NOTICE'
}
// Relocate dependencies to avoid conflicts
relocate 'com.google', 'org.apache.iceberg.shaded.com.google'
relocate 'com.fasterxml', 'org.apache.iceberg.shaded.com.fasterxml'
relocate 'com.github.benmanes', 'org.apache.iceberg.shaded.com.github.benmanes'
relocate 'org.checkerframework', 'org.apache.iceberg.shaded.org.checkerframework'
relocate 'org.apache.avro', 'org.apache.iceberg.shaded.org.apache.avro'
relocate 'avro.shaded', 'org.apache.iceberg.shaded.org.apache.avro.shaded'
relocate 'com.thoughtworks.paranamer', 'org.apache.iceberg.shaded.com.thoughtworks.paranamer'
relocate 'org.apache.parquet', 'org.apache.iceberg.shaded.org.apache.parquet'
relocate 'shaded.parquet', 'org.apache.iceberg.shaded.org.apache.parquet.shaded'
// relocate Avro's jackson dependency to share parquet-jackson locations
relocate 'org.codehaus.jackson', 'org.apache.iceberg.shaded.org.apache.parquet.shaded.org.codehaus.jackson'
relocate 'org.apache.orc', 'org.apache.iceberg.shaded.org.apache.orc'
relocate 'io.airlift', 'org.apache.iceberg.shaded.io.airlift'
// relocate Arrow and related deps to shade Iceberg specific version
relocate 'io.netty.buffer', 'org.apache.iceberg.shaded.io.netty.buffer'
relocate 'org.apache.arrow', 'org.apache.iceberg.shaded.org.apache.arrow'
relocate 'com.carrotsearch', 'org.apache.iceberg.shaded.com.carrotsearch'
relocate 'org.threeten.extra', 'org.apache.iceberg.shaded.org.threeten.extra'
classifier null
}
jar {
classifier = 'empty'
}
}
project(':iceberg-pig') {
dependencies {
compile project(':iceberg-api')
compile project(':iceberg-common')
compile project(':iceberg-core')
compile project(':iceberg-parquet')
compile "org.apache.commons:commons-lang3"
compileOnly("org.apache.pig:pig") {
exclude group: "junit", module: "junit"
}
compileOnly("org.apache.hadoop:hadoop-mapreduce-client-core")
compileOnly("org.apache.hadoop:hadoop-client") {
exclude group: 'org.apache.avro', module: 'avro'
}
testCompile "org.apache.hadoop:hadoop-hdfs::tests"
testCompile "org.apache.hadoop:hadoop-common::tests"
testCompile("org.apache.hadoop:hadoop-minicluster") {
exclude group: 'org.apache.avro', module: 'avro'
}
}
}
@Memoized
boolean versionFileExists() {
return file('version.txt').exists()
}
@Memoized
String getVersionFromFile() {
return file('version.txt').text.trim()
}
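// version.txt, when present at the repository root, pins the build version and takes
// precedence over the git-derived version; its content is a bare version string,
// e.g. (illustrative): 0.9.0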
String getProjectVersion() {
if (versionFileExists()) {
return getVersionFromFile()
}
try {
return gitVersion()
} catch (NullPointerException e) {
throw new GradleException("Neither version.txt nor a git version is available", e)
}
}
String getJavadocVersion() {
if (versionFileExists()) {
return getVersionFromFile()
}
try {
// use the branch name in place of the version in Javadoc
return versionDetails().branchName
} catch (NullPointerException e) {
throw new GradleException("Neither version.txt nor a git version is available", e)
}
}
apply from: 'baseline.gradle'
apply from: 'deploy.gradle'
apply from: 'tasks.gradle'