<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>hudi</artifactId>
<groupId>org.apache.hudi</groupId>
<version>0.13.0-rc3</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>hudi-spark${sparkbundle.version}-bundle_${scala.binary.version}</artifactId>
<packaging>jar</packaging>
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
<skipTests>true</skipTests>
<javax.servlet.version>3.1.0</javax.servlet.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<createSourcesJar>${shadeSources}</createSourcesJar>
<dependencyReducedPomLocation>${project.build.directory}/dependency-reduced-pom.xml
</dependencyReducedPomLocation>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer" />
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
<addHeader>true</addHeader>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
<resource>META-INF/LICENSE</resource>
<file>target/classes/META-INF/LICENSE</file>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/services/org.apache.spark.sql.sources.DataSourceRegister</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
</transformers>
<artifactSet>
<includes combine.children="append">
<include>org.apache.hudi:hudi-common</include>
<include>org.apache.hudi:hudi-client-common</include>
<include>org.apache.hudi:hudi-spark-client</include>
<include>org.apache.hudi:hudi-spark-common_${scala.binary.version}</include>
<include>org.apache.hudi:hudi-spark_${scala.binary.version}</include>
<include>org.apache.hudi:${hudi.spark.module}_${scala.binary.version}</include>
<include>org.apache.hudi:${hudi.spark.common.modules.1}</include>
<include>org.apache.hudi:${hudi.spark.common.modules.2}</include>
<include>org.apache.hudi:hudi-hive-sync</include>
<include>org.apache.hudi:hudi-sync-common</include>
<include>org.apache.hudi:hudi-hadoop-mr</include>
<include>org.apache.hudi:hudi-timeline-service</include>
<include>javax.servlet:javax.servlet-api</include>
<include>com.beust:jcommander</include>
<include>io.javalin:javalin</include>
<!-- Spark only has mortbay jetty -->
<include>org.eclipse.jetty:*</include>
<include>org.eclipse.jetty.websocket:*</include>
<include>org.jetbrains.kotlin:*</include>
<include>org.rocksdb:rocksdbjni</include>
<include>org.antlr:stringtemplate</include>
<include>com.lmax:disruptor</include>
<include>com.github.davidmoten:guava-mini</include>
<include>com.github.davidmoten:hilbert-curve</include>
<include>com.github.ben-manes.caffeine:caffeine</include>
<include>org.apache.parquet:parquet-avro</include>
<include>com.twitter:bijection-avro_${scala.binary.version}</include>
<include>com.twitter:bijection-core_${scala.binary.version}</include>
<include>io.dropwizard.metrics:metrics-core</include>
<include>io.dropwizard.metrics:metrics-graphite</include>
<include>io.dropwizard.metrics:metrics-jmx</include>
<include>io.prometheus:simpleclient</include>
<include>io.prometheus:simpleclient_httpserver</include>
<include>io.prometheus:simpleclient_dropwizard</include>
<include>io.prometheus:simpleclient_pushgateway</include>
<include>io.prometheus:simpleclient_common</include>
<include>com.yammer.metrics:metrics-core</include>
<include>org.apache.hive:hive-common</include>
<include>org.apache.hive:hive-service</include>
<include>org.apache.hive:hive-service-rpc</include>
<include>org.apache.hive:hive-metastore</include>
<include>org.apache.hive:hive-jdbc</include>
<include>org.apache.curator:curator-framework</include>
<include>org.apache.curator:curator-client</include>
<include>org.apache.curator:curator-recipes</include>
<include>commons-codec:commons-codec</include>
<include>commons-io:commons-io</include>
<include>org.openjdk.jol:jol-core</include>
</includes>
</artifactSet>
<relocations combine.children="append">
<!-- NOTE: We have to relocate all classes within org.apache.spark.sql.avro to avoid
potential classpath collisions when users also bring "spark-avro" into their runtime,
since Hudi carries some of the same classes as "spark-avro" -->
<relocation>
<pattern>org.apache.spark.sql.avro.</pattern>
<shadedPattern>org.apache.hudi.org.apache.spark.sql.avro.</shadedPattern>
</relocation>
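<!-- Illustration only (the class name below is an assumption, not pinned by this POM): with the
relocation above, a class such as org.apache.spark.sql.avro.SchemaConverters would be packaged in
this bundle as org.apache.hudi.org.apache.spark.sql.avro.SchemaConverters, so an unshaded
spark-avro jar on the user classpath can still provide the original class name without conflict. -->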
<relocation>
<pattern>javax.servlet.</pattern>
<shadedPattern>org.apache.hudi.javax.servlet.</shadedPattern>
</relocation>
<relocation>
<pattern>com.yammer.metrics.</pattern>
<shadedPattern>org.apache.hudi.com.yammer.metrics.</shadedPattern>
</relocation>
<relocation>
<pattern>com.beust.jcommander.</pattern>
<shadedPattern>org.apache.hudi.com.beust.jcommander.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.commons.io.</pattern>
<shadedPattern>org.apache.hudi.org.apache.commons.io.</shadedPattern>
</relocation>
<!-- TODO: clean up hive dep - Revisit GH ISSUE #533 & PR #633 -->
<relocation>
<pattern>org.apache.hive.jdbc.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hive.jdbc.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hive.metastore.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hadoop.hive.metastore.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hive.common.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hive.common.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hive.common.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hadoop.hive.common.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hive.conf.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hadoop.hive.conf.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hive.service.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hive.service.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hive.service.</pattern>
<shadedPattern>${spark.bundle.hive.shade.prefix}org.apache.hadoop.hive.service.</shadedPattern>
</relocation>
<relocation>
<pattern>com.codahale.metrics.</pattern>
<shadedPattern>org.apache.hudi.com.codahale.metrics.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.commons.codec.</pattern>
<shadedPattern>org.apache.hudi.org.apache.commons.codec.</shadedPattern>
</relocation>
<relocation>
<pattern>org.eclipse.jetty.</pattern>
<shadedPattern>org.apache.hudi.org.apache.jetty.</shadedPattern>
</relocation>
<relocation>
<pattern>com.google.common.</pattern>
<shadedPattern>org.apache.hudi.com.google.common.</shadedPattern>
</relocation>
<relocation>
<pattern>org.openjdk.jol.</pattern>
<shadedPattern>org.apache.hudi.org.openjdk.jol.</shadedPattern>
</relocation>
</relocations>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
<exclude>META-INF/services/javax.*</exclude>
<exclude>**/*.proto</exclude>
<exclude>hbase-webapps/**</exclude>
<!-- hbase-default.xml comes from hbase-common. The HBase-related classes used by Hudi are
relocated (shaded), so the original class names referenced in hbase-default.xml would trigger
ClassNotFoundException; see the verification sketch after the filters block below. -->
<exclude>hbase-default.xml</exclude>
</excludes>
</filter>
</filters>
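<!-- Verification sketch (the jar file name is illustrative; the real name follows finalName below):
listing the shaded jar, e.g.
  jar tf hudi-spark-bundle.jar | grep hbase-default.xml
should print nothing, confirming the unshaded hbase-default.xml was filtered out of the bundle. -->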
<finalName>${project.artifactId}-${project.version}</finalName>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
<resource>
<directory>src/test/resources</directory>
</resource>
</resources>
</build>
<dependencies>
<!-- Hoodie - Spark -->
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-spark_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Hoodie - Other -->
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-hadoop-mr</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-hive-sync</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-timeline-service</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Parquet -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<scope>compile</scope>
</dependency>
<!-- Hive -->
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service</artifactId>
<version>${hive.version}</version>
<scope>${spark.bundle.hive.scope}</scope>
<exclusions>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service-rpc</artifactId>
<version>${hive.version}</version>
<scope>${spark.bundle.hive.scope}</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
<scope>${spark.bundle.hive.scope}</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
<scope>${spark.bundle.hive.scope}</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
<scope>${spark.bundle.hive.scope}</scope>
</dependency>
<!-- zookeeper -->
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.version}</version>
</dependency>
<!-- TODO: Reinvestigate PR 633 -->
</dependencies>
<profiles>
<profile>
<id>spark-bundle-shade-hive</id>
<properties>
<spark.bundle.hive.scope>compile</spark.bundle.hive.scope>
<spark.bundle.hive.shade.prefix>org.apache.hudi.</spark.bundle.hive.shade.prefix>
</properties>
</profile>
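<!-- Usage sketch (not evaluated by Maven; shown for illustration): this profile is enabled
explicitly on the command line, e.g.
  mvn clean package -Pspark-bundle-shade-hive
which switches the Hive dependencies above to compile scope and relocates them under the
org.apache.hudi. shade prefix inside the bundle. -->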
<profile>
<id>hudi-platform-service</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-metaserver-client</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<executions>
<execution>
<configuration>
<artifactSet>
<includes combine.children="append">
<include>org.apache.hudi:hudi-metaserver-client</include>
</includes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
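<!-- Usage sketch (not evaluated by Maven; shown for illustration): this profile activates when the
deployArtifacts property is set to true, e.g.
  mvn clean package -DdeployArtifacts=true
which also bundles hudi-metaserver-client via the appended artifactSet include above. -->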
</profiles>
</project>