<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>hudi</artifactId>
<groupId>org.apache.hudi</groupId>
<version>0.11.0-rc1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>hudi-flink${flink.bundle.version}-bundle_${scala.binary.version}</artifactId>
<packaging>jar</packaging>
<properties>
<checkstyle.skip>true</checkstyle.skip>
<main.basedir>${project.parent.basedir}</main.basedir>
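<!-- Hive dependencies are 'provided' by default; the flink-bundle-shade-hive* profiles below switch this scope to 'compile' so they get bundled. -->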
<flink.bundle.hive.scope>provided</flink.bundle.hive.scope>
<flink.bundle.shade.prefix>org.apache.hudi.</flink.bundle.shade.prefix>
<javax.servlet.version>3.1.0</javax.servlet.version>
<!-- Override to match Flink 1.12.2 -->
<parquet.version>1.11.1</parquet.version>
<hive.version>2.3.1</hive.version>
<thrift.version>0.9.3</thrift.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<createSourcesJar>${shadeSources}</createSourcesJar>
<dependencyReducedPomLocation>${project.build.directory}/dependency-reduced-pom.xml</dependencyReducedPomLocation>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer">
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
<addHeader>true</addHeader>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
<resource>META-INF/LICENSE</resource>
<file>target/classes/META-INF/LICENSE</file>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
</transformers>
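<!-- Only the artifacts listed below are bundled into the shaded jar. -->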
<artifactSet>
<includes>
<include>org.apache.hudi:hudi-common</include>
<include>org.apache.hudi:hudi-client-common</include>
<include>org.apache.hudi:hudi-flink-client</include>
<include>org.apache.hudi:hudi-flink</include>
<include>org.apache.hudi:${hudi.flink.module}</include>
<include>org.apache.hudi:hudi-hive-sync</include>
<include>org.apache.hudi:hudi-sync-common</include>
<include>org.apache.hudi:hudi-hadoop-mr</include>
<include>org.apache.hudi:hudi-timeline-service</include>
<include>org.apache.hudi:hudi-aws</include>
<include>com.yammer.metrics:metrics-core</include>
<include>com.beust:jcommander</include>
<include>io.javalin:javalin</include>
<include>org.jetbrains.kotlin:*</include>
<include>org.rocksdb:rocksdbjni</include>
<include>org.apache.httpcomponents:httpclient</include>
<include>org.apache.httpcomponents:httpcore</include>
<include>org.apache.httpcomponents:fluent-hc</include>
<include>org.antlr:stringtemplate</include>
<!-- Parquet -->
<include>org.apache.parquet:parquet-avro</include>
<include>org.apache.parquet:parquet-hadoop</include>
<include>org.apache.parquet:parquet-column</include>
<include>org.apache.parquet:parquet-common</include>
<include>org.apache.parquet:parquet-format-structures</include>
<include>org.apache.parquet:parquet-encoding</include>
<include>org.apache.parquet:parquet-jackson</include>
<include>org.apache.avro:avro</include>
<include>joda-time:joda-time</include>
<include>com.fasterxml.jackson.core:jackson-annotations</include>
<include>com.fasterxml.jackson.core:jackson-databind</include>
<include>com.fasterxml.jackson.core:jackson-core</include>
<include>com.github.davidmoten:guava-mini</include>
<include>com.github.davidmoten:hilbert-curve</include>
<include>com.twitter:bijection-avro_${scala.binary.version}</include>
<include>com.twitter:bijection-core_${scala.binary.version}</include>
<include>io.dropwizard.metrics:metrics-core</include>
<include>io.dropwizard.metrics:metrics-graphite</include>
<include>io.dropwizard.metrics:metrics-jmx</include>
<include>io.prometheus:simpleclient</include>
<include>io.prometheus:simpleclient_httpserver</include>
<include>io.prometheus:simpleclient_dropwizard</include>
<include>io.prometheus:simpleclient_pushgateway</include>
<include>io.prometheus:simpleclient_common</include>
<!-- Used for HUDI TimelineService -->
<include>org.eclipse.jetty:*</include>
<include>org.eclipse.jetty.websocket:*</include>
<include>javax.servlet:javax.servlet-api</include>
<!-- Used for HUDI write handle -->
<include>com.esotericsoftware:kryo-shaded</include>
<include>org.apache.flink:flink-hadoop-compatibility_${scala.binary.version}</include>
<include>org.apache.flink:flink-json</include>
<include>org.apache.flink:flink-parquet_${scala.binary.version}</include>
<include>org.apache.hive:hive-common</include>
<include>org.apache.hive:hive-service</include>
<include>org.apache.hive:hive-service-rpc</include>
<include>org.apache.hive:hive-exec</include>
<include>org.apache.hive:hive-standalone-metastore</include>
<include>org.apache.hive:hive-metastore</include>
<include>org.apache.hive:hive-jdbc</include>
<include>org.datanucleus:datanucleus-core</include>
<include>org.datanucleus:datanucleus-api-jdo</include>
<include>org.apache.thrift:libfb303</include>
<include>org.apache.orc:orc-core</include>
<include>org.apache.hbase:hbase-common</include>
<include>org.apache.hbase:hbase-client</include>
<include>org.apache.hbase:hbase-hadoop-compat</include>
<include>org.apache.hbase:hbase-hadoop2-compat</include>
<include>org.apache.hbase:hbase-metrics</include>
<include>org.apache.hbase:hbase-metrics-api</include>
<include>org.apache.hbase:hbase-server</include>
<include>org.apache.hbase:hbase-protocol-shaded</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-miscellaneous</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-netty</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-protobuf</include>
<include>org.apache.htrace:htrace-core4</include>
<include>commons-codec:commons-codec</include>
<include>commons-io:commons-io</include>
</includes>
</artifactSet>
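<!-- Relocate bundled third-party classes under org.apache.hudi. to avoid conflicts with the versions already on the Flink/user classpath. -->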
<relocations>
<relocation>
<pattern>javax.servlet.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}javax.servlet.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.avro.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}org.apache.avro.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.commons.io.</pattern>
<shadedPattern>org.apache.hudi.org.apache.commons.io.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.hbase.</shadedPattern>
<excludes>
<exclude>org.apache.hadoop.hbase.KeyValue$KeyComparator</exclude>
</excludes>
</relocation>
<relocation>
<pattern>org.apache.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hbase.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.htrace.</pattern>
<shadedPattern>org.apache.hudi.org.apache.htrace.</shadedPattern>
</relocation>
<relocation>
<pattern>com.yammer.metrics.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}com.yammer.metrics.</shadedPattern>
</relocation>
<relocation>
<pattern>com.beust.jcommander.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}com.beust.jcommander.</shadedPattern>
</relocation>
<relocation>
<pattern>com.codahale.metrics.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}com.codahale.metrics.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.commons.codec.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}org.apache.commons.codec.</shadedPattern>
</relocation>
<relocation>
<pattern>org.eclipse.jetty.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}org.apache.jetty.</shadedPattern>
</relocation>
<!-- Shade kryo-shaded because it may conflict with kryo used by flink -->
<relocation>
<pattern>com.esotericsoftware.kryo.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}com.esotericsoftware.kryo.</shadedPattern>
</relocation>
<relocation>
<pattern>com.fasterxml.jackson.</pattern>
<shadedPattern>${flink.bundle.shade.prefix}com.fasterxml.jackson.</shadedPattern>
</relocation>
<!-- The classes below in org.apache.hadoop.metrics2 package come from
hbase-hadoop-compat and hbase-hadoop2-compat, which have to be shaded one by one,
instead of shading all classes under org.apache.hadoop.metrics2 including ones
from hadoop. -->
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricsExecutor</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricsExecutor
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.impl.JmxCacheBuster</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.impl.JmxCacheBuster</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MetricsExecutorImpl</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MetricsExecutorImpl
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableFastCounter</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableFastCounter
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableRangeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableRangeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableSizeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableSizeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableTimeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableTimeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricQuantile</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricQuantile
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricSampleQuantiles</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricSampleQuantiles
</shadedPattern>
</relocation>
</relocations>
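<!-- Strip signature files and other resources that would be invalid or unnecessary in the shaded jar. -->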
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
<exclude>META-INF/services/javax.*</exclude>
<exclude>**/*.proto</exclude>
<exclude>hbase-webapps/**</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
<resource>
<directory>src/test/resources</directory>
</resource>
</resources>
</build>
<dependencies>
<!-- Hoodie -->
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-common</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>rocksdbjni</artifactId>
<groupId>org.rocksdb</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-client-common</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-flink-client</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>${hudi.flink.module}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-flink</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-hadoop-mr</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-hive-sync</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-timeline-service</artifactId>
<version>${project.version}</version>
<exclusions>
<exclusion>
<artifactId>rocksdbjni</artifactId>
<groupId>org.rocksdb</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.version}</version>
</dependency>
<!-- Flink -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-hadoop-compatibility_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-parquet_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<!-- Parquet -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
</exclusion>
<exclusion>
<groupId>it.unimi.dsi</groupId>
<artifactId>fastutil</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-common</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-encoding</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-format-structures</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-jackson</artifactId>
<version>${parquet.version}</version>
<scope>compile</scope>
</dependency>
<!-- Avro -->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<!-- Override the version to match the Avro version used by Flink -->
<version>1.10.0</version>
<scope>compile</scope>
</dependency>
<!-- Hive -->
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-core</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-core</artifactId>
<scope>${flink.bundle.hive.scope}</scope>
<version>5.0.1</version>
</dependency>
<dependency>
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-api-jdo</artifactId>
<scope>${flink.bundle.hive.scope}</scope>
<version>5.0.1</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libfb303</artifactId>
<version>${thrift.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
<!-- kryo -->
<dependency>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
<version>4.0.2</version>
</dependency>
<!-- ORC -->
<dependency>
<groupId>org.apache.orc</groupId>
<artifactId>orc-core</artifactId>
<version>${orc.version}</version>
<classifier>nohive</classifier>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
<!-- Fasterxml -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>
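<!-- Hive bundling profiles: activate one of them (e.g. -Pflink-bundle-shade-hive2) to package the matching Hive version with 'compile' scope instead of the default 'provided'. -->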
<profiles>
<profile>
<id>flink-bundle-shade-hive1</id>
<properties>
<hive.version>1.1.0</hive.version>
<thrift.version>0.9.2</thrift.version>
<flink.bundle.hive.scope>compile</flink.bundle.hive.scope>
</properties>
</profile>
<profile>
<id>flink-bundle-shade-hive2</id>
<properties>
<hive.version>2.3.1</hive.version>
<flink.bundle.hive.scope>compile</flink.bundle.hive.scope>
</properties>
<dependencies>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service-rpc</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
</dependencies>
</profile>
<profile>
<id>flink-bundle-shade-hive3</id>
<properties>
<hive.version>3.1.2</hive.version>
<flink.bundle.hive.scope>compile</flink.bundle.hive.scope>
</properties>
<dependencies>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service-rpc</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-standalone-metastore</artifactId>
<version>${hive.version}</version>
<scope>${flink.bundle.hive.scope}</scope>
</dependency>
</dependencies>
</profile>
</profiles>
</project>