| <?xml version="1.0" encoding="UTF-8"?> |
| <!-- |
| Licensed to the Apache Software Foundation (ASF) under one |
| or more contributor license agreements. See the NOTICE file |
| distributed with this work for additional information |
| regarding copyright ownership. The ASF licenses this file |
| to you under the Apache License, Version 2.0 (the |
| "License"); you may not use this file except in compliance |
| with the License. You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, |
| software distributed under the License is distributed on an |
| "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
| KIND, either express or implied. See the License for the |
| specific language governing permissions and limitations |
| under the License. |
| --> |
| <project xmlns="http://maven.apache.org/POM/4.0.0" |
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> |
| |
| <modelVersion>4.0.0</modelVersion> |
| |
| <parent> |
| <groupId>org.apache.flink</groupId> |
| <artifactId>flink-connector-hive-parent</artifactId> |
| <version>3.0-SNAPSHOT</version> |
| </parent> |
| |
| <artifactId>flink-sql-connector-hive-3.1.3_${scala.binary.version}</artifactId> |
| <name>Flink : Connectors : SQL : Hive 3.1.3</name> |
| |
| <packaging>jar</packaging> |
| |
| <properties> |
| <japicmp.skip>true</japicmp.skip> |
| </properties> |
| |
| <dependencyManagement> |
| <dependencies> |
| <dependency> |
| <groupId>com.google.guava</groupId> |
| <artifactId>guava</artifactId> |
| <version>11.0.2</version> |
| </dependency> |
| <dependency> |
| <groupId>io.netty</groupId> |
| <artifactId>netty</artifactId> |
| <version>3.5.13.Final</version> |
| </dependency> |
| <dependency> |
| <groupId>org.antlr</groupId> |
| <artifactId>antlr-runtime</artifactId> |
| <version>3.5.2</version> |
| </dependency> |
| <dependency> |
| <groupId>org.eclipse.jetty</groupId> |
| <artifactId>jetty-util</artifactId> |
| <version>9.3.20.v20170531</version> |
| </dependency> |
| <dependency> |
| <groupId>org.eclipse.jetty</groupId> |
| <artifactId>jetty-server</artifactId> |
| <version>9.3.20.v20170531</version> |
| </dependency> |
| <dependency> |
| <groupId>org.apache.zookeeper</groupId> |
| <artifactId>zookeeper</artifactId> |
| <version>3.4.9</version> |
| </dependency> |
| <dependency> |
| <groupId>com.nimbusds</groupId> |
| <artifactId>nimbus-jose-jwt</artifactId> |
| <version>4.41.1</version> |
| </dependency> |
| <dependency> |
| <groupId>com.google.inject</groupId> |
| <artifactId>guice</artifactId> |
| <version>4.0</version> |
| </dependency> |
| </dependencies> |
| </dependencyManagement> |
| <dependencies> |
| <dependency> |
| <groupId>org.antlr</groupId> |
| <artifactId>antlr-runtime</artifactId> |
| </dependency> |
| <dependency> |
| <groupId>org.apache.flink</groupId> |
| <artifactId>flink-connector-hive_${scala.binary.version}</artifactId> |
| <version>${flink.version}</version> |
| </dependency> |
| |
| <dependency> |
| <groupId>org.apache.hive</groupId> |
| <artifactId>hive-exec</artifactId> |
| <version>3.1.3</version> |
| <exclusions> |
| <exclusion> |
| <groupId>log4j</groupId> |
| <artifactId>log4j</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.slf4j</groupId> |
| <artifactId>slf4j-log4j12</artifactId> |
| </exclusion> |
| <!-- Rely on the Guava version provided by Hadoop instead. --> |
| <exclusion> |
| <groupId>com.google.guava</groupId> |
| <artifactId>guava</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.apache.avro</groupId> |
| <artifactId>avro</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>ch.qos.reload4j</groupId> |
| <artifactId>reload4j</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.slf4j</groupId> |
| <artifactId>slf4j-reload4j</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>jline</groupId> |
| <artifactId>jline</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.apache.hive</groupId> |
| <artifactId>hive-shims</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>com.github.joshelser</groupId> |
| <artifactId>dropwizard-metrics-hadoop-metrics2-reporter</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>com.google.inject.extensions</groupId> |
| <artifactId>guice-servlet</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.eclipse.jetty</groupId> |
| <artifactId>jetty-webapp</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.eclipse.jetty</groupId> |
| <artifactId>jetty-servlet</artifactId> |
| </exclusion> |
| </exclusions> |
| </dependency> |
| |
| <!-- Hadoop dependency needed to compile the code copied from Hive. --> |
| <dependency> |
| <groupId>org.apache.hadoop</groupId> |
| <artifactId>hadoop-mapreduce-client-core</artifactId> |
| <version>3.1.0</version> |
| <scope>provided</scope> |
| <exclusions> |
| <exclusion> |
| <groupId>log4j</groupId> |
| <artifactId>log4j</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.slf4j</groupId> |
| <artifactId>slf4j-log4j12</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.apache.avro</groupId> |
| <artifactId>avro</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.apache.avro</groupId> |
| <artifactId>avro-mapred</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>ch.qos.reload4j</groupId> |
| <artifactId>reload4j</artifactId> |
| </exclusion> |
| <exclusion> |
| <groupId>org.slf4j</groupId> |
| <artifactId>slf4j-reload4j</artifactId> |
| </exclusion> |
| </exclusions> |
| </dependency> |
| |
| <dependency> |
| <groupId>org.apache.thrift</groupId> |
| <artifactId>libfb303</artifactId> |
| <version>0.9.3</version> |
| </dependency> |
| |
| <!-- Avro is bundled in hive-exec; we declare the same dependencies here explicitly in order to use a different version. --> |
| <dependency> |
| <groupId>org.apache.avro</groupId> |
| <artifactId>avro</artifactId> |
| <version>1.8.2</version> |
| </dependency> |
| |
| <dependency> |
| <groupId>org.apache.avro</groupId> |
| <artifactId>avro-mapred</artifactId> |
| <classifier>hadoop2</classifier> |
| <version>1.8.2</version> |
| <exclusions> |
| <exclusion> |
| <groupId>org.mortbay.jetty</groupId> |
| <artifactId>servlet-api</artifactId> |
| </exclusion> |
| </exclusions> |
| </dependency> |
| |
| <!-- Hive 3.1.x depends on Calcite 1.16. --> |
| <dependency> |
| <groupId>org.apache.calcite</groupId> |
| <artifactId>calcite-core</artifactId> |
| <version>1.16.0</version> |
| <exclusions> |
| <exclusion> |
| <groupId>com.google.protobuf</groupId> |
| <artifactId>protobuf-java</artifactId> |
| </exclusion> |
| </exclusions> |
| </dependency> |
| </dependencies> |
| |
| <build> |
| <plugins> |
| <!-- Disable spotless since this module contains code copied from Hive. --> |
| <plugin> |
| <groupId>com.diffplug.spotless</groupId> |
| <artifactId>spotless-maven-plugin</artifactId> |
| <configuration> |
| <skip>true</skip> |
| </configuration> |
| </plugin> |
| |
| <plugin> |
| <groupId>org.apache.maven.plugins</groupId> |
| <artifactId>maven-shade-plugin</artifactId> |
| <executions> |
| <execution> |
| <id>shade-flink</id> |
| <phase>package</phase> |
| <goals> |
| <goal>shade</goal> |
| </goals> |
| <configuration> |
| <filters> |
| <filter> |
| <artifact>org.apache.hive:hive-exec</artifact> |
| <excludes> |
| <exclude>avro/shaded/com/google/**</exclude> |
| <exclude>org/apache/avro/**</exclude> |
| <exclude>META-INF/maven/org.apache.avro/**</exclude> |
| <exclude>org/apache/hadoop/hive/conf/HiveConf.class</exclude> |
| <exclude>org/apache/hadoop/hive/metastore/HiveMetaStoreClient.class</exclude> |
| </excludes> |
| </filter> |
| <filter> |
| <artifact>org.apache.calcite:calcite-core</artifact> |
| <includes> |
| <!-- RelOptRule must be included here, otherwise the Hive dialect fails with a ClassNotFoundException. |
| To use the Hive dialect, users place the hive connector jar in FLINK_HOME/lib, and the Hive |
| parser then calls the method Hive#closeCurrent. The Hive class is loaded by the app classloader (see |
| org.apache.flink.table.planner.loader.PlannerModule), and loading it requires |
| org.apache.calcite.plan.RelOptRule. That class is not otherwise visible to the app classloader, |
| so it has to be bundled in this jar. |
| --> |
| <include>org/apache/calcite/plan/RelOptRule.class</include> |
| </includes> |
| </filter> |
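| <!-- Illustrative note, not used by the build: the classloading path described above is hit |
| when a user switches to the Hive dialect at runtime, for example in the SQL Client: |
| SET 'table.sql-dialect' = 'hive'; |
| With this shaded jar in FLINK_HOME/lib, RelOptRule can then be resolved from the app classloader. |
| --> |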
| </filters> |
| <artifactSet> |
| <includes> |
| <include>org.apache.flink:flink-connector-hive_${scala.binary.version}</include> |
| <include>org.apache.hive:hive-exec</include> |
| <include>org.apache.thrift:libfb303</include> |
| <include>org.antlr:antlr-runtime</include> |
| <include>org.apache.avro:avro</include> |
| <include>org.apache.avro:avro-mapred</include> |
| <!-- calcite --> |
| <include>org.apache.calcite:calcite-core</include> |
| </includes> |
| </artifactSet> |
| <relocations> |
| <relocation> |
| <pattern>org.apache.parquet</pattern> |
| <shadedPattern>org.apache.hive.shaded.parquet</shadedPattern> |
| </relocation> |
| <relocation> |
| <pattern>shaded.parquet</pattern> |
| <shadedPattern>org.apache.hive.reshaded.parquet</shadedPattern> |
| </relocation> |
| <!-- Guava dependencies that still end up in the shaded JAR are relocated to avoid clashes. --> |
| <relocation> |
| <pattern>com.google</pattern> |
| <shadedPattern>org.apache.flink.hive.shaded.com.google</shadedPattern> |
| </relocation> |
| </relocations> |
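| <!-- Illustrative example of the com.google relocation above: a bundled reference to |
| com.google.common.collect.ImmutableList is rewritten to |
| org.apache.flink.hive.shaded.com.google.common.collect.ImmutableList in the shaded jar, |
| so it cannot clash with an unshaded Guava on the user classpath. |
| --> |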
| </configuration> |
| </execution> |
| </executions> |
| </plugin> |
| </plugins> |
| </build> |
| </project> |