<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- The HIFIO (HadoopInputFormatIO) tests for Cassandra and Elasticsearch work
only with JDK 1.8, but Beam's enforcer rules require support for both JDK 1.7
and JDK 1.8. This child module contains only those tests and overrides the
enforcer rules to allow JDK 1.8-only behavior without forcing all of HIFIO to
require JDK 1.8. -->
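<!-- Usage sketch: with a JDK 1.8 installation active, this module can be built
on its own with Maven's project-selection flags, e.g.
mvn clean verify -pl sdks/java/io/hadoop/jdk1.8-tests -am
(the module path is an assumption based on this POM's location in the Beam
source tree; adjust it to your checkout). The enforcer override below applies
only to this child module. -->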
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-hadoop-parent</artifactId>
<version>2.2.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>beam-sdks-java-io-hadoop-jdk1.8-tests</artifactId>
<name>Apache Beam :: SDKs :: Java :: IO :: Hadoop :: jdk1.8-tests</name>
<description>Integration tests and JUnit tests that require JDK 1.8.</description>
<build>
<plugins>
<!-- Enforcer plugin overridden so that the tests in this module may require JDK 1.8 -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<executions>
<execution>
<id>enforce</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
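<!-- enforceBytecodeVersion (from extra-enforcer-rules) fails the build if any
dependency contains class files compiled for a JDK newer than maxJdkVersion. -->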
<enforceBytecodeVersion>
<maxJdkVersion>1.8</maxJdkVersion>
<excludes>
<!-- Supplied by the user's JDK and compiled with a matching
version. It is not shaded, so it is safe to exclude. -->
<exclude>jdk.tools:jdk.tools</exclude>
</excludes>
</enforceBytecodeVersion>
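<!-- requireJavaVersion is a built-in enforcer rule; the range [1.8,) means the
build itself must run on JDK 1.8 or newer. -->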
<requireJavaVersion>
<version>[1.8,)</version>
</requireJavaVersion>
</rules>
</configuration>
</execution>
</executions>
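<!-- enforceBytecodeVersion is not a built-in rule; it is supplied by the
extra-enforcer-rules artifact declared below. -->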
<dependencies>
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.0-beta-6</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<!-- Don't run the tests in parallel, since HIFIOWithElasticTest uses
a static port number, which can lead to port-in-use errors. -->
<parallel>none</parallel>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
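<!-- This module contains only tests, so there is no artifact to deploy. -->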
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
<!--The dataflow-runner and spark-runner profiles support using those runners
during an integration test. These profiles are not the long-term way we want to
support using runners in ITs (e.g. it is annoying to add them to every IO
module). We cannot create a dependency IO -> Runners, since the runners already
depend on IO modules (e.g. the Spark runner depends on KafkaIO). -->
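<!-- Usage sketch: activate a profile when invoking the build, e.g.
mvn verify -Pspark-runner
or, to run a single integration test class (the class name is a placeholder),
mvn verify -Pdataflow-runner -Dit.test=YourIT
-Dit.test is a standard maven-failsafe-plugin property; runner-specific
pipeline options are passed separately and are not shown here. -->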
<profiles>
<!-- Include the Apache Spark runner: -P spark-runner -->
<profile>
<id>spark-runner</id>
<dependencies>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-runners-spark</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<scope>runtime</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</profile>
<!-- Include the Google Cloud Dataflow runner: -P dataflow-runner -->
<profile>
<id>dataflow-runner</id>
<dependencies>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-runners-google-cloud-dataflow-java</artifactId>
<scope>runtime</scope>
</dependency>
</dependencies>
</profile>
</profiles>
<properties>
<guava.version>19.0</guava.version>
<transport.netty4.client.version>5.0.0</transport.netty4.client.version>
<netty.transport.native.epoll.version>4.1.0.CR3</netty.transport.native.epoll.version>
<elasticsearch.version>5.0.0</elasticsearch.version>
<cassandra.driver.mapping.version>3.1.1</cassandra.driver.mapping.version>
<cassandra.all.version>3.9</cassandra.all.version>
<cassandra.driver.core.version>3.1.1</cassandra.driver.core.version>
<commons.io.version>2.4</commons.io.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-hadoop-input-format</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<!-- provided dependencies (needed to compile, supplied by the runtime environment) -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<scope>provided</scope>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-runners-direct-java</artifactId>
<scope>test</scope>
</dependency>
<!-- We need to depend on the main (non-test-jar) artifact as well, to get
its repackaged transitive dependencies on the classpath. -->
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-common</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-common</artifactId>
<scope>test</scope>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>transport-netty4-client</artifactId>
<version>${transport.netty4.client.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport-native-epoll</artifactId>
<version>${netty.transport.native.epoll.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-hadoop</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-mapping</artifactId>
<version>${cassandra.driver.mapping.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra-all</artifactId>
<version>${cassandra.all.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>${cassandra.driver.core.version}</version>
<scope>test</scope>
</dependency>
<!-- runtime dependencies -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
<scope>runtime</scope>
</dependency>
</dependencies>
</project>