<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix</artifactId>
<version>4.11.1-HBase-1.1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Apache Phoenix</name>
<description>A SQL layer over HBase</description>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
<comments />
</license>
</licenses>
<organization>
<name>Apache Software Foundation</name>
<url>http://www.apache.org</url>
</organization>
<modules>
<module>phoenix-core</module>
<module>phoenix-flume</module>
<module>phoenix-kafka</module>
<module>phoenix-pig</module>
<module>phoenix-queryserver-client</module>
<module>phoenix-queryserver</module>
<module>phoenix-pherf</module>
<module>phoenix-spark</module>
<module>phoenix-hive</module>
<module>phoenix-client</module>
<module>phoenix-server</module>
<module>phoenix-assembly</module>
<module>phoenix-tracing-webapp</module>
</modules>
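<!-- Note: the Maven reactor orders these modules by their inter-module dependencies,
     so phoenix-core is built before the modules that depend on it. -->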
<repositories>
<repository>
<id>apache release</id>
<url>https://repository.apache.org/content/repositories/releases/</url>
</repository>
</repositories>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>14</version>
</parent>
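<!-- The org.apache:apache parent POM supplies the common ASF plugin management,
     distribution management and release settings inherited by this build. -->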
<scm>
<connection>scm:git:http://git-wip-us.apache.org/repos/asf/phoenix.git</connection>
<url>https://git-wip-us.apache.org/repos/asf/phoenix.git</url>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/phoenix.git</developerConnection>
</scm>
<properties>
<!-- General Properties -->
<antlr-input.dir>src/main/antlr3</antlr-input.dir>
<antlr-output.dir>target/generated-sources/antlr3</antlr-output.dir>
<test.output.tofile>true</test.output.tofile>
<top.dir>${project.basedir}</top.dir>
<!-- HBase and Hadoop versions -->
<hbase.version>1.1.9</hbase.version>
<hadoop-two.version>2.7.1</hadoop-two.version>
<!-- Dependency versions -->
<commons-cli.version>1.2</commons-cli.version>
<hive.version>1.2.1</hive.version>
<hadoop.version>2.7.1</hadoop.version>
<pig.version>0.13.0</pig.version>
<jackson.version>1.9.2</jackson.version>
<antlr.version>3.5.2</antlr.version>
<log4j.version>1.2.17</log4j.version>
<slf4j.version>1.6.4</slf4j.version>
<protobuf-java.version>2.5.0</protobuf-java.version>
<commons-configuration.version>1.6</commons-configuration.version>
<commons-io.version>2.1</commons-io.version>
<commons-lang.version>2.5</commons-lang.version>
<commons-logging.version>1.2</commons-logging.version>
<commons-csv.version>1.0</commons-csv.version>
<sqlline.version>1.2.0</sqlline.version>
<guava.version>13.0.1</guava.version>
<flume.version>1.4.0</flume.version>
<kafka.version>0.9.0.0</kafka.version>
<findbugs-annotations.version>1.3.9-1</findbugs-annotations.version>
<jcip-annotations.version>1.0-1</jcip-annotations.version>
<jline.version>2.11</jline.version>
<snappy.version>0.3</snappy.version>
<netty.version>4.0.23.Final</netty.version>
<commons-codec.version>1.7</commons-codec.version>
<htrace.version>3.1.0-incubating</htrace.version>
<collections.version>3.2.2</collections.version>
<!-- Do not change jodatime.version until HBASE-15199 is fixed -->
<jodatime.version>1.6</jodatime.version>
<joni.version>2.1.2</joni.version>
<avatica.version>1.10.0</avatica.version>
<jettyVersion>8.1.7.v20120910</jettyVersion>
<tephra.version>0.12.0-incubating</tephra.version>
<spark.version>2.0.2</spark.version>
<scala.version>2.11.8</scala.version>
<scala.binary.version>2.11</scala.binary.version>
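<!-- Note: the spark16 profile at the bottom of this POM overrides spark.version,
     scala.version and scala.binary.version for Spark 1.6 builds. -->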
<!-- Test Dependencies -->
<mockito-all.version>1.8.5</mockito-all.version>
<junit.version>4.12</junit.version>
<!-- Plugin versions -->
<maven-eclipse-plugin.version>2.9</maven-eclipse-plugin.version>
<maven-build-helper-plugin.version>1.9.1</maven-build-helper-plugin.version>
<maven-surefire-plugin.version>2.20</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>2.20</maven-failsafe-plugin.version>
<maven-dependency-plugin.version>2.1</maven-dependency-plugin.version>
<maven.assembly.version>2.5.2</maven.assembly.version>
<!-- Plugin options -->
<numForkedUT>6</numForkedUT>
<numForkedIT>6</numForkedIT>
<it.failIfNoSpecifiedTests>false</it.failIfNoSpecifiedTests>
<surefire.failIfNoSpecifiedTests>false</surefire.failIfNoSpecifiedTests>
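<!-- With failIfNoSpecifiedTests disabled, a single test can be run across the multi-module
     build (e.g. "mvn verify -Dit.test=SomeIT" or "mvn test -Dtest=SomeTest"; the test names
     are illustrative) without failing the modules that do not contain that test. -->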
<!-- Set default encoding so multi-byte tests work correctly on the Mac -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.0</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.antlr</groupId>
<artifactId>antlr3-maven-plugin</artifactId>
<versionRange>[3.5,)</versionRange>
<goals>
<goal>antlr</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore />
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-install-plugin</artifactId>
<version>2.5.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>${maven-eclipse-plugin.version}</version>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven.assembly.version}</version>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<!-- Avoid defining exclusions in pluginManagement as they are global.
We already inherit some from the ASF parent pom. -->
</plugin>
<!-- We put slow-running tests into src/it and run them during the
integration-test phase using the failsafe plugin. This way
developers can run unit tests conveniently from the IDE or via
"mvn package" from the command line without triggering the
time-consuming integration tests. -->
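<!-- In practice this means "mvn package" builds and runs unit tests only, while
     "mvn verify" or "mvn install" also runs the failsafe executions configured below. -->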
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>${maven-build-helper-plugin.version}</version>
<executions>
<execution>
<id>add-test-source</id>
<phase>validate</phase>
<goals>
<goal>add-test-source</goal>
</goals>
<configuration>
<sources>
<source>${basedir}/src/it/java</source>
</sources>
</configuration>
</execution>
<execution>
<id>add-test-resource</id>
<phase>validate</phase>
<goals>
<goal>add-test-resource</goal>
</goals>
<configuration>
<resources>
<resource>
<directory>${basedir}/src/it/resources</directory>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${maven-failsafe-plugin.version}</version>
<executions>
<execution>
<id>ParallelStatsEnabledTest</id>
<configuration>
<encoding>UTF-8</encoding>
<forkCount>${numForkedIT}</forkCount>
<runOrder>alphabetical</runOrder>
<reuseForks>true</reuseForks>
<!--parallel>methods</parallel>
<threadCount>20</threadCount-->
<argLine>-Xmx3000m -XX:MaxPermSize=300m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<shutdown>kill</shutdown>
<testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
<groups>org.apache.phoenix.end2end.ParallelStatsEnabledTest</groups>
<properties>
<property>
<name>listener</name>
<value>org.apache.phoenix.end2end.ParallelRunListener</value>
</property>
</properties>
</configuration>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
<execution>
<id>ParallelStatsDisabledTest</id>
<configuration>
<encoding>UTF-8</encoding>
<forkCount>${numForkedIT}</forkCount>
<runOrder>alphabetical</runOrder>
<reuseForks>true</reuseForks>
<!--parallel>methods</parallel>
<threadCount>20</threadCount-->
<!-- We're intermittently hitting this assertion:
Caused by: java.lang.AssertionError: we should never remove a different context
at org.apache.hadoop.hbase.regionserver.HRegion$RowLockContext.cleanUp(HRegion.java:5206)
at org.apache.hadoop.hbase.regionserver.HRegion$RowLockImpl.release(HRegion.java:5246)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2898)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2835)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:490) -->
<!--enableAssertions>false</enableAssertions-->
<argLine>-Xmx3000m -XX:MaxPermSize=300m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<shutdown>kill</shutdown>
<testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
<groups>org.apache.phoenix.end2end.ParallelStatsDisabledTest</groups>
<properties>
<property>
<name>listener</name>
<value>org.apache.phoenix.end2end.ParallelRunListener</value>
</property>
</properties>
</configuration>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
<execution>
<id>ClientManagedTimeTests</id>
<configuration>
<encoding>UTF-8</encoding>
<forkCount>${numForkedIT}</forkCount>
<runOrder>alphabetical</runOrder>
<reuseForks>true</reuseForks>
<argLine>-enableassertions -Xmx3000m -XX:MaxPermSize=300m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
<groups>org.apache.phoenix.end2end.ClientManagedTimeTest</groups>
<shutdown>kill</shutdown>
<properties>
<property>
<name>listener</name>
<value>org.apache.phoenix.end2end.ParallelRunListener</value>
</property>
</properties>
</configuration>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
<execution>
<id>HBaseManagedTimeTests</id>
<configuration>
<encoding>UTF-8</encoding>
<forkCount>${numForkedIT}</forkCount>
<runOrder>alphabetical</runOrder>
<reuseForks>true</reuseForks>
<argLine>-enableassertions -Xmx3000m -XX:MaxPermSize=300m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
<groups>org.apache.phoenix.end2end.HBaseManagedTimeTest</groups>
<shutdown>kill</shutdown>
</configuration>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
<execution>
<id>NeedTheirOwnClusterTests</id>
<configuration>
<encoding>UTF-8</encoding>
<forkCount>${numForkedIT}</forkCount>
<runOrder>alphabetical</runOrder>
<reuseForks>false</reuseForks>
<argLine>-enableassertions -Xmx3000m -XX:MaxPermSize=300m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
<groups>org.apache.phoenix.end2end.NeedsOwnMiniClusterTest</groups>
<shutdown>kill</shutdown>
<properties>
<property>
<name>listener</name>
<value>org.apache.phoenix.end2end.ParallelRunListener</value>
</property>
</properties>
</configuration>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
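<!-- Each execution above selects integration tests by JUnit category: the groups value
     names the @Category marker type (e.g. ParallelStatsEnabledTest) that the test classes
     are tagged with, so each suite runs as its own batch of forked JVMs. -->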
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
<executions>
<execution>
<id>create-mrapp-generated-classpath</id>
<phase>generate-test-resources</phase>
<goals>
<goal>build-classpath</goal>
</goals>
<configuration>
<outputFile>${project.build.directory}/classes/mrapp-generated-classpath</outputFile>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
</plugin>
<plugin>
<!-- Allows us to get the apache-ds bundle artifacts -->
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<version>2.5.3</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.13</version>
<executions>
<execution>
<id>validate</id>
<phase>validate</phase>
<configuration>
<skip>true</skip>
<configLocation>${top.dir}/src/main/config/checkstyle/checker.xml</configLocation>
<suppressionsLocation>${top.dir}/src/main/config/checkstyle/suppressions.xml</suppressionsLocation>
<consoleOutput>true</consoleOutput>
<headerLocation>${top.dir}/src/main/config/checkstyle/header.txt</headerLocation>
<failOnViolation><!--true-->false</failOnViolation>
<includeTestSourceDirectory><!--true-->false</includeTestSourceDirectory>
</configuration>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<phase>prepare-package</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9</version>
<configuration>
<quiet>true</quiet>
<links>
<link>http://hbase.apache.org/apidocs/</link>
</links>
</configuration>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<!-- TODO turn back on javadocs - disabled now for testing -->
<!-- <goal>jar</goal> -->
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<forkCount>${numForkedUT}</forkCount>
<reuseForks>true</reuseForks>
<argLine>-enableassertions -Xmx3000m -XX:MaxPermSize=300m
-Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<shutdown>kill</shutdown>
</configuration>
</plugin>
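<!-- Unit tests run in ${numForkedUT} forked JVMs; the fork counts are plain Maven
     properties and can be overridden on the command line, e.g. "mvn test -DnumForkedUT=2". -->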
<!-- All projects create a test jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<version>3.2</version>
<configuration>
<reportPlugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>2.5.2</version>
</plugin>
</reportPlugins>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<!-- The changelog's header is not the standard license header -->
<exclude>CHANGES</exclude>
<!-- IDE configuration -->
<exclude>dev/phoenix.importorder</exclude>
<!-- Release LICENSE and NOTICE files -->
<exclude>dev/release_files/LICENSE</exclude>
<exclude>dev/release_files/NOTICE</exclude>
<!-- Exclude data files for examples -->
<exclude>docs/*.csv</exclude>
<exclude>examples/*.csv</exclude>
<!-- Exclude SQL files from rat. Sqlline 1.1.9 doesn't work with
comments on the first line of a file. -->
<exclude>examples/*.sql</exclude>
<exclude>examples/pig/testdata</exclude>
<!-- Output directories from precommit patch-testing runs -->
<exclude>**/patchprocess/**</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<!-- Allows us to get the apache-ds bundle artifacts -->
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<extensions>true</extensions>
<inherited>true</inherited>
</plugin>
</plugins>
</build>
<dependencyManagement>
<dependencies>
<!-- Intra-project dependencies -->
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>${project.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
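<!-- The "tests" classifier artifact is the test jar produced by the maven-jar-plugin
     test-jar execution configured in the build section above. -->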
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-flume</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-kafka</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-pig</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-spark</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-queryserver</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-queryserver-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- HBase dependencies -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-testing-util</artifactId>
<version>${hbase.version}</version>
<scope>test</scope>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>org.jruby</groupId>
<artifactId>jruby-complete</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-it</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.jruby</groupId>
<artifactId>jruby-complete</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop2-compat</artifactId>
<version>${hbase.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop2-compat</artifactId>
<version>${hbase.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<!-- Hadoop Dependencies -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop-two.version}</version>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minicluster</artifactId>
<version>${hadoop-two.version}</version>
<optional>true</optional>
<scope>test</scope>
</dependency>
<!-- Required for the mini-cluster since HBase is built against an older version of Hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-two.version}</version>
<type>test-jar</type> <!-- This does not work, which is typical for Maven. -->
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop-two.version}</version>
</dependency>
<!-- General Dependencies -->
<dependency>
<groupId>org.apache.pig</groupId>
<artifactId>pig</artifactId>
<version>${pig.version}</version>
<classifier>h2</classifier>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite.avatica</groupId>
<artifactId>avatica</artifactId>
<version>${avatica.version}</version>
</dependency>
<dependency>
<groupId>org.apache.calcite.avatica</groupId>
<artifactId>avatica-core</artifactId>
<version>${avatica.version}</version>
</dependency>
<dependency>
<groupId>org.apache.calcite.avatica</groupId>
<artifactId>avatica-server</artifactId>
<version>${avatica.version}</version>
</dependency>
<!-- Transaction dependencies -->
<dependency>
<groupId>org.apache.tephra</groupId>
<artifactId>tephra-api</artifactId>
<version>${tephra.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tephra</groupId>
<artifactId>tephra-core</artifactId>
<version>${tephra.version}</version>
<exclusions>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.tephra</groupId>
<artifactId>tephra-core</artifactId>
<type>test-jar</type>
<version>${tephra.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.tephra</groupId>
<artifactId>tephra-hbase-compat-1.1</artifactId>
<version>${tephra.version}</version>
</dependency>
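<!-- tephra-hbase-compat-1.1 matches the HBase 1.1.x line pinned by hbase.version above. -->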
<!-- Make sure we have all the antlr dependencies -->
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
<version>${antlr.version}</version>
</dependency>
<dependency>
<groupId>jline</groupId>
<artifactId>jline</artifactId>
<version>${jline.version}</version>
</dependency>
<dependency>
<groupId>sqlline</groupId>
<artifactId>sqlline</artifactId>
<version>${sqlline.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flume</groupId>
<artifactId>flume-ng-core</artifactId>
<version>${flume.version}</version>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<version>${findbugs-annotations.version}</version>
</dependency>
<dependency>
<groupId>com.github.stephenc.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip-annotations.version}</version>
</dependency>
<dependency>
<groupId>org.iq80.snappy</groupId>
<artifactId>snappy</artifactId>
<version>${snappy.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>${jackson.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${jackson.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
<version>${jackson.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
<version>${jackson.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>${mockito-all.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf-java.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons-logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
<version>${htrace.version}</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>${netty.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons-codec.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${collections.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-csv</artifactId>
<version>${commons-csv.version}</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${jodatime.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<profiles>
<!-- this profile should be activated for release builds -->
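<!-- e.g. "mvn clean install -Prelease" enables the RAT check and GPG signing below -->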
<profile>
<id>release</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.6</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>spark16</id>
<properties>
<spark.version>1.6.1</spark.version>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
</profile>
</profiles>
</project>