<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>17</version>
</parent>
<groupId>org.apache.carbondata</groupId>
<artifactId>carbondata-parent</artifactId>
<name>Apache CarbonData :: Parent</name>
<description>Apache CarbonData is an indexed columnar data format for fast analytics
on big data platforms, e.g. Apache Hadoop, Apache Spark, etc.
</description>
<url>http://carbondata.apache.org</url>
<inceptionYear>2016</inceptionYear>
<packaging>pom</packaging>
<version>1.1.2-SNAPSHOT</version>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://git-wip-us.apache.org/repos/asf/carbondata.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/carbondata.git</developerConnection>
<url>https://git-wip-us.apache.org/repos/asf/carbondata.git</url>
<tag>HEAD</tag>
</scm>
<issueManagement>
<system>jira</system>
<url>https://issues.apache.org/jira/browse/CARBONDATA</url>
</issueManagement>
<mailingLists>
<mailingList>
<name>CarbonData Dev</name>
<subscribe>dev-subscribe@carbondata.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@carbondata.apache.org</unsubscribe>
<post>dev@carbondata.apache.org</post>
</mailingList>
<mailingList>
<name>CarbonData Issues</name>
<subscribe>issues-subscribe@carbondata.apache.org</subscribe>
<unsubscribe>issues-unsubscribe@carbondata.apache.org</unsubscribe>
<post>issues@carbondata.apache.org</post>
</mailingList>
<mailingList>
<name>CarbonData User</name>
<subscribe>user-subscribe@carbondata.apache.org</subscribe>
<unsubscribe>user-unsubscribe@carbondata.apache.org</unsubscribe>
<post>user@carbondata.apache.org</post>
</mailingList>
<mailingList>
<name>CarbonData Commits</name>
<subscribe>commits-subscribe@carbondata.apache.org</subscribe>
<unsubscribe>commits-unsubscribe@carbondata.apache.org</unsubscribe>
<post>commits@carbondata.apache.org</post>
</mailingList>
</mailingLists>
<developers>
<developer>
<name>The Apache CarbonData Team</name>
<email>dev@carbondata.apache.org</email>
<url>http://carbondata.apache.org</url>
<organization>Apache Software Foundation</organization>
<organizationUrl>http://www.apache.org</organizationUrl>
</developer>
</developers>
<modules>
<module>common</module>
<module>core</module>
<module>processing</module>
<module>hadoop</module>
<module>integration/spark-common</module>
<module>integration/spark-common-test</module>
<module>assembly</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<snappy.version>1.1.2.6</snappy.version>
<hadoop.version>2.2.0</hadoop.version>
<hadoop.deps.scope>compile</hadoop.deps.scope>
<spark.deps.scope>compile</spark.deps.scope>
<scala.deps.scope>compile</scala.deps.scope>
<dev.path>${basedir}/dev</dev.path>
</properties>
<repositories>
<repository>
<id>central</id>
<!-- Keep this repository first so Maven tries Maven Central before the other repositories, which speeds up dependency resolution -->
<name>Maven Repository</name>
<url>http://repo1.maven.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
</repository>
<repository>
<id>pentaho-releases</id>
<url>http://repository.pentaho.org/artifactory/repo/</url>
</repository>
</repositories>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<scope>${hadoop.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<scope>${hadoop.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<scope>${hadoop.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-repl_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${scala.version}</version>
<scope>${scala.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
<scope>${scala.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
<scope>${scala.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-actors</artifactId>
<version>${scala.version}</version>
<scope>${scala.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scalap</artifactId>
<version>${scala.version}</version>
<scope>${scala.deps.scope}</scope>
</dependency>
<dependency>
<groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId>
<version>1.10</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
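<!--
A minimal sketch of how a child module can rely on the dependencyManagement section above:
the module declares the artifact without a version (and without a scope), and Maven resolves
both from this parent. The artifact chosen below is only an illustration.

  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
  </dependency>
-->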
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:missing</additionalparam>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.17</version>
<configuration>
<configLocation>${dev.path}/javastyle-config.xml</configLocation>
<suppressionsLocation>${dev.path}/javastyle-suppressions.xml</suppressionsLocation>
<headerLocation>${dev.path}/java.header</headerLocation>
<consoleOutput>true</consoleOutput>
<failsOnError>true</failsOnError>
<linkXRef>false</linkXRef>
<failOnViolation>true</failOnViolation>
<includeTestSourceDirectory>false</includeTestSourceDirectory>
<sourceDirectory>${basedir}/src/main/java</sourceDirectory>
<testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
<outputFile>${basedir}/target/checkstyle-output.xml</outputFile>
</configuration>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
<version>0.8.0</version>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
<configuration>
<verbose>false</verbose>
<failOnViolation>true</failOnViolation>
<includeTestSourceDirectory>false</includeTestSourceDirectory>
<failOnWarning>false</failOnWarning>
<sourceDirectory>${basedir}/src/main/scala</sourceDirectory>
<testSourceDirectory>${basedir}/src/test/scala</testSourceDirectory>
<configLocation>${dev.path}/scalastyle-config.xml</configLocation>
<outputFile>${basedir}/target/scalastyle-output.xml</outputFile>
<outputEncoding>${project.build.sourceEncoding}</outputEncoding>
</configuration>
</plugin>
</plugins>
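<!--
Both style plugins run their check goals as part of the normal build. Assuming a standard
Maven setup, they can also be invoked directly from a module or the project root, for example:

  mvn checkstyle:check
  mvn scalastyle:check
-->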
</build>
<profiles>
<profile>
<!-- This profile does not build the Spark modules, so the user should also specify a Spark profile explicitly, e.g. spark-1.6; see the example after this profile -->
<id>build-with-format</id>
<modules>
<module>format</module>
</modules>
</profile>
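<!--
Example invocation for the comment above (a sketch; adjust the goals and the Spark profile as needed):

  mvn clean package -Pbuild-with-format,spark-1.6
-->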
<profile>
<!-- This profile should only be used for release preparation, so that all the modules are covered -->
<id>build-all</id>
<properties>
<spark.version>1.6.2</spark.version>
<scala.binary.version>2.10</scala.binary.version>
<scala.version>2.10.4</scala.version>
<maven.test.skip>true</maven.test.skip>
<flink.version>1.1.4</flink.version>
</properties>
<modules>
<module>format</module>
<module>integration/spark</module>
<module>examples/spark</module>
<module>integration/spark2</module>
<module>examples/spark2</module>
<module>examples/flink</module>
</modules>
</profile>
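<!--
Example invocation for release preparation (a sketch, assuming a standard Maven setup;
note that maven.test.skip is set to true in this profile):

  mvn clean install -Pbuild-all
-->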
<profile>
<id>hadoop-2.2.0</id>
<!-- default -->
<properties>
<hadoop.version>2.2.0</hadoop.version>
</properties>
</profile>
<profile>
<id>hadoop-2.7.2</id>
<properties>
<hadoop.version>2.7.2</hadoop.version>
</properties>
</profile>
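<!--
The Hadoop profiles only switch hadoop.version and can be combined with a Spark profile.
A sketch (listing the Spark profile explicitly, since activating any profile with -P
disables the activeByDefault one):

  mvn clean package -Pspark-1.6,hadoop-2.7.2
-->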
<profile>
<id>spark-1.5</id>
<properties>
<spark.version>1.5.2</spark.version>
<scala.binary.version>2.10</scala.binary.version>
<scala.version>2.10.4</scala.version>
</properties>
<modules>
<module>integration/spark</module>
<module>examples/spark</module>
</modules>
</profile>
<profile>
<id>spark-1.6</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<spark.version>1.6.2</spark.version>
<scala.binary.version>2.10</scala.binary.version>
<scala.version>2.10.4</scala.version>
</properties>
<modules>
<module>integration/spark</module>
<module>examples/spark</module>
</modules>
</profile>
<profile>
<id>spark-2.1</id>
<properties>
<spark.version>2.1.0</spark.version>
<scala.binary.version>2.11</scala.binary.version>
<scala.version>2.11.8</scala.version>
</properties>
<modules>
<module>integration/spark2</module>
<module>examples/spark2</module>
</modules>
</profile>
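<!--
Example: build against Spark 2.1 / Scala 2.11 (a sketch; this profile adds the
integration/spark2 and examples/spark2 modules instead of the Spark 1.x ones):

  mvn clean package -Pspark-2.1
-->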
<profile>
<id>flink</id>
<properties>
<flink.version>1.1.4</flink.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
<modules>
<module>examples/flink</module>
</modules>
</profile>
<profile>
<id>findbugs</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<excludeFilterFile>${dev.path}/findbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
<findbugsXmlOutput>true</findbugsXmlOutput>
<xmlOutput>true</xmlOutput>
<effort>Max</effort>
</configuration>
<executions>
<execution>
<id>analyze-compile</id>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
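<!--
Example: run the FindBugs analysis (a sketch; the check goal above is bound to the compile phase):

  mvn compile -Pfindbugs
-->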
<profile>
<id>include-all</id>
</profile>
<profile>
<id>rat</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>0.12</version>
<executions>
<execution>
<phase>verify</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
<configuration>
<reportFile>${project.build.directory}/${project.build.finalName}.rat</reportFile>
<excludeSubProjects>false</excludeSubProjects>
<consoleOutput>true</consoleOutput>
<useDefaultExcludes>true</useDefaultExcludes>
<excludes>
<exclude>**/target/**/*</exclude>
<exclude>.github/**/*</exclude>
<exclude>**/*.iml</exclude>
<exclude>**/src/test/**/*</exclude>
<exclude>examples/**/*.csv</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
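<!--
Example: run the Apache RAT license check (a sketch; the check goal above is bound to the verify phase):

  mvn verify -Prat
-->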
</profiles>
</project>