<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>23</version>
</parent>
<groupId>org.apache.datafusion</groupId>
<artifactId>comet-parent-spark${spark.version.short}_${scala.binary.version}</artifactId>
<version>0.14.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Comet Project Parent POM</name>
<modules>
<module>common</module>
<module>spark</module>
<module>spark-integration</module>
<module>fuzz-testing</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>11</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<maven-compiler-plugin.version>3.11.0</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.6.0</maven-assembly-plugin.version>
<maven-shade-plugin.version>3.2.4</maven-shade-plugin.version>
<maven-surefire-plugin.version>3.5.4</maven-surefire-plugin.version>
<maven-source-plugin.version>3.3.0</maven-source-plugin.version>
<maven-enforcer-plugin.version>3.3.0</maven-enforcer-plugin.version>
<maven-failsafe-plugin.version>3.1.0</maven-failsafe-plugin.version>
<asm.version>9.1</asm.version>
<build-helper-maven-plugin.version>3.4.0</build-helper-maven-plugin.version>
<flatten-maven-plugin.version>1.3.0</flatten-maven-plugin.version>
<scalastyle-maven-plugin.version>1.0.0</scalastyle-maven-plugin.version>
<git-commit-id-maven-plugin.version>4.9.9</git-commit-id-maven-plugin.version>
<exec-maven-plugin.version>3.1.0</exec-maven-plugin.version>
<protoc-jar-maven-plugin.version>3.11.4</protoc-jar-maven-plugin.version>
<scalafix-maven-plugin.version>0.1.7_0.10.4</scalafix-maven-plugin.version>
<extra-enforcer-rules.version>1.7.0</extra-enforcer-rules.version>
<scalafmt.version>3.6.1</scalafmt.version>
<apache-rat-plugin.version>0.16.1</apache-rat-plugin.version>
<scala.version>2.12.18</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scala.plugin.version>4.9.6</scala.plugin.version>
<scalatest.version>3.2.16</scalatest.version>
<scalatest-maven-plugin.version>2.2.0</scalatest-maven-plugin.version>
<spark.version>3.5.8</spark.version>
<spark.version.short>3.5</spark.version.short>
<spark.maven.scope>provided</spark.maven.scope>
<protobuf.version>3.25.5</protobuf.version>
<parquet.version>1.13.1</parquet.version>
<parquet.maven.scope>provided</parquet.maven.scope>
<hadoop.version>3.3.4</hadoop.version>
<arrow.version>18.3.0</arrow.version>
<codehaus.jackson.version>1.9.13</codehaus.jackson.version>
<spotless.version>2.43.0</spotless.version>
<jacoco.version>0.8.11</jacoco.version>
<semanticdb.version>4.8.8</semanticdb.version>
<slf4j.version>2.0.7</slf4j.version>
<guava.version>33.2.1-jre</guava.version>
<testcontainers.version>1.21.0</testcontainers.version>
<amazon-awssdk-v2.version>2.31.51</amazon-awssdk-v2.version>
<jni.dir>${project.basedir}/../native/target/debug</jni.dir>
<platform>darwin</platform>
<arch>x86_64</arch>
<comet.shade.packageName>org.apache.comet.shaded</comet.shade.packageName>
<!-- Used by some tests inherited from Spark to get project root directory -->
<spark.test.home>${session.executionRootDirectory}</spark.test.home>
<!-- Skip deployment by default; child modules that publish artifacts re-enable it -->
<maven.deploy.skip>true</maven.deploy.skip>
<!-- Module-system options (--add-opens) required when running tests on JDK 17+ -->
<extraJavaTestArgs>
-XX:+IgnoreUnrecognizedVMOptions
--add-opens=java.base/java.lang=ALL-UNNAMED
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED
--add-opens=java.base/java.io=ALL-UNNAMED
--add-opens=java.base/java.net=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/java.util=ALL-UNNAMED
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED
--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
--add-opens=java.base/sun.nio.cs=ALL-UNNAMED
--add-opens=java.base/sun.security.action=ALL-UNNAMED
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED
-Djdk.reflect.useDirectMethodHandle=false
</extraJavaTestArgs>
<argLine>-ea -Xmx4g -Xss4m ${extraJavaTestArgs}</argLine>
<shims.majorVerSrc>spark-3.x</shims.majorVerSrc>
<shims.minorVerSrc>spark-3.5</shims.minorVerSrc>
</properties>
<dependencyManagement>
<dependencies>
<!-- Spark dependencies -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.maven.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-format-structures</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.maven.scope}</scope>
<exclusions>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<!-- Comet uses arrow-memory-unsafe -->
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Arrow dependencies -->
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-vector</artifactId>
<version>${arrow.version}</version>
<!-- Exclude the following in favor of those from Spark -->
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-common</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory-unsafe</artifactId>
<version>${arrow.version}</version>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-c-data</artifactId>
<version>${arrow.version}</version>
</dependency>
<!-- Parquet dependencies -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
<version>${parquet.version}</version>
<scope>${parquet.maven.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-format-structures</artifactId>
<version>${parquet.version}</version>
<scope>${parquet.maven.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${parquet.version}</version>
<scope>${parquet.maven.scope}</scope>
<exclusions>
<!-- Exclude the following in favor of jakarta.annotation:jakarta.annotation-api -->
<exclusion>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>${parquet.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${parquet.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<!-- Exclude the following in favor of jakarta.annotation:jakarta.annotation-api -->
<exclusion>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Others -->
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-collection-compat_${scala.binary.version}</artifactId>
<version>2.12.0</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
<scope>${spark.maven.scope}</scope>
</dependency>
<!-- Shaded deps marked as provided. These are promoted to compile scope
in the modules where we want the shaded classes to appear in the
associated jar. -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<!-- End of shaded deps -->
<!-- Test dependencies -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.datafusion</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- We are using arrow-memory-unsafe -->
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<!-- We are using arrow-memory-unsafe -->
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.datafusion</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.datafusion</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<version>3.27.7</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
<version>${scalatest.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>junit-4-13_${scala.binary.version}</artifactId>
<version>3.2.16.0</version>
<scope>test</scope>
</dependency>
<!-- For benchmarks to access S3 -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hadoop-cloud_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<!-- We're using hadoop-client -->
<exclusion>
<groupId>org.apache.hadoop.thirdparty</groupId>
<artifactId>hadoop-shaded-guava</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<!-- We're using "org.slf4j:jcl-over-slf4j" -->
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- TestContainers for testing reading Parquet on S3 -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>minio</artifactId>
<version>${testcontainers.version}</version>
<scope>test</scope>
</dependency>
<!--
AWS SDK modules for Iceberg REST catalog + S3 tests.
iceberg-spark-runtime treats the AWS SDK as provided scope, so tests
that exercise Iceberg's S3FileIO (via ResolvingFileIO) must supply these.
AwsProperties references all service client types in method signatures,
and Java serialization introspection resolves them at class-load time.
-->
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
<version>${amazon-awssdk-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>sts</artifactId>
<version>${amazon-awssdk-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>dynamodb</artifactId>
<version>${amazon-awssdk-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>glue</artifactId>
<version>${amazon-awssdk-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>kms</artifactId>
<version>${amazon-awssdk-v2.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${codehaus.jackson.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rogach</groupId>
<artifactId>scallop_${scala.binary.version}</artifactId>
<version>5.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-minicluster</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
<profiles>
<profile>
<id>release</id>
<properties>
<jni.dir>${project.basedir}/../native/target/release</jni.dir>
</properties>
</profile>
<profile>
<id>Win-x86</id>
<activation>
<os>
<family>Windows</family>
<arch>x86</arch>
</os>
</activation>
<properties>
<platform>win32</platform>
<arch>x86_64</arch>
</properties>
</profile>
<profile>
<id>Win-amd64</id>
<activation>
<os>
<family>Windows</family>
<arch>amd64</arch>
</os>
</activation>
<properties>
<platform>win32</platform>
<arch>amd64</arch>
</properties>
</profile>
<profile>
<id>Darwin-x86</id>
<activation>
<os>
<family>mac</family>
<arch>x86</arch>
</os>
</activation>
<properties>
<platform>darwin</platform>
<arch>x86_64</arch>
</properties>
</profile>
<profile>
<id>Darwin-aarch64</id>
<activation>
<os>
<family>mac</family>
<arch>aarch64</arch>
</os>
</activation>
<properties>
<platform>darwin</platform>
<arch>aarch64</arch>
</properties>
</profile>
<profile>
<id>Linux-amd64</id>
<activation>
<os>
<family>Linux</family>
<arch>amd64</arch>
</os>
</activation>
<properties>
<platform>linux</platform>
<arch>amd64</arch>
</properties>
</profile>
<profile>
<id>Linux-aarch64</id>
<activation>
<os>
<family>Linux</family>
<arch>aarch64</arch>
</os>
</activation>
<properties>
<platform>linux</platform>
<arch>aarch64</arch>
</properties>
</profile>
<profile>
<id>spark-3.4</id>
<properties>
<scala.version>2.12.17</scala.version>
<spark.version>3.4.3</spark.version>
<spark.version.short>3.4</spark.version.short>
<parquet.version>1.13.1</parquet.version>
<slf4j.version>2.0.6</slf4j.version>
<shims.minorVerSrc>spark-3.4</shims.minorVerSrc>
<java.version>11</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
</profile>
<profile>
<id>spark-3.5</id>
<properties>
<scala.version>2.12.18</scala.version>
<spark.version>3.5.8</spark.version>
<spark.version.short>3.5</spark.version.short>
<parquet.version>1.13.1</parquet.version>
<slf4j.version>2.0.7</slf4j.version>
<shims.minorVerSrc>spark-3.5</shims.minorVerSrc>
<java.version>11</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
</profile>
<profile>
<!-- FIXME: this is WIP. Tests may fail https://github.com/apache/datafusion-comet/issues/551 -->
<id>spark-4.0</id>
<properties>
<!-- Use Scala 2.13 by default -->
<scala.version>2.13.16</scala.version>
<scala.binary.version>2.13</scala.binary.version>
<spark.version>4.0.1</spark.version>
<spark.version.short>4.0</spark.version.short>
<parquet.version>1.15.2</parquet.version>
<semanticdb.version>4.13.6</semanticdb.version>
<slf4j.version>2.0.16</slf4j.version>
<shims.majorVerSrc>spark-4.0</shims.majorVerSrc>
<shims.minorVerSrc>not-needed-yet</shims.minorVerSrc>
<!-- Use jdk17 by default -->
<java.version>17</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
</profile>
<profile>
<id>scala-2.12</id>
</profile>
<profile>
<id>scala-2.13</id>
<properties>
<scala.version>2.13.16</scala.version>
<scala.binary.version>2.13</scala.binary.version>
<semanticdb.version>4.13.6</semanticdb.version>
</properties>
</profile>
<profile>
<id>jdk11</id>
<activation>
<jdk>11</jdk>
</activation>
<properties>
<java.version>11</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
</profile>
<profile>
<id>jdk17</id>
<activation>
<jdk>17</jdk>
</activation>
<properties>
<java.version>17</java.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
</profile>
<profile>
<id>semanticdb</id>
<properties>
<scalastyle.skip>true</scalastyle.skip>
<spotless.check.skip>true</spotless.check.skip>
<enforcer.skip>true</enforcer.skip>
</properties>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala.plugin.version}</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<args>
<arg>-Ywarn-unused</arg> <!-- unused-symbol warnings are required by the scalafix RemoveUnused rule -->
</args>
<javacArgs>
<javacArg>-source</javacArg>
<javacArg>${java.version}</javacArg>
<javacArg>-target</javacArg>
<javacArg>${java.version}</javacArg>
<javacArg>-Xlint:all,-serial,-path,-try</javacArg>
</javacArgs>
<compilerPlugins>
<compilerPlugin>
<groupId>org.scalameta</groupId>
<artifactId>semanticdb-scalac_${scala.version}</artifactId>
<version>${semanticdb.version}</version>
</compilerPlugin>
</compilerPlugins>
</configuration>
</plugin>
<plugin>
<groupId>io.github.evis</groupId>
<artifactId>scalafix-maven-plugin_${scala.binary.version}</artifactId>
<version>${scalafix-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
</build>
</profile>
<profile>
<id>strict-warnings</id>
<build>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<configuration>
<args>
<arg>-deprecation</arg>
<arg>-unchecked</arg>
<arg>-feature</arg>
<arg>-Xlint:_</arg>
<arg>-Ywarn-dead-code</arg>
<arg>-Ywarn-numeric-widen</arg>
<arg>-Ywarn-value-discard</arg>
<arg>-Ywarn-unused:imports,patvars,privates,locals,params,-implicits</arg>
<arg>-Xfatal-warnings</arg>
</args>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala.plugin.version}</version>
<executions>
<execution>
<id>eclipse-add-source</id>
<goals>
<goal>add-source</goal>
</goals>
</execution>
<execution>
<id>scala-compile-first</id>
<phase>process-resources</phase>
<goals>
<goal>compile</goal>
<goal>add-source</goal>
</goals>
</execution>
<execution>
<id>scala-test-compile-first</id>
<phase>process-test-resources</phase>
<goals>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
<checkMultipleScalaVersions>true</checkMultipleScalaVersions>
<failOnMultipleScalaVersions>true</failOnMultipleScalaVersions>
<recompileMode>incremental</recompileMode>
<args>
<arg>-unchecked</arg>
<arg>-deprecation</arg>
<arg>-feature</arg>
<arg>-explaintypes</arg>
<arg>-Xlint:adapted-args</arg>
</args>
<jvmArgs>
<jvmArg>-Xms1024m</jvmArg>
<jvmArg>-Xmx1024m</jvmArg>
</jvmArgs>
<javacArgs>
<javacArg>-source</javacArg>
<javacArg>${maven.compiler.source}</javacArg>
<javacArg>-target</javacArg>
<javacArg>${maven.compiler.target}</javacArg>
<javacArg>-Xlint:all,-serial,-path,-try</javacArg>
</javacArgs>
</configuration>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<version>${scalatest-maven-plugin.version}</version>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>SparkTestSuite.txt</filereports>
<stdout>D</stdout>
<stderr/>
<tagsToExclude>org.apache.comet.IntegrationTestSuite</tagsToExclude>
<systemProperties>
<!-- emit test logs to target/unit-tests.log -->
<log4j.configurationFile>file:src/test/resources/log4j2.properties</log4j.configurationFile>
<java.awt.headless>true</java.awt.headless>
<java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
</systemProperties>
</configuration>
<executions>
<execution>
<id>test</id>
<goals>
<goal>test</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<dependencies>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
<version>${asm.version}</version>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
<version>${asm.version}</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<systemPropertyVariables>
<log4j.configurationFile>file:src/test/resources/log4j2.properties</log4j.configurationFile>
</systemPropertyVariables>
<failIfNoSpecifiedTests>false</failIfNoSpecifiedTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
<configuration>
<attach>true</attach>
</configuration>
<executions>
<execution>
<id>create-source-jar</id>
<goals>
<goal>jar-no-fork</goal>
<goal>test-jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<skipMain>true</skipMain>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${maven-failsafe-plugin.version}</version>
</plugin>
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<version>${spotless.version}</version>
<configuration>
<java>
<toggleOffOn />
<googleJavaFormat />
<removeUnusedImports />
<importOrder>
<order>java|javax,scala,org,org.apache,com,org.apache.comet,\#,\#org.apache.comet</order>
</importOrder>
<licenseHeader>
<file>${maven.multiModuleProjectDirectory}/dev/copyright/java-header.txt</file>
</licenseHeader>
</java>
<scala>
<includes>
<include>src/main/scala/**/*.scala</include>
<include>src/test/scala/**/*.scala</include>
<!-- Include spark shim sources -->
<include>src/main/spark-*/**/*.scala</include>
<include>src/test/spark-*/**/*.scala</include>
</includes>
<toggleOffOn />
<scalafmt>
<version>${scalafmt.version}</version>
<file>${maven.multiModuleProjectDirectory}/scalafmt.conf</file>
</scalafmt>
<licenseHeader>
<file>${maven.multiModuleProjectDirectory}/dev/copyright/scala-header.txt</file>
</licenseHeader>
</scala>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>flatten-maven-plugin</artifactId>
<version>${flatten-maven-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>${build-helper-maven-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
<version>${scalastyle-maven-plugin.version}</version>
<configuration>
<verbose>false</verbose>
<failOnViolation>true</failOnViolation>
<includeTestSourceDirectory>false</includeTestSourceDirectory>
<failOnWarning>false</failOnWarning>
<sourceDirectory>${basedir}/src/main/scala</sourceDirectory>
<testSourceDirectory>${basedir}/src/test/scala</testSourceDirectory>
<configLocation>${maven.multiModuleProjectDirectory}/dev/scalastyle-config.xml</configLocation>
<outputFile>${basedir}/target/scalastyle-output.xml</outputFile>
<inputEncoding>${project.build.sourceEncoding}</inputEncoding>
<outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
</configuration>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
<configuration>
<trimStackTrace>false</trimStackTrace>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache-rat-plugin.version}</version>
<executions>
<execution>
<phase>verify</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
<configuration>
<consoleOutput>true</consoleOutput>
<excludes>
<exclude>**/*.iml</exclude>
<exclude>**/*.log</exclude>
<exclude>**/*.md.vm</exclude>
<exclude>**/.classpath</exclude>
<exclude>**/.project</exclude>
<exclude>**/.settings/**</exclude>
<exclude>**/build/**</exclude>
<exclude>**/target/**</exclude>
<exclude>**/apache-spark/**</exclude>
<exclude>**/apache-iceberg/**</exclude>
<exclude>.dockerignore</exclude>
<exclude>.git/**</exclude>
<exclude>.github/**</exclude>
<exclude>.gitignore</exclude>
<exclude>.gitmodules</exclude>
<exclude>**/.idea/**</exclude>
<exclude>**/dependency-reduced-pom.xml</exclude>
<exclude>**/testdata/**</exclude>
<exclude>**/.lldbinit</exclude>
<exclude>rust-toolchain</exclude>
<exclude>Makefile</exclude>
<exclude>dev/Dockerfile*</exclude>
<exclude>dev/diffs/**</exclude>
<exclude>dev/deploy-file</exclude>
<exclude>**/test/resources/**</exclude>
<exclude>**/benchmarks/*.txt</exclude>
<exclude>**/inspections/*.txt</exclude>
<exclude>tpcds-kit/**</exclude>
<exclude>tpcds-sf-1/**</exclude>
<exclude>tpch/**</exclude>
<exclude>docs/*.txt</exclude>
<exclude>docs/logos/*.png</exclude>
<exclude>docs/logos/*.svg</exclude>
<exclude>docs/source/_static/images/**</exclude>
<exclude>docs/source/contributor-guide/*.svg</exclude>
<exclude>dev/release/rat_exclude_files.txt</exclude>
<exclude>dev/release/requirements.txt</exclude>
<exclude>native/proto/src/generated/**</exclude>
<exclude>benchmarks/tpc/queries/**</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>${maven-enforcer-plugin.version}</version>
<executions>
<execution>
<id>no-duplicate-declared-dependencies</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<banCircularDependencies/>
<banDuplicatePomDependencyVersions/>
<banDuplicateClasses>
<scopes>
<scope>compile</scope>
<scope>provided</scope>
</scopes>
<ignoreClasses>
<ignoreClass>org.apache.spark.unused.UnusedStubClass</ignoreClass>
</ignoreClasses>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<ignoreClasses>
<!-- the following classes conflict between spark-sql and spark-sql:tests -->
<ignoreClass>javax.annotation.meta.TypeQualifier</ignoreClass>
<ignoreClass>javax.annotation.Nonnull</ignoreClass>
<ignoreClass>javax.annotation.meta.When</ignoreClass>
<ignoreClass>javax.annotation.Nonnull$Checker</ignoreClass>
<ignoreClass>javax.annotation.meta.TypeQualifierValidator</ignoreClass>
<!-- this class is not properly excluded from comet-spark right now -->
<ignoreClass>org.apache.parquet.filter2.predicate.SparkFilterApi</ignoreClass>
<!-- we explicitly include a duplicate to allow older versions of Spark -->
<!-- this can be removed once we no longer support spark 3.x -->
<ignoreClass>org.apache.spark.sql.ExtendedExplainGenerator</ignoreClass>
</ignoreClasses>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<ignoreClasses>
<!-- the following classes conflict between spark-sql and findbugs -->
<ignoreClass>javax.annotation.meta.TypeQualifier</ignoreClass>
<ignoreClass>javax.annotation.Nonnull</ignoreClass>
<ignoreClass>javax.annotation.meta.When</ignoreClass>
<ignoreClass>javax.annotation.Nonnull$Checker</ignoreClass>
<ignoreClass>javax.annotation.meta.TypeQualifierValidator</ignoreClass>
<ignoreClass>javax.annotation.Nullable</ignoreClass>
<ignoreClass>javax.annotation.meta.TypeQualifierNickname</ignoreClass>
</ignoreClasses>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<ignoreClasses>
<ignoreClass>com.google.thirdparty.publicsuffix.TrieParser</ignoreClass>
<ignoreClass>com.google.thirdparty.publicsuffix.PublicSuffixPatterns</ignoreClass>
<ignoreClass>com.google.thirdparty.publicsuffix.PublicSuffixType</ignoreClass>
</ignoreClasses>
</dependency>
</dependencies>
<findAllDuplicates>true</findAllDuplicates>
<ignoreWhenIdentical>true</ignoreWhenIdentical>
</banDuplicateClasses>
</rules>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>${extra-enforcer-rules.version}</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>flatten-maven-plugin</artifactId>
<executions>
<!-- enable flattening -->
<execution>
<id>flatten</id>
<phase>process-resources</phase>
<goals>
<goal>flatten</goal>
</goals>
</execution>
<!-- ensure proper cleanup -->
<execution>
<id>flatten.clean</id>
<phase>clean</phase>
<goals>
<goal>clean</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<id>default-prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>