<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="Pig" default="jar"
xmlns:artifact="urn:maven-artifact-ant"
xmlns:ivy="antlib:org.apache.ivy.ant">
<!-- Load all the default properties, and any the user wants -->
<!-- to contribute (without having to type -D or edit this file) -->
<taskdef resource="net/sf/antcontrib/antcontrib.properties">
<classpath>
<pathelement location="${basedir}/ivy/ant-contrib-1.0b3.jar"/>
</classpath>
</taskdef>
<property file="${user.home}/build.properties" />
<property file="${basedir}/build.properties" />
<!-- name and version properties -->
<property name="name" value="pig" />
<property name="Name" value="Pig" />
<property name="ant-task.version" value="2.0.10" />
<property name="pig.pom" value="${basedir}/ivy/pig.pom" />
<property name="pigsmoke.pom" value="${basedir}/ivy/pigsmoke.pom" />
<property name="pigunit.pom" value="${basedir}/ivy/pigunit.pom" />
<property name="piggybank.pom" value="${basedir}/ivy/piggybank.pom" />
<property name="pig.version" value="0.18.0" />
<property name="pig.version.suffix" value="-SNAPSHOT" />
<property name="version" value="${pig.version}${pig.version.suffix}" />
<property name="final.name" value="${name}-${version}" />
<property name="year" value="2007-2016" />
<!-- source properties -->
<property name="lib.dir" value="${basedir}/lib" />
<property name="spark.lib.dir" value="${basedir}/lib/spark" />
<property name="src.dir" value="${basedir}/src" />
<property name="python.src.dir" value="${src.dir}/python" />
<property name="src.lib.dir" value="${basedir}/lib-src" />
<property name="src.gen.dir" value="${basedir}/src-gen" />
<property name="docs.dir" value="${basedir}/src/docs" />
<property name="legacy.dir" value="${basedir}/legacy" />
<!-- build properties -->
<property name="build.dir" value="${basedir}/build" />
<property name="build.classes" value="${build.dir}/classes" />
<property name="build.docs" value="${build.dir}/docs" />
<property name="build.javadoc" value="${build.docs}/api" />
<property name="tar.dist.dir" value="${build.dir}/tar/${final.name}" />
<!-- property name="build.encoding" value="ISO-8859-1" / -->
<property name="build.encoding" value="UTF8" />
<!-- javac properties -->
<property name="javac.debug" value="on" />
<property name="javac.optimize" value="on" />
<property name="javac.deprecation" value="off" />
<property name="javac.version" value="1.8" />
<property name="javac.args" value="" />
<condition property="javac.args.warnings" value="-Xmaxwarns 1000000 -Xlint -Xlint:-deprecation" else="-Xmaxwarns 1000000">
<isset property="all.warnings" />
</condition>
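<!-- Pass -Dall.warnings=1 on the command line to enable the full -Xlint warning set; by default only the warning cap above is applied. -->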
<!-- artifact jar file names -->
<property name="artifact.pig.jar" value="${final.name}.jar"/>
<property name="artifact.pig-h2.jar" value="${final.name}-h2.jar"/>
<property name="artifact.pig-sources.jar" value="${final.name}-sources.jar"/>
<property name="artifact.pig-javadoc.jar" value="${final.name}-javadoc.jar"/>
<property name="artifact.pig.tar" value="${final.name}.tar.gz"/>
<!-- jar names. TODO: we might want to include the svn revision in the name when building a dev version -->
<property name="output.jarfile.withouthadoop" value="${build.dir}/${final.name}-withouthadoop.jar" />
<property name="output.jarfile.withouthadoop-h2" value="${legacy.dir}/${final.name}-withouthadoop-h2.jar" />
<property name="output.jarfile.core" value="${build.dir}/${artifact.pig.jar}" />
<property name="output.jarfile.core-h2" value="${build.dir}/${artifact.pig-h2.jar}" />
<property name="output.jarfile.sources" value="${build.dir}/${artifact.pig-sources.jar}" />
<property name="output.jarfile.javadoc" value="${build.dir}/${artifact.pig-javadoc.jar}" />
<!-- Maintain old pig.jar in top level directory. -->
<property name="output.jarfile.backcompat-core-h2" value="${basedir}/${final.name}-core-h2.jar" />
<!-- test properties -->
<condition property="test.exec.type" value="${exectype}" else="mr">
<!-- By default, test.exec.type is mr -->
<isset property="exectype"/>
</condition>
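<!-- Override the engine under test with -Dexectype=<type> (e.g. tez or spark, matching the excluded-tests-* files below); when unset, MapReduce ("mr") is used. -->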
<property name="test.src.dir" value="${basedir}/test" />
<property name="test.build.dir" value="${build.dir}/test" />
<property name="test.build.classes" value="${test.build.dir}/classes" />
<property name="test.log.dir" value="${test.build.dir}/logs" />
<property name="test.timeout" value="7200000" />
<property name="test.junit.output.format" value="plain" />
<property name="test.commit.file" value="${test.src.dir}/commit-tests"/>
<property name="test.unit.file" value="${test.src.dir}/unit-tests"/>
<property name="test.smoke.file" value="${test.src.dir}/smoke-tests"/>
<property name="test.all.file" value="${test.src.dir}/all-tests"/>
<property name="test.spark.file" value="${test.src.dir}/spark-tests"/>
<property name="test.spark_local.file" value="${test.src.dir}/spark-local-tests"/>
<property name="test.exclude.file" value="${test.src.dir}/excluded-tests"/>
<property name="test.exclude.file.mr" value="${test.src.dir}/excluded-tests-mr"/>
<property name="test.exclude.file.tez" value="${test.src.dir}/excluded-tests-tez"/>
<property name="test.exclude.file.spark" value="${test.src.dir}/excluded-tests-spark"/>
<property name="pigunit.jarfile" value="pigunit.jar" />
<property name="piggybank.jarfile" value="${basedir}/contrib/piggybank/java/piggybank.jar" />
<property name="smoke.tests.jarfile" value="${build.dir}/${final.name}-smoketests.jar" />
<property name="test.pigunit.src.dir" value="${test.src.dir}/org/apache/pig/test/pigunit" />
<property name="test.pigunit.file" value="${test.src.dir}/pigunit-tests"/>
<property name="pigtest.jarfile" value="pigtest.jar" />
<!-- test configuration, use ${user.home}/build.properties to configure values -->
<property name="ssh.gateway" value="" />
<property name="hod.server" value="" />
<property name="test.output" value="no"/>
<!-- e2e test properties -->
<property name="test.e2e.dir" value="${basedir}/test/e2e/pig"/>
<!-- pigmix properties -->
<property name="pigmix.dir" value="${basedir}/test/perf/pigmix"/>
<!-- parser properties -->
<property name="src.gen.query.parser.dir" value="${src.gen.dir}/org/apache/pig/impl/logicalLayer/parser" />
<property name="src.gen.script.parser.dir" value="${src.gen.dir}/org/apache/pig/tools/pigscript/parser" />
<property name="src.gen.param.parser.dir" value="${src.gen.dir}/org/apache/pig/tools/parameters" />
<property name="src.gen.dot.parser.dir" value="${test.src.dir}/org/apache/pig/test/utils/dotGraph/parser" />
<property name="src.gen.textdata.parser.dir" value="${src.gen.dir}/org/apache/pig/data/parser" />
<!-- Antlr properties -->
<property name="grammar.name" value="Query"/>
<property name="grammar.name.lower" value="query"/>
<property name="grammar.package" value="org.apache.pig.parser"/>
<property name="grammar.package.dir" value="org/apache/pig/parser"/>
<property name="grammar.src.dir" value="${src.dir}/${grammar.package.dir}"/>
<!-- rats properties -->
<property name="rat.reporting.classname" value="rat.Report"/>
<!-- env properties -->
<property environment="env"/>
<condition property="isWindows">
<os family="windows"/>
</condition>
<target name="setTezEnv">
<propertyreset name="test.timeout" value="900000" />
<propertyreset name="hadoopversion" value="2" />
<propertyreset name="isHadoop2" value="true" />
<propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
<propertyreset name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
<propertyreset name="test.exec.type" value="tez" />
</target>
<target name="setSparkEnv">
<propertyreset name="test.exec.type" value="spark" />
</target>
<target name="setWindowsPath" if="${isWindows}">
<property name="build.path" value="${env.Path};${hadoop.root}\bin" />
</target>
<target name="setLinuxPath" unless="${isWindows}">
<property name="build.path" value="${env.PATH};" />
</target>
<!-- javadoc properties -->
<property name="javadoc.link.java" value="http://download.oracle.com/javase/1.5.0/docs/api/" />
<!-- test patch properties -->
<property name="scratch.dir" value="${user.home}/tmp"/>
<property name="svn.cmd" value="svn"/>
<property name="grep.cmd" value="grep"/>
<property name="patch.cmd" value="patch"/>
<property name="make.cmd" value="make"/>
<property name="test_patch_sh" value="${test.src.dir}/bin/test-patch.sh"/>
<property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
<property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
<property name="clover.pdf.report.dir" location="${build.dir}/test/clover/pdf/reports"/>
<property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
<available property="clover.present" file="${clover.jar}" />
<!-- check if clover reports should be generated -->
<condition property="clover.enabled">
<and>
<isset property="run.clover"/>
<isset property="clover.present"/>
</and>
</condition>
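<!-- Clover instrumentation is enabled by passing -Drun.clover=true together with -Dclover.home pointing at an installation that contains lib/clover.jar. -->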
<condition property="staging">
<equals arg1="${repo}" arg2="staging"/>
</condition>
<!-- Ivy properties set here -->
<property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
<property name="ivy.dir" location="ivy" />
<property name="loglevel" value="quiet" />
<loadproperties srcfile="${ivy.dir}/libraries.properties" />
<!--
Hadoop master version
(Value 23 is translated for backward compatibility in old build scripts)
-->
<if>
<equals arg1="${hadoopversion}" arg2="23"/>
<then>
<echo>Property setting hadoopversion=23 is deprecated. Overwriting to hadoopversion=2</echo>
<var name="hadoopversion" unset="true"/>
<property name="hadoopversion" value="2" />
</then>
</if>
<property name="hadoopversion" value="2" />
<condition property="isHadoop2">
<equals arg1="${hadoopversion}" arg2="2"/>
</condition>
<!--
HBase master version
(Value 95 is translated for backward compatibility in old build scripts)
-->
<if>
<equals arg1="${hbaseversion}" arg2="95"/>
<then>
<echo>Property setting hbaseversion=95 is deprecated. Overwriting to hbaseversion=1</echo>
<var name="hbaseversion" unset="true"/>
<property name="hbaseversion" value="1" />
</then>
</if>
<property name="hbaseversion" value="1" />
<property name="sparkversion" value="1" />
<condition property="src.exclude.dir" value="**/Spark2*.java" else="**/Spark1*.java">
<equals arg1="${sparkversion}" arg2="1"/>
</condition>
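<!-- Only one Spark shim flavour is compiled per pass: Spark1*.java or Spark2*.java sources are excluded depending on sparkversion. The "jar" target below runs both passes and merges the Spark2 shim classes into the final jars. -->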
<property name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
<property name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
<property name="asfrepo" value="https://repository.apache.org"/>
<property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
<property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
<property name="asfstagingrepo" value="${asfrepo}/service/local/staging/deploy/maven2"/>
<property name="staging_repo_id" value="apache.staging.https"/>
<property name="snapshots_repo_id" value="apache.snapshots.https"/>
<property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
<property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
<property name="ant_task_repo_url" value="${mvnrepo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
<property name="ivy_repo_url" value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
<property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
<property name="ivy.org" value="org.apache.pig"/>
<property name="build.dir" location="build" />
<property name="build.ivy.dir" location="${build.dir}/ivy" />
<property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
<property name="ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}"/>
<property name="ivy.lib.dir.spark" location="${ivy.lib.dir}/spark" />
<property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
<property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
<property name="pom.xml" location="${build.ivy.maven.dir}/pom.xml"/>
<property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/pig-${version}.pom" />
<property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/pig-${version}-core.jar" />
<property name="javacc.home" location="${ivy.lib.dir}" />
<property name="jackson_core.jar" location="${test.src.dir}/resources/jackson-core-asl-${jackson-pig-3039-test.version}.jar"/>
<property name="jackson_mapper.jar" location="${test.src.dir}/resources/jackson-mapper-asl-${jackson-pig-3039-test.version}.jar"/>
<property name="jackson_core_repo_url"
value="${mvnrepo}/org/codehaus/jackson/jackson-core-asl/${jackson-pig-3039-test.version}/jackson-core-asl-${jackson-pig-3039-test.version}.jar"/>
<property name="jackson_mapper_repo_url"
value="${mvnrepo}/org/codehaus/jackson/jackson-mapper-asl/${jackson-pig-3039-test.version}/jackson-mapper-asl-${jackson-pig-3039-test.version}.jar"/>
<property name="test.spark.spark_master" value="yarn-client" />
<!--this is the naming policy for artifacts we want pulled down-->
<property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[artifact]-[revision](-[classifier]).[ext]"/>
<!--this is how artifacts that get built are named-->
<property name="ivy.publish.pattern" value="[artifact]-[revision].[ext]"/>
<!-- jdiff properties -->
<property name="jdiff.jar" value="${ivy.lib.dir}/jdiff-${jdiff.version}.jar"/>
<property name="xerces.jar" value="${ivy.lib.dir}/xercesImpl-${xerces.version}.jar"/>
<property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
<property name="jdiff.xml.dir" value="${docs.dir}/jdiff"/>
<property name="jdiff.stable" value="0.17.0"/>
<property name="jdiff.stable.javadoc" value="http://hadoop.apache.org/${name}/docs/r${jdiff.stable}/api/"/>
<!-- Packaging properties -->
<property name="package.release" value="1"/>
<property name="package.prefix" value="/usr"/>
<property name="package.conf.dir" value="/etc/pig"/>
<property name="package.log.dir" value="/var/log/pig"/>
<property name="package.buildroot" value="/tmp/pig_package_build_${user.name}"/>
<property name="package.build.dir" value="/tmp/pig_package_build_${user.name}/BUILD"/>
<!-- Eclipse properties -->
<property name="build.dir.eclipse" value="${build.dir}"/>
<property name="build.dir.eclipse-main-classes" value="${build.classes}"/>
<property name="build.dir.eclipse-test-classes" value="${test.build.classes}"/>
<!--property name="build.dir.eclipse-test-generated-classes" value="${build.dir.eclipse}/classes-test-generated"/-->
<condition property="ant-eclipse.jar.exists">
<available file="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar"/>
</condition>
<target name="ant-eclipse-download" unless="ant-eclipse.jar.exists"
description="Downloads the ant-eclipse binary.">
<get src="http://downloads.sourceforge.net/project/ant-eclipse/ant-eclipse/1.0/ant-eclipse-1.0.bin.tar.bz2"
dest="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" usetimestamp="false" />
<untar src="${build.dir}/ant-eclipse-1.0.bin.tar.bz2"
dest="${build.dir}" compression="bzip2">
<patternset>
<include name="lib/ant-eclipse-1.0-jvm1.2.jar"/>
</patternset>
</untar>
<delete file="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" />
</target>
<target name="eclipse-files"
depends="compile-test,ant-eclipse-download"
description="Create eclipse project files">
<pathconvert property="eclipse.project">
<path path="${basedir}"/>
<regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
</pathconvert>
<path id="eclipse.classpath">
<fileset dir="${ivy.lib.dir}">
<include name="**.*jar"/>
</fileset>
<fileset dir="${ivy.lib.dir.spark}">
<include name="**.*jar"/>
</fileset>
</path>
<taskdef name="eclipse"
classname="prantl.ant.eclipse.EclipseTask"
classpath="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
<eclipse updatealways="true" mode="java">
<project name="${eclipse.project}" />
<classpath>
<source path="${src.dir}"/>
<source path="${src.gen.dir}"/>
<source path="${src.lib.dir}/bzip2"/>
<source path="${test.e2e.dir}/udfs/java"/>
<source path="${src.shims.dir}"/>
<source path="${src.shims.test.dir}"/>
<source path="tutorial/src"/>
<source path="${test.src.dir}" excluding="e2e/pig/udfs/java/|resources/|perf/"/>
<output path="${build.dir.eclipse-main-classes}" />
<library pathref="eclipse.classpath" exported="true" />
<!--library pathref="classpath" exported="false"/-->
</classpath>
</eclipse>
</target>
<!-- ====================================================== -->
<!-- Stuff needed by all targets -->
<!-- ====================================================== -->
<!-- setup the classpath -->
<path id="classpath">
<fileset file="${ivy.lib.dir}/${zookeeper.jarfile}"/>
<fileset dir="${ivy.lib.dir}" includes="*.jar"/>
<fileset dir="${ivy.lib.dir.spark}" includes="*.jar"/>
</path>
<!-- javadoc-classpath -->
<path id="javadoc-classpath">
<path refid="javadoc.classpath"/>
</path>
<path id="test.classpath">
<!-- need to put this first, otherwise junit-3 testcases can break -->
<pathelement location="${ivy.lib.dir}/junit-3.8.1.jar"/>
<pathelement location="${build.classes}"/>
<pathelement location="${test.src.dir}"/>
<pathelement location="${piggybank.jarfile}"/>
<path refid="classpath"/>
</path>
<fileset dir="${ivy.lib.dir}" id="core.dependencies.jar">
<exclude name="**.*jar"/>
<exclude name="spark/**.*jar"/>
</fileset>
<fileset dir="${ivy.lib.dir}" id="runtime.dependencies-withouthadoop.jar">
<patternset id="pattern.runtime.dependencies-withouthadoop.jar">
<include name="antlr-runtime-${antlr.version}.jar"/>
<include name="ST4-${stringtemplate.version}.jar"/>
<include name="jline-${jline.version}.jar"/>
<include name="joda-time-${joda-time.version}.jar"/>
<include name="automaton-${automaton.version}.jar"/>
<include name="jansi-${jansi.version}.jar"/>
<include name="RoaringBitmap-shaded-${roaring-bitmap-shaded.version}.jar"/>
</patternset>
</fileset>
<target name="init" depends="ivy-compile" >
<mkdir dir="${src.gen.query.parser.dir}" />
<mkdir dir="${src.gen.script.parser.dir}" />
<mkdir dir="${src.gen.param.parser.dir}" />
<mkdir dir="${build.classes}" />
<mkdir dir="${test.build.classes}" />
<mkdir dir="${src.gen.dot.parser.dir}" />
<mkdir dir="${src.gen.textdata.parser.dir}" />
<tstamp>
<format property="timestamp" pattern="MMM dd yyyy, HH:mm:ss" />
</tstamp>
<svnversion outputproperty="svn.revision"/>
<!-- properties are immutable in Ant, so this gets set only if we get nothing out of svnversion -->
<property name="svn.revision" value=": unknown"/>
</target>
<macrodef name="svnversion">
<!-- the path should point to a small working copy, otherwise svnversion can take ages -->
<attribute name="wcpath" default="${basedir}" />
<attribute name="outputproperty" />
<sequential>
<exec executable="svnversion" outputproperty="@{outputproperty}" failonerror="false" failifexecutionfails="false" >
<arg value="@{wcpath}" />
<redirector>
<outputfilterchain>
<tokenfilter>
<!-- the version can be xxxx, xxxx:yyyy, xxxxM, xxxxS or xxxx:yyyyMS; keep only the working-copy revision number -->
<replaceregex pattern="((\d+).*)" replace="\2" />
</tokenfilter>
</outputfilterchain>
</redirector>
</exec>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
<target name="clean" description="Cleanup build artifacts">
<delete dir="${src.gen.dir}" />
<delete dir="${docs.dir}/build" />
<delete file="${jdiff.xml.dir}\${name}_${version}.xml" />
<delete dir="${build.dir}" />
<delete dir="${src.gen.dot.parser.dir}" />
<delete>
<fileset dir="${basedir}" includes="pig*.jar" />
</delete>
<delete dir="${lib.dir}" />
<delete dir="${legacy.dir}" />
<ant dir="${test.e2e.dir}" target="clean"/>
</target>
<target name="clean-deps" description="Cleanup dependencies">
<delete dir="${lib.dir}" />
<delete dir="${ivy.lib.dir}" />
</target>
<target name="very-clean" unless="offline" depends="ivy-clean-cache,jackson-pig-3039-test-clean,clean"
description="Clean build artifacts and flush Ivy cache" />
<target name="clean-piggybank" description="Cleanup piggybank">
<ant target="clean" dir="contrib/piggybank/java" inheritAll="false"/>
</target>
<target name="clean-tutorial" description="Cleanup Tutorial">
<ant target="clean" dir="tutorial" inheritAll="false"/>
</target>
<target name="clean-test-e2e" description="Cleanup e2e tests">
<ant target="clean" dir="test/e2e/harness" inheritAll="false"/>
<ant target="clean" dir="test/e2e/pig" inheritAll="false"/>
<ant target="clean" dir="test/e2e/pig/udfs/java" inheritAll="false"/>
</target>
<!--target name="eclipse-files" depends="compile, ivy-buildJar"
description="Generate files for Eclipse">
<pathconvert property="eclipse.project">
<path path="${basedir}"/>
<regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
</pathconvert>
<copy todir="." overwrite="true">
<fileset dir=".eclipse.templates">
<exclude name="**/README.txt"/>
</fileset>
<filterset>
<filter token="PROJECT" value="${eclipse.project}"/>
</filterset>
</copy>
</target-->
<!-- ================================================================== -->
<!-- Java Compiler Compiler, generate Parsers -->
<!-- ================================================================== -->
<target name="cc-compile" depends="init, ivy-compile" description="Create and Compile Parser">
<move file="${ivy.lib.dir}/javacc-${javacc.version}.jar" tofile="${javacc.home}/javacc.jar"/>
<javacc target="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj" outputdirectory="${src.gen.script.parser.dir}" javacchome="${javacc.home}" />
<javacc target="${src.dir}/org/apache/pig/tools/parameters/PigFileParser.jj" outputdirectory="${src.gen.param.parser.dir}" javacchome="${javacc.home}" />
<javacc target="${src.dir}/org/apache/pig/tools/parameters/ParamLoader.jj" outputdirectory="${src.gen.param.parser.dir}" javacchome="${javacc.home}" />
<jjtree target="${test.src.dir}/org/apache/pig/test/utils/dotGraph/DOTParser.jjt" outputdirectory="${src.gen.dot.parser.dir}" javacchome="${javacc.home}" />
<javacc target="${src.gen.dot.parser.dir}/DOTParser.jj" outputdirectory="${src.gen.dot.parser.dir}" javacchome="${javacc.home}" />
<move file="${javacc.home}/javacc.jar" tofile="${ivy.lib.dir}/javacc-${javacc.version}.jar"/>
</target>
<target name="gen" depends="genTreeParser"
description="generates lexer and parser code from an ANTLR grammar">
<!-- Move generated Java code to the correct package directory. -->
<move todir="${src.gen.dir}${grammar.package.dir}">
<fileset dir="${src.gen.dir}" includes="*.java"/>
</move>
<!-- Copy generated .token files to current directory. -->
<copy todir=".">
<fileset dir="${src.gen.dir}" includes="*.tokens"/>
</copy>
</target>
<target name="genLexer" depends="prepare, init, ivy-compile"
unless="lexerGrammarProcessed"
description="generates lexer class from an ANTLR grammar">
<java classname="org.antlr.Tool"
classpathref="classpath" fork="true" failonerror="true">
<arg line="-o ${src.gen.dir}/${grammar.package.dir} ${src.dir}/${grammar.package.dir}/${grammar.name}Lexer.g"/>
</java>
</target>
<target name="genParser" depends="genLexer"
unless="parserGrammarProcessed"
description="generates token parser class from an ANTLR grammar">
<java classname="org.antlr.Tool"
classpathref="classpath" fork="true" failonerror="true">
<arg line="-o ${src.gen.dir}/${grammar.package.dir} ${src.dir}/${grammar.package.dir}/${grammar.name}Parser.g"/>
</java>
</target>
<target name="genTreeParser" depends="genParser"
unless="treeGrammarProcessed"
description="generates tree parser class from an ANTLR grammar">
<java classname="org.antlr.Tool"
classpathref="classpath" fork="true" failonerror="true">
<arg line="-o ${src.gen.dir}/${grammar.package.dir} ${src.dir}/${grammar.package.dir}/AstPrinter.g ${src.dir}/${grammar.package.dir}/AliasMasker.g ${src.dir}/${grammar.package.dir}/AstValidator.g ${src.dir}/${grammar.package.dir}/LogicalPlanGenerator.g"/>
</java>
</target>
<target name="prepare">
<uptodate property="lexerGrammarProcessed" srcfile="${grammar.src.dir}/${grammar.name}Lexer.g">
<mapper type="merge" to="${src.gen.dir}/${grammar.package.dir}/${grammar.name}Lexer.java"/>
</uptodate><!--
<uptodate property="parserGrammarProcessed" srcfile="${grammar.src.dir}/${grammar.name}Parser.g">
<mapper type="merge" to="${src.gen.dir}/${grammar.package.dir}/${grammar.name}Parser.java"/>
</uptodate>
<uptodate property="treeGrammarProcessed" srcfile="${grammar.src.dir}/${grammar.name}Tree.g">
<mapper type="merge" to="${src.gen.dir}/${gramar.package.dir}/${grammar.name}Tree.java"/>
</uptodate>-->
<!--mkdir dir="build/classes"/-->
<mkdir dir="${src.gen.dir}/${grammar.package.dir}"/>
</target>
<!-- ================================================================== -->
<!-- Build sources -->
<!-- ================================================================== -->
<target name="compile" depends="cc-compile, gen" description="Compile all artifacts">
<echo>*** Building Main Sources ***</echo>
<echo>*** To compile with all warnings enabled, supply -Dall.warnings=1 on command line ***</echo>
<echo>*** Else, you will only be warned about deprecations ***</echo>
<echo>*** Hadoop version used: ${hadoopversion} ; HBase version used: ${hbaseversion} ; Spark version used: ${sparkversion} ***</echo>
<compileSources sources="${src.dir};${src.gen.dir};${src.lib.dir}/bzip2;${src.shims.dir}"
excludes="${src.exclude.dir}" dist="${build.classes}" cp="classpath" warnings="${javac.args.warnings}" />
<copy todir="${build.classes}/META-INF">
<fileset dir="${src.dir}/META-INF" includes="**"/>
</copy>
</target>
<target name="compile-test" depends="jar-simple, ivy-test">
<echo>*** Building Test Sources ***</echo>
<echo>*** To compile with all warnings enabled, supply -Dall.warnings=1 on command line ***</echo>
<echo>*** Else, you will only be warned about deprecations ***</echo>
<compileSources sources="${test.src.dir};${src.shims.test.dir}"
excludes="**/PigTestLoader.java **/resources/** perf/** ${src.exclude.dir}"
dist="${test.build.classes}" cp="test.classpath" warnings="${javac.args.warnings}" />
<copy file="${basedir}/test/hbase-site.xml" tofile="${test.build.classes}/hbase-site.xml"/>
<ivy:cachepath pathid="mr-apps-test-ivy.classpath" conf="test" />
<path id="mr-apps-test.classpath">
<fileset dir="${clover.home}" erroronmissingdir="false">
<include name="lib/clover.jar"/>
</fileset>
<path refid="mr-apps-test-ivy.classpath"/>
</path>
<property name="mr-apps-classpath" refid="mr-apps-test.classpath" />
<!-- Remove jython jar from mrapp-generated-classpath -->
<script language="javascript">
project.setProperty('mr-apps-classpath', project.getProperty('mr-apps-classpath').
replace(":" + project.getProperty('ivy.default.ivy.user.dir') + "/cache/org.python/jython-standalone/jars/jython-standalone-" + project.getProperty('jython.version') + ".jar", ""));
</script>
<echo file="${test.build.classes}/mrapp-generated-classpath" message="${mr-apps-classpath}" />
</target>
<macrodef name="compileSources">
<attribute name="sources"/>
<attribute name="excludes"/>
<attribute name="dist"/>
<attribute name="cp"/>
<attribute name="warnings"/>
<sequential>
<javac encoding="${build.encoding}" srcdir="@{sources}" excludes="@{excludes}"
includes="**/*.java" destdir="@{dist}" debug="${javac.debug}"
optimize="${javac.optimize}" target="${javac.version}"
source="${javac.version}" deprecation="${javac.deprecation}"
includeantruntime="false">
<compilerarg line="${javac.args} @{warnings}"/>
<classpath refid="@{cp}" />
</javac>
<copy file="${src.dir}/org/apache/pig/tools/grunt/autocomplete" todir="${build.classes}/org/apache/pig/tools/grunt"/>
<copy file="${src.dir}/org/apache/pig/tools/grunt/autocomplete_aliases" todir="${build.classes}/org/apache/pig/tools/grunt"/>
<copy todir="${build.classes}/python">
<fileset dir="${python.src.dir}"/>
</copy>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Documentation -->
<!-- ================================================================== -->
<target name="javadoc" depends="jar, ivy-javadoc" description="Create documentation">
<mkdir dir="${build.javadoc}" />
<javadoc overview="${src.dir}/overview.html" packagenames="org.apache.pig.*" destdir="${build.javadoc}" author="true" version="true" use="true" windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
<packageset dir="${src.dir}" />
<link href="${javadoc.link.java}" />
<classpath>
<path refid="javadoc-classpath" />
<pathelement path="${output.jarfile.core}" />
</classpath>
<group title="pig" packages="org.apache.*" />
</javadoc>
</target>
<target name="javadoc-all" depends="jar, piggybank, ivy-javadoc" description="Create documentation including all contrib projects">
<mkdir dir="${build.javadoc}" />
<javadoc overview="${src.dir}/overview.html" packagenames="org.apache.pig*" destdir="${build.javadoc}" author="true" version="true" use="true" windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
<packageset dir="${src.dir}" />
<packageset dir="contrib/piggybank/java/src/main/java"/>
<link href="${javadoc.link.java}" />
<classpath>
<path refid="javadoc-classpath" />
<pathelement path="${output.jarfile.core}" />
<pathelement path="${piggybank.jarfile}"/>
</classpath>
<group title="pig" packages="org.apache.pig*" />
<group title="contrib: Piggybank" packages="org.apache.pig.piggybank*" />
</javadoc>
</target>
<!-- ================================================================== -->
<!-- @deprecated, Documentation -->
<!-- ================================================================== -->
<target name="docs" depends="forrest.check, javadoc-all" description="Generate forrest-based documentation.
To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
<exec dir="${docs.dir}" executable="${forrest.home}/bin/forrest" failonerror="true">
</exec>
<copy todir="${build.docs}">
<fileset dir="${docs.dir}/build/site/" />
</copy>
</target>
<target name="forrest.check" unless="forrest.home">
<fail message="'forrest.home' is not defined.
Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
</target>
<target name="source-jar" depends="cc-compile">
<jar duplicate="preserve" jarfile="${output.jarfile.sources}" basedir="${src.dir}/" excludes="docs/**, overview.html">
<manifest>
<section name="org/apache/pig">
<attribute name="Implementation-Vendor" value="Apache" />
<attribute name="Implementation-Title" value="Pig" />
<attribute name="Implementation-Version" value="${version}" />
</section>
</manifest>
<fileset dir="${src.lib.dir}/bzip2"/>
<fileset dir="${python.src.dir}"/>
</jar>
</target>
<target name="javadoc-jar" depends="cc-compile, javadoc">
<jar duplicate="preserve" jarfile="${output.jarfile.javadoc}" basedir="${build.javadoc}/">
<manifest>
<section name="org/apache/pig">
<attribute name="Implementation-Vendor" value="Apache" />
<attribute name="Implementation-Title" value="Pig" />
<attribute name="Implementation-Version" value="${version}" />
</section>
</manifest>
</jar>
</target>
<!-- ================================================================== -->
<!-- Make pig.jar -->
<!-- ================================================================== -->
<target name="jar-simple" depends="compile,ivy-buildJar" description="Create pig core jar">
<buildJar svnString="${svn.revision}" outputFile="${output.jarfile.core}" includedJars="core.dependencies.jar"/>
<buildJar svnString="${svn.revision}" outputFile="${output.jarfile.withouthadoop}" includedJars="runtime.dependencies-withouthadoop.jar"/>
<antcall target="copyCommonDependencies"/>
<antcall target="copySparkDependencies"/>
<antcall target="copyh2Dependencies"/>
<antcall target="copyHadoop2LocalRuntimeDependencies" />
</target>
<target name="copyCommonDependencies">
<mkdir dir="${lib.dir}" />
<copy todir="${lib.dir}">
<fileset dir="${ivy.lib.dir}" includes="antlr-runtime-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="ST4-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jline-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jackson-mapper-asl-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jackson-core-asl-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="joda-time-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="guava-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="automaton-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jansi-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="avro-*.jar" excludes="avro-*tests.jar,avro-mapred-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="trevni-core-*.jar" excludes="trevni-core-*tests.jar"/>
<fileset dir="${ivy.lib.dir}" includes="trevni-avro-*.jar" excludes="trevni-avro-*tests.jar"/>
<fileset dir="${ivy.lib.dir}" includes="snappy-java-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="asm*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jython-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="jruby-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="groovy-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="js-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="htrace-core*incubating.jar"/>
<fileset dir="${ivy.lib.dir}" includes="metrics-core-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="hbase-*.jar" excludes="hbase-*tests.jar,hbase-*hadoop2*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="hive-*.jar" excludes="hive-shims-0.*.jar, hive-contrib*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="protobuf-java-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="zookeeper-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="accumulo-*.jar" excludes="accumulo-minicluster*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="json-simple-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="kryo-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="httpdlog-*-${basjes-httpdlog-pigloader.version}.jar"/>
<fileset dir="${ivy.lib.dir}" includes="parser-core-${basjes-httpdlog-pigloader.version}.jar"/>
<fileset dir="${ivy.lib.dir}" includes="ivy-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-logging-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="RoaringBitmap-shaded-${roaring-bitmap-shaded.version}.jar"/>
</copy>
</target>
<target name="copySparkDependencies">
<mkdir dir="${spark.lib.dir}" />
<copy todir="${spark.lib.dir}">
<fileset dir="${ivy.lib.dir.spark}" includes="*.jar"/>
</copy>
</target>
<target name="copyh2Dependencies" if="isHadoop2">
<mkdir dir="${lib.dir}/h2" />
<copy todir="${lib.dir}/h2">
<fileset dir="${ivy.lib.dir}" includes="avro-mapred-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="hive-shims-0.*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="hbase-hadoop2*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="tez-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-collections4-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-lang3-*.jar"/>
</copy>
<copy file="${output.jarfile.core}" tofile="${output.jarfile.backcompat-core-h2}"/>
<mkdir dir="${legacy.dir}" />
<move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h2}"/>
</target>
<target name="copyHadoop2LocalRuntimeDependencies">
<mkdir dir="${lib.dir}/hadoop2-runtime" />
<copy todir="${lib.dir}/hadoop2-runtime">
<fileset dir="${ivy.lib.dir}" includes="hadoop-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-cli-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-configuration-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-collections-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-lang3-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-codec-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-io-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="commons-logging-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="httpcore-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="log4j-*.jar"/>
<fileset dir="${ivy.lib.dir}" includes="slf4j-*.jar"/>
</copy>
</target>
<scriptdef name="propertyreset" language="javascript"
description="Allows to assign @{property} new value">
<attribute name="name"/>
<attribute name="value"/>
project.setProperty(attributes.get("name"), attributes.get("value"));
</scriptdef>
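<!-- propertyreset works around Ant's property immutability by writing the value directly on the project object from JavaScript. -->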
<macrodef name="propertycopy">
<attribute name="name"/>
<attribute name="from"/>
<sequential>
<propertyreset name="@{name}" value="${@{from}}"/>
</sequential>
</macrodef>
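<!-- propertycopy resolves an indirect property reference, e.g. copying test.exclude.file.${test.exec.type} into a fixed property name so the test runner can use it. -->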
<target name="jar-core" depends="compile,ivy-buildJar" description="Create only pig core jar">
<buildJar svnString="${svn.revision}" outputFile="${output.jarfile.core}" includedJars="core.dependencies.jar"/>
</target>
<target name="jar" description="Create pig jar with Spark 1 and 2">
<echo>Compiling against Spark 2</echo>
<antcall target="clean-deps" inheritRefs="true" inheritall="true"/>
<propertyreset name="sparkversion" value="2"/>
<propertyreset name="src.exclude.dir" value="**/Spark1*.java" />
<antcall target="jar-core" inheritRefs="true" inheritall="true"/>
<move file="${output.jarfile.core}" tofile="${basedir}/_pig-shims.jar"/>
<echo>Compiling against Spark 1</echo>
<antcall target="clean-deps" inheritRefs="true" inheritall="true"/>
<propertyreset name="sparkversion" value="1"/>
<propertyreset name="src.exclude.dir" value="**/Spark2*.java" />
<antcall target="jar-simple" inheritRefs="true" inheritall="true"/>
<jar update="yes" jarfile="${output.jarfile.core}">
<zipfileset src="${basedir}/_pig-shims.jar" includes="**/Spark2*.class"/>
</jar>
<jar update="yes" jarfile="${output.jarfile.backcompat-core-h2}">
<zipfileset src="${basedir}/_pig-shims.jar" includes="**/Spark2*.class"/>
</jar>
<jar update="yes" jarfile="${output.jarfile.withouthadoop-h2}">
<zipfileset src="${basedir}/_pig-shims.jar" includes="**/Spark2*.class"/>
</jar>
<delete file="${basedir}/_pig-shims.jar"/>
</target>
<!-- ================================================================== -->
<!-- macrodef: buildJar -->
<!-- ================================================================== -->
<macrodef name="buildJar">
<attribute name="svnString"/>
<attribute name="outputFile"/>
<attribute name="includedJars"/>
<sequential>
<echo>svnString @{svnString}</echo>
<jar jarfile="@{outputFile}" basedir="${build.classes}" duplicate="preserve">
<manifest>
<attribute name="Main-Class" value="org.apache.pig.Main" />
<section name="org/apache/pig">
<attribute name="Implementation-Vendor" value="Apache" />
<attribute name="Implementation-Title" value="Pig" />
<attribute name="Implementation-Version" value="${version}" />
<attribute name="Build-TimeStamp" value="${timestamp}" />
<attribute name="Svn-Revision" value="@{svnString}" />
</section>
</manifest>
<zipgroupfileset refid="@{includedJars}" />
<fileset file="${basedir}/src/pig-default.properties" />
<fileset file="${basedir}/src/main/jruby/pigudf.rb" />
<fileset file="${basedir}/conf/ivysettings.xml" />
<exclude name="hadoop-site.xml" />
</jar>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Smoke tests -->
<!-- ================================================================== -->
<target name="smoketests-jar" depends="pigunit-jar"
description="Creating jar file for smoke tests">
<echo> *** Creating smoke tests jar for pigunit ***</echo>
<jar jarfile="${smoke.tests.jarfile}" basedir="${test.build.classes}"
includes="org/apache/pig/test/pigunit/**/*">
<fileset dir="${basedir}" includes="test/data/pigunit/**/*"/>
<manifest>
<section name="org/apache/pig/test/pigunit">
<attribute name="Implementation-Vendor" value="Apache" />
<attribute name="Implementation-Title" value="Pig" />
<attribute name="Implementation-Version" value="${version}" />
<attribute name="Build-TimeStamp" value="${timestamp}" />
<attribute name="Svn-Revision" value="${svnString}" />
</section>
</manifest>
</jar>
</target>
<!-- ================================================================== -->
<!-- Make pigperf.jar -->
<!-- ================================================================== -->
<target name="pigperf" depends="compile-test" description="Create pigperf.jar">
<jar jarfile="pigperf.jar">
<fileset dir="${test.build.dir}/classes">
<include name="org/apache/pig/test/pigmix/**"/>
<include name="org/apache/pig/test/utils/datagen/*"/>
<include name="org/apache/pig/test/udf/storefunc/*"/>
</fileset>
<zipfileset src="test/perf/pigmix/lib/sdsuLibJKD12.jar" />
</jar>
</target>
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
<target name="test-core" depends="setWindowsPath,setLinuxPath,compile-test,pigtest-jar,debugger.check,jackson-pig-3039-test-download" description="Run full set of unit tests">
<macro-test-runner test.file="${test.all.file}" tests.failed="test-core.failed" />
<fail if="test-core.failed">Tests failed!</fail>
</target>
<target name="test-commit" depends="setWindowsPath,setLinuxPath,compile-test,pigtest-jar,debugger.check" description="Run approximate 10-minute set of unit tests prior to commiting">
<macro-test-runner test.file="${test.commit.file}" tests.failed="test-commit.failed"/>
<fail if="test-commit.failed">Tests failed!</fail>
</target>
<target name="test-unit" depends="setWindowsPath,setLinuxPath,compile-test,pigtest-jar,debugger.check" description="Run all true unit tests">
<macro-test-runner test.file="${test.unit.file}" tests.failed="test-unit.failed"/>
<fail if="test-unit.failed">Tests failed!</fail>
</target>
<target name="test-smoke" depends="setWindowsPath,setLinuxPath,compile-test,pigtest-jar,debugger.check" description="Run 30 min smoke tests">
<macro-test-runner test.file="${test.smoke.file}" tests.failed="test-smoke.failed"/>
<fail if="test-smoke.failed">Tests failed!</fail>
</target>
<target name="test-tez" depends="setTezEnv,setWindowsPath,setLinuxPath,compile-test,debugger.check,jackson-pig-3039-test-download" description="Run tez unit tests">
<macro-test-runner test.file="${test.all.file}" tests.failed="test-tez.failed"/>
<fail if="test-tez.failed">Tests failed!</fail>
</target>
<target name="test-spark" depends="setSparkEnv,setWindowsPath,setLinuxPath,compile-test,pigtest-jar,debugger.check,jackson-pig-3039-test-download" description="Run Spark unit tests in Spark cluster-local mode">
<macro-test-runner test.file="${test.all.file}" tests.failed="test-spark.failed"/>
<fail if="test-spark.failed">Tests failed!</fail>
</target>
<target name="debugger.check" depends="debugger.set,debugger.unset"/>
<target name="debugger.set" if="debugPort">
<property name="debugArgs" value="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=${debugPort}"/>
<echo>debugArgs: ${debugArgs}</echo>
</target>
<target name="debugger.unset" unless="debugPort">
<property name="debugArgs" value=""/>
</target>
<macrodef name="macro-test-runner">
<attribute name="test.file" />
<attribute name="tests.failed" />
<sequential>
<delete dir="${test.log.dir}"/>
<mkdir dir="${test.log.dir}"/>
<tempfile property="junit.tmp.dir" prefix="pig_junit_tmp" destDir="${java.io.tmpdir}" />
<mkdir dir="${junit.tmp.dir}/"/>
<propertycopy name="test.exclude.file.for.exectype" from="test.exclude.file.${test.exec.type}"/>
<echo>Tests in ${test.exclude.file.for.exectype} will be excluded</echo>
<junit showoutput="${test.output}" printsummary="yes" haltonfailure="no" fork="yes" maxmemory="2048m" dir="${basedir}" timeout="${test.timeout}" errorProperty="@{tests.failed}" failureProperty="@{tests.failed}">
<sysproperty key="hadoopversion" value="${hadoopversion}" />
<sysproperty key="test.exec.type" value="${test.exec.type}" />
<sysproperty key="ssh.gateway" value="${ssh.gateway}" />
<sysproperty key="hod.server" value="${hod.server}" />
<sysproperty key="build.classes" value="${build.classes}" />
<sysproperty key="test.build.classes" value="${test.build.classes}" />
<sysproperty key="ivy.lib.dir" value="${ivy.lib.dir}" />
<sysproperty key="java.io.tmpdir" value="${junit.tmp.dir}" />
<sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
<jvmarg line="-XX:+CMSClassUnloadingEnabled ${debugArgs} -Djava.library.path=${hadoop.root}\bin"/>
<sysproperty key="java.security.krb5.realm" value="" />
<sysproperty key="java.security.krb5.kdc" value="" />
<sysproperty key="log4j.configuration" value="file:${basedir}/conf/test-log4j.properties"/>
<env key="MALLOC_ARENA_MAX" value="4"/>
<env key="SPARK_MASTER" value="${test.spark.spark_master}"/>
<env key="PATH" path="${build.path}"/>
<classpath>
<pathelement location="${output.jarfile.core}" />
<pathelement location="${test.build.classes}" />
<pathelement location="." />
<pathelement path="${clover.jar}"/>
<path refid="test.classpath"/>
</classpath>
<formatter type="${test.junit.output.format}" />
<batchtest fork="yes" todir="${test.log.dir}" unless="testcase">
<fileset dir="test">
<patternset>
<includesfile name="@{test.file}"/>
<excludesfile name="${test.exclude.file}" if="test.exclude.file"/>
<excludesfile name="${test.exclude.file.for.exectype}"/>
</patternset>
<exclude name="**/${exclude.testcase}.java" if="exclude.testcase" />
<exclude name="**/TestRegisteredJarVisibility.java" if="offline"/>
<exclude name="**/TestInvokerSpeed.java" if="clover.enabled"/>
</fileset>
</batchtest>
<batchtest fork="yes" todir="${test.log.dir}" if="testcase">
<fileset dir="test" includes="**/${testcase}.java">
<exclude name="e2e/**/*.java"/>
</fileset>
</batchtest>
<assertions>
<enable />
</assertions>
</junit>
<delete dir="${junit.tmp.dir}/"/>
</sequential>
</macrodef>
<target name="test" description="to call the test-core and test-contrib target">
<antcall target="test-core" inheritRefs="true" inheritall="true"/>
</target>
<target name="test-core-mrtez" description="run core tests on both mr and tez mode"
depends="setWindowsPath,setLinuxPath,compile-test,debugger.check,jackson-pig-3039-test-download">
<fail message="hadoopversion must be set to 2 when invoking test-core-mrtez">
<condition>
<not>
<equals arg1="${hadoopversion}" arg2="2" />
</not>
</condition>
</fail>
<echo message="=======================" />
<echo message="Running MR tests" />
<echo message="=======================" />
<propertyreset name="test.exec.type" value="mr" />
<propertyreset name="test.log.dir" value="${test.build.dir}/logs/${test.exec.type}" />
<macro-test-runner test.file="${test.all.file}" tests.failed="test.mr.failed"/>
<delete>
<fileset dir="${build.classes}" includes="*.xml" />
</delete>
<echo />
<echo message="=======================" />
<echo message="Running Tez tests" />
<echo message="=======================" />
<propertyreset name="test.exec.type" value="tez" />
<propertyreset name="test.log.dir" value="${test.build.dir}/logs/${test.exec.type}" />
<macro-test-runner test.file="${test.all.file}" tests.failed="test.tez.failed"/>
<condition property="any.tests.failed">
<or>
<isset property="test.mr.failed"/>
<isset property="test.tez.failed"/>
</or>
</condition>
<fail if="any.tests.failed">Tests failed!</fail>
</target>
<!-- ================================================================== -->
<!-- End to end tests -->
<!-- ================================================================== -->
<target name="test-e2e" depends="jar, piggybank" description="run end-to-end tests">
<ant dir="${test.e2e.dir}"/>
</target>
<target name="test-e2e-tez" depends="jar, piggybank" description="run end-to-end tests in tez mode">
<ant dir="${test.e2e.dir}" target="test-tez"/>
</target>
<target name="test-e2e-spark" depends="jar, piggybank" description="run end-to-end tests in spark mode">
<ant dir="${test.e2e.dir}" target="test-spark"/>
</target>
<target name="test-e2e-deploy" depends="jar" description="deploy end-to-end tests to existing cluster">
<ant dir="${test.e2e.dir}" target="deploy"/>
</target>
<target name="test-e2e-undeploy" depends="jar" description="undeploy end-to-end tests from existing cluster">
<ant dir="${test.e2e.dir}" target="undeploy"/>
</target>
<target name="test-e2e-local" depends="jar, piggybank" description="run end-to-end tests in local mode">
<ant dir="${test.e2e.dir}" target="test-local"/>
</target>
<target name="test-e2e-deploy-local" depends="jar" description="create files needed by local mode end-to-end tests">
<ant dir="${test.e2e.dir}" target="deploy-local"/>
</target>
<target name="pigmix-deploy" depends="jar" description="deploy end-to-end tests to existing cluster">
<ant dir="${pigmix.dir}" target="deploy"/>
</target>
<target name="pigmix" depends="jar, piggybank" description="run end-to-end tests">
<ant dir="${pigmix.dir}" target="test"/>
</target>
<target name="pigtest-jar" depends="compile-test, ivy-test" description="create the pigtest jar file">
<echo> *** Creating pigtest.jar ***</echo>
<jar destfile="${pigtest.jarfile}">
<fileset dir="${test.build.classes}">
<include name="**/org/apache/pig/test/**"/>
</fileset>
<zipfileset src="${ivy.lib.dir}/commons-lang3-${commons-lang3.version}.jar" />
</jar>
</target>
<!-- ================================================================== -->
<!-- Pigunit -->
<!-- ================================================================== -->
<target name="pigunit-jar" depends="compile-test, ivy-test" description="create the pigunit jar file">
<echo> *** Creating pigunit.jar ***</echo>
<jar destfile="${pigunit.jarfile}">
<fileset dir="${test.build.classes}">
<include name="**/org/apache/pig/pigunit/**"/>
<include name="**/org/apache/pig/test/Util.**"/>
</fileset>
<zipfileset src="${ivy.lib.dir}/commons-lang3-${commons-lang3.version}.jar" />
</jar>
</target>
<target name="test-pigunit" depends="setWindowsPath,setLinuxPath,compile-test,jar, pigunit-jar" description="Run tests that test PigUnit">
<macro-test-runner test.file="${test.pigunit.file}" />
</target>
<!-- ================================================================== -->
<!-- Distribution -->
<!-- ================================================================== -->
<target name="package" depends="jar, docs, api-report, piggybank" description="Create a Pig tar release">
<package-base/>
</target>
<macrodef name="package-base">
<sequential>
<mkdir dir="${tar.dist.dir}" />
<mkdir dir="${tar.dist.dir}/lib" />
<mkdir dir="${tar.dist.dir}/conf" />
<mkdir dir="${tar.dist.dir}/scripts" />
<mkdir dir="${tar.dist.dir}/docs" />
<mkdir dir="${tar.dist.dir}/docs/api" />
<mkdir dir="${tar.dist.dir}/docs/jdiff"/>
<mkdir dir="${tar.dist.dir}/license" />
<copy todir="${tar.dist.dir}/lib" includeEmptyDirs="false">
<fileset dir="${lib.dir}"/>
</copy>
<copy file="${output.jarfile.backcompat-core-h2}" tofile="${tar.dist.dir}/${final.name}-core-h2.jar" failonerror="false"/>
<copy todir="${tar.dist.dir}/lib" file="contrib/piggybank/java/piggybank.jar"/>
<copy todir="${tar.dist.dir}/" file="ivy.xml" />
<copy todir="${tar.dist.dir}/ivy">
<fileset dir="ivy" />
</copy>
<copy todir="${tar.dist.dir}/bin">
<fileset dir="bin" />
</copy>
<copy todir="${tar.dist.dir}/docs">
<fileset dir="${build.docs}" />
</copy>
<copy todir="${tar.dist.dir}/conf" file="conf/pig.properties"/>
<copy todir="${tar.dist.dir}/conf" file="conf/log4j.properties.template"/>
<copy todir="${tar.dist.dir}/conf" file="conf/test-log4j.properties"/>
<copy todir="${tar.dist.dir}/src" includeEmptyDirs="true">
<fileset dir="${src.dir}" />
</copy>
<copy todir="${tar.dist.dir}/shims" includeEmptyDirs="true">
<fileset dir="${basedir}/shims" />
</copy>
<copy todir="${tar.dist.dir}/lib-src" includeEmptyDirs="true">
<fileset dir="${src.lib.dir}" />
</copy>
<copy todir="${tar.dist.dir}/test" includeEmptyDirs="true">
<fileset dir="${test.src.dir}" />
</copy>
<copy todir="${tar.dist.dir}/tutorial" includeEmptyDirs="true">
<fileset dir="tutorial" />
</copy>
<copy todir="${tar.dist.dir}/contrib" includeEmptyDirs="true">
<fileset dir="contrib" />
</copy>
<copy todir="${tar.dist.dir}/" file="build.xml" />
<copy todir="${tar.dist.dir}">
<fileset dir=".">
<include name="*.txt" />
</fileset>
</copy>
<copy todir="${tar.dist.dir}/license">
<fileset dir="license" />
</copy>
<copy todir="${tar.dist.dir}/legacy">
<fileset dir="${legacy.dir}" />
</copy>
<chmod perm="ugo+x" type="file">
<fileset dir="${tar.dist.dir}/bin" />
</chmod>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make release packages -->
<!-- ================================================================== -->
<target name="tar" depends="package" description="Source distribution">
<tar-base/>
</target>
<macrodef name="tar-base">
<sequential>
<tar compression="gzip" longfile="gnu" destfile="${build.dir}/${artifact.pig.tar}">
<tarfileset dir="${build.dir}/tar/" mode="664">
<exclude name="${final.name}/bin/*" />
<include name="${final.name}/**" />
</tarfileset>
<tarfileset dir="${build.dir}/tar" mode="755">
<include name="${final.name}/bin/*" />
</tarfileset>
</tar>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make release tarball -->
<!-- ================================================================== -->
<target name="src-release" depends="clean, clean-piggybank, clean-test-e2e, clean-tutorial" description="Source distribution">
<mkdir dir="${build.dir}"/>
<tar compression="gzip" longfile="gnu"
destfile="${build.dir}/${final.name}-src.tar.gz">
<tarfileset dir="${basedir}" mode="644" prefix="${final.name}-src">
<include name="conf/**"/>
<include name="contrib/**"/>
<include name="ivy/**"/>
<exclude name="ivy/ivy-*.jar"/>
<exclude name="ivy/maven-*.jar"/>
<include name="lib/**"/>
<include name="lib-src/**"/>
<include name="license/**"/>
<include name="shims/**"/>
<include name="src/**"/>
<include name="test/**"/>
<exclude name="test/**/*.jar"/>
<include name="tutorial/**"/>
<include name="*.txt"/>
<include name="*.xml"/>
<include name="doap_Pig.rdf"/>
<include name="KEYS"/>
<include name="autocomplete"/>
</tarfileset>
<tarfileset dir="" mode="755" prefix="${final.name}-src">
<include name="bin/**"/>
</tarfileset>
</tar>
</target>
<!-- ================================================================== -->
<!-- Findbugs -->
<!-- ================================================================== -->
<property name="findbugs.home" value=""/>
<target name="findbugs" depends="check-for-findbugs, jar" if="findbugs.present" description="Run findbugs if present">
<property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
<property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
<property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/pig-findbugs-report.html"/>
<property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/pig-findbugs-report.xml"/>
<taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
classpath="${findbugs.home}/lib/findbugs-ant.jar" />
<mkdir dir="${findbugs.out.dir}"/>
<findbugs home="${findbugs.home}" output="xml:withMessages"
outputFile="${findbugs.report.xmlfile}" effort="max" timeout="2400000"
excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx1024M">
<auxClasspath>
<fileset dir="${lib.dir}">
<include name="**/*.jar"/>
</fileset>
</auxClasspath>
<sourcePath path="${src.dir}"/>
<class location="${output.jarfile.core}" />
</findbugs>
<xslt style="${findbugs.home}/src/xsl/default.xsl" in="${findbugs.report.xmlfile}"
out="${findbugs.report.htmlfile}"/>
</target>
<target name="check-for-findbugs">
<available property="findbugs.present" file="${findbugs.home}/lib/findbugs.jar" />
</target>
<target name="ant-task-download" description="To download mvn-ant-task" unless="offline">
<get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
</target>
<target name="mvn-taskdef" depends="ant-task-download">
<path id="mvn-ant-task.classpath" path="${ant_task.jar}"/>
<typedef resource="org/apache/maven/artifact/ant/antlib.xml"
uri="urn:maven-artifact-ant"
classpathref="mvn-ant-task.classpath"/>
</target>
<target name="mvn-install" depends="mvn-taskdef, mvn-build, set-version"
description="To install pig to local filesystem's m2 cache">
<artifact:pom file="${pig.pom}" id="pig"/>
<artifact:install file="${output.jarfile.core-h2}">
<pom refid="pig"/>
<attach file="${output.jarfile.sources}" classifier="sources" />
<attach file="${output.jarfile.javadoc}" classifier="javadoc" />
</artifact:install>
<artifact:pom file="${pigunit.pom}" id="pigunit"/>
<artifact:install file="${pigunit.jarfile}">
<pom refid="pigunit"/>
</artifact:install>
<artifact:pom file="${pigsmoke.pom}" id="pigsmoke"/>
<artifact:install file="${smoke.tests.jarfile}">
<pom refid="pigsmoke"/>
</artifact:install>
<artifact:pom file="${piggybank.pom}" id="piggybank"/>
<artifact:install file="${piggybank.jarfile}">
<pom refid="piggybank"/>
</artifact:install>
</target>
<target name="mvn-build" depends="jar, source-jar,
javadoc-jar, smoketests-jar, pigunit-jar, piggybank"
description="To build the pig jar artifacts to be deployed to apache maven repository">
<move file="${output.jarfile.backcompat-core-h2}" tofile="${output.jarfile.core-h2}"/>
</target>
<!-- Expects that mvn-build has already been run but does not run it. In some cases building
might happen on one host and signing/uploading on another. Hence the decoupled tasks. -->
<target name="mvn-publish" depends="mvn-taskdef, set-version, signanddeploy, simpledeploy"
description="To publish the pig jar artifacts to the apache maven repository">
</target>
<target name="mvn-deploy" depends="mvn-build, mvn-publish"
description="To build and upload pig jar to apache maven repository">
</target>
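<!-- Deployment flow, as wired above: "ant mvn-deploy -Dstaging=true" signs the artifacts and
     pushes them to ${asfstagingrepo} via signanddeploy; without -Dstaging, the simpledeploy
     target pushes unsigned snapshots to ${asfsnapshotrepo}. Repository credentials are assumed
     to be configured outside this file. -->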
<target name="signanddeploy" if="staging" depends="sign">
<artifact:pom file="${pig.pom}" id="pig"/>
<artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
<artifact:deploy file="${output.jarfile.core}">
<remoteRepository id="${staging_repo_id}" url="${asfstagingrepo}"/>
<pom refid="pig"/>
<attach file="${output.jarfile.core}.asc" type="jar.asc"/>
<attach file="${pig.pom}.asc" type="pom.asc"/>
<attach file="${output.jarfile.sources}.asc" type="jar.asc" classifier="sources"/>
<attach file="${output.jarfile.sources}" classifier="sources" />
<attach file="${output.jarfile.javadoc}.asc" type="jar.asc" classifier="javadoc"/>
<attach file="${output.jarfile.javadoc}" classifier="javadoc" />
</artifact:deploy>
<artifact:pom file="${pigunit.pom}" id="pigunit"/>
<artifact:deploy file="${pigunit.jarfile}">
<remoteRepository id="${staging_repo_id}" url="${asfstagingrepo}"/>
<pom refid="pigunit"/>
<attach file="${pigunit.jarfile}.asc" type="jar.asc"/>
<attach file="${pigunit.pom}.asc" type="pom.asc"/>
</artifact:deploy>
<artifact:pom file="${pigsmoke.pom}" id="pigsmoke"/>
<artifact:deploy file="${smoke.tests.jarfile}">
<remoteRepository id="${staging_repo_id}" url="${asfstagingrepo}"/>
<pom refid="pigsmoke"/>
<attach file="${smoke.tests.jarfile}.asc" type="jar.asc"/>
<attach file="${pigsmoke.pom}.asc" type="pom.asc"/>
</artifact:deploy>
<artifact:pom file="${piggybank.pom}" id="piggybank"/>
<artifact:deploy file="${piggybank.jarfile}">
<remoteRepository id="${staging_repo_id}" url="${asfstagingrepo}"/>
<pom refid="piggybank"/>
<attach file="${piggybank.jarfile}.asc" type="jar.asc"/>
<attach file="${piggybank.pom}.asc" type="pom.asc"/>
</artifact:deploy>
</target>
<target name="simpledeploy" unless="staging">
<artifact:pom file="${pig.pom}" id="pig"/>
<artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
<artifact:deploy file="${output.jarfile.core}">
<remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
<pom refid="pig"/>
<attach file="${output.jarfile.sources}" classifier="sources" />
<attach file="${output.jarfile.javadoc}" classifier="javadoc" />
</artifact:deploy>
<artifact:pom file="${pigunit.pom}" id="pigunit"/>
<artifact:deploy file="${pigunit.jarfile}">
<remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
<pom refid="pigunit"/>
</artifact:deploy>
<artifact:pom file="${pigsmoke.pom}" id="pigsmoke"/>
<artifact:deploy file="${smoke.tests.jarfile}">
<remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
<pom refid="pigsmoke"/>
</artifact:deploy>
<artifact:pom file="${piggybank.pom}" id="piggybank"/>
<artifact:deploy file="${piggybank.jarfile}">
<remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
<pom refid="piggybank"/>
</artifact:deploy>
</target>
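<!-- The sign target below runs only with -Dstaging=true. It prompts for the GPG passphrase and
     writes a detached .asc signature next to each jar and POM. A "gpg" executable on the PATH is
     assumed; nothing in this build installs it. -->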
<target name="sign" depends="clean-sign" if="staging">
<input message="password:>" addproperty="gpg.passphrase">
<handler classname="org.apache.tools.ant.input.SecureInputHandler" />
</input>
<macrodef name="sign-artifact" description="Signs the artifact">
<attribute name="input.file"/>
<attribute name="output.file" default="@{input.file}.asc"/>
<attribute name="gpg.passphrase"/>
<sequential>
<echo>Signing @{input.file} Sig File: @{output.file}</echo>
<exec executable="gpg" >
<arg value="--armor"/>
<arg value="--output"/>
<arg value="@{output.file}"/>
<arg value="--passphrase"/>
<arg value="@{gpg.passphrase}"/>
<arg value="--detach-sig"/>
<arg value="@{input.file}"/>
</exec>
</sequential>
</macrodef>
<sign-artifact input.file="${output.jarfile.core}"
output.file="${output.jarfile.core}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${output.jarfile.sources}"
output.file="${output.jarfile.sources}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${output.jarfile.javadoc}"
output.file="${output.jarfile.javadoc}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${pig.pom}"
output.file="${pig.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${pigunit.pom}"
output.file="${pigunit.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${pigunit.jarfile}"
output.file="${pigunit.jarfile}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${pigsmoke.pom}"
output.file="${pigsmoke.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${smoke.tests.jarfile}"
output.file="${smoke.tests.jarfile}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${piggybank.jarfile}"
output.file="${piggybank.jarfile}.asc" gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact input.file="${piggybank.pom}"
output.file="${piggybank.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
</target>
<target name="clean-sign" description="Clean. Delete .asc files">
<delete>
<fileset dir="." includes="**/**/*.asc"/>
</delete>
</target>
<target name="set-version">
<delete file="${pig.pom}"/>
<delete file="${pigunit.pom}"/>
<delete file="${pigsmoke.pom}"/>
<delete file="${piggybank.pom}"/>
<copy file="${basedir}/ivy/pig-template.xml" tofile="${pig.pom}"/>
<copy file="${basedir}/ivy/pigunit-template.xml" tofile="${pigunit.pom}"/>
<copy file="${basedir}/ivy/pigsmoke-template.xml" tofile="${pigsmoke.pom}"/>
<copy file="${basedir}/ivy/piggybank-template.xml" tofile="${piggybank.pom}"/>
<replaceregexp byline="true">
<regexp pattern="@version"/>
<substitution expression="${version}"/>
<fileset dir="${basedir}/ivy">
<include name="*.pom"/>
</fileset>
</replaceregexp>
</target>
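<!-- set-version regenerates the four POMs from their ivy/*-template.xml counterparts and
     substitutes @version with ${version}; it is already pulled in by mvn-install and mvn-publish,
     so running it by hand is usually unnecessary. -->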
<!-- ================================================================== -->
<!-- Perform audit activities for the release -->
<!-- ================================================================== -->
<target name="releaseaudit" depends="ivy-releaseaudit" description="generate a release audit report">
<get src="${mvnrepo}/org/apache/rat/apache-rat/${apacherat.version}/apache-rat-${apacherat.version}.jar"
dest="${basedir}/build/apache-rat-${apacherat.version}.jar"
usetimestamp="true"
skipexisting="true"/>
<java jar="${basedir}/build/apache-rat-${apacherat.version}.jar"
fork="true"
output="${basedir}/build/releaseaudit_report.txt">
<arg value="--dir"/>
<arg value="${basedir}"/>
<arg value="-e"/>
<arg value=".svn"/>
<arg value=".git"/>
<arg value=".gitignore"/>
<arg value=".*/build/.*"/>
</java>
<echo message="releaseaudit report generated at ${basedir}/build/releaseaudit_report.txt"/>
</target>
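<!-- Example usage (a sketch): "ant releaseaudit" fetches Apache RAT ${apacherat.version} into
     ${basedir}/build (skipped if already present) and writes the report to
     ${basedir}/build/releaseaudit_report.txt. -->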
<!--target name="checkstyle" depends="checkstyle.check, set-checkstyle-classpath" if="checkstyle.home" -->
<target name="checkstyle" depends="jar, ivy-checkstyle" description="Run optional third-party tool targets">
<taskdef resource="checkstyletask.properties">
<classpath refid="checkstyle-classpath"/>
</taskdef>
<mkdir dir="${test.build.dir}"/>
<checkstyle config="${test.src.dir}/checkstyle.xml" failOnViolation="false">
<fileset dir="${src.dir}" includes="**/*.java" excludes="**/generated/**"/>
<formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
</checkstyle>
<xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl" in="${test.build.dir}/checkstyle-errors.xml"
out="${test.build.dir}/checkstyle-errors.html"/>
</target>
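<!-- Example usage (a sketch): "ant checkstyle" retrieves the Checkstyle dependencies through Ivy
     and writes ${test.build.dir}/checkstyle-errors.xml plus an HTML rendering at
     ${test.build.dir}/checkstyle-errors.html; violations do not fail the build
     (failOnViolation="false"). -->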
<target name="checkstyle.check" unless="checkstyle.home">
<fail message="'checkstyle.home' is not defined. Please pass -Dcheckstyle.home=&lt;base of checkstyle installation&gt;
to Ant on the command-line." />
</target>
<!--
<target name="set-checkstyle-classpath">
<path id="checkstyle-classpath">
<fileset dir="${checkstyle.home}">
<include name="**/*.jar"/>
</fileset>
</path>
</target>
-->
<path id="checkstyle-classpath">
<path refid="checkstyle.classpath"/>
<fileset dir="${ivy.lib.dir}">
<include name="*.jar"/>
</fileset>
</path>
<target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
<fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=&lt;base of Findbugs installation&gt;
to Ant on the command-line." />
</target>
<target name="patch.check" unless="patch.file">
<fail message="'patch.file' is not defined. Please pass -Dpatch.file=&lt;location of patch file&gt;
to Ant on the command-line." />
</target>
<target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
<exec executable="bash" failonerror="true">
<arg value="${test_patch_sh}"/>
<arg value="DEVELOPER"/>
<arg value="${patch.file}"/>
<arg value="${scratch.dir}"/>
<arg value="${svn.cmd}"/>
<arg value="${grep.cmd}"/>
<arg value="${patch.cmd}"/>
<arg value="${findbugs.home}"/>
<arg value="${forrest.home}"/>
<arg value="${basedir}"/>
<arg value="${ant.project.name}"/>
</exec>
</target>
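<!-- Example invocation (paths are placeholders; the -D properties shown are the ones the *.check
     targets above insist on, while the remaining arguments such as svn.cmd, grep.cmd and
     patch.cmd come from properties defined elsewhere in this build):
     "ant test-patch -Dpatch.file=/tmp/my.patch -Dfindbugs.home=/opt/findbugs -Dforrest.home=/opt/forrest" -->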
<target name="hudson-test-patch" depends="findbugs.check,forrest.check">
<exec executable="bash" failonerror="true">
<arg value="${test_patch_sh}"/>
<arg value="HUDSON"/>
<arg value="${scratch.dir}"/>
<arg value="${support.dir}"/>
<arg value="${ps.cmd}"/>
<arg value="${wget.cmd}"/>
<arg value="${jiracli.cmd}"/>
<arg value="${svn.cmd}"/>
<arg value="${grep.cmd}"/>
<arg value="${patch.cmd}"/>
<arg value="${findbugs.home}"/>
<arg value="${forrest.home}"/>
<arg value="${eclipse.home}"/>
<arg value="${python.home}"/>
<arg value="${basedir}"/>
<arg value="${trigger.url}"/>
<arg value="${jira.passwd}"/>
<arg value="${curl.cmd}"/>
<arg value="${defect}"/>
<arg value="${ant.project.name}"/>
</exec>
</target>
<target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover.
To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
<target name="clover.setup" if="clover.enabled">
<taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
<mkdir dir="${clover.db.dir}"/>
<clover-setup initString="${clover.db.dir}/pig_coverage.db">
<fileset dir="src" includes="**/*.java"/>
</clover-setup>
</target>
<target name="clover.info" unless="clover.present">
<echo>
Clover not found. Code coverage reports disabled.
</echo>
</target>
<target name="clover.check">
<fail unless="clover.present">
##################################################################
Clover not found.
Please specify -Dclover.home=&lt;base of clover installation&gt;
on the command line.
##################################################################
</fail>
</target>
<target name="generate-clover-reports" depends="clover.check, clover">
<mkdir dir="${clover.report.dir}"/>
<clover-report>
<current outfile="${clover.report.dir}" title="${final.name}">
<format type="html"/>
</current>
</clover-report>
<clover-report>
<current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
<format type="xml"/>
</current>
</clover-report>
</target>
<target name="generate-pdf-clover-reports" depends="clover.check, clover">
<mkdir dir="${clover.pdf.report.dir}"/>
<clover-pdf-report outfile="${clover.pdf.report.dir}/clover_coverage.pdf" />
</target>
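<!-- Clover workflow (a sketch, assuming a local Clover install as the clover target's description
     instructs): pass -Dclover.home=<clover install> -Drun.clover=true so that clover.setup
     instruments the sources under src (coverage data is collected in
     ${clover.db.dir}/pig_coverage.db), run the tests, then invoke generate-clover-reports for
     HTML/XML output in ${clover.report.dir} or generate-pdf-clover-reports for a PDF in
     ${clover.pdf.report.dir}. -->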
<target name="api-xml" depends="ivy-jdiff, javadoc-all, write-null">
<javadoc>
<doclet name="jdiff.JDiff"
path="${jdiff.jar}:${xerces.jar}">
<param name="-apidir" value="${jdiff.xml.dir}"/>
<param name="-apiname" value="${name} ${version}"/>
</doclet>
<packageset dir="${src.dir}"/>
<classpath>
<path refid="javadoc-classpath" />
<pathelement path="${output.jarfile.core}" />
</classpath>
</javadoc>
<!-- For some Scala classes, <any> is recorded as the return type in the JDiff XML; it has to be XML-escaped so the file stays well-formed -->
<replace file="${jdiff.xml.dir}/${name}_${version}.xml" token="&quot;&lt;any&gt;&quot;" value="&quot;&amp;lt;any&amp;gt;&quot;" />
</target>
<target name="write-null">
<exec executable="touch">
<arg value="${build.dir}/Null.java"/>
</exec>
</target>
<target name="api-report" depends="api-xml">
<mkdir dir="${jdiff.build.dir}"/>
<javadoc sourcepath="${src.dir}"
destdir="${jdiff.build.dir}"
sourceFiles="${build.dir}/Null.java">
<doclet name="jdiff.JDiff" path="${jdiff.jar}:${xerces.jar}">
<param name="-oldapi" value="${name} ${jdiff.stable}"/>
<param name="-newapi" value="${name} ${version}"/>
<param name="-oldapidir" value="${jdiff.xml.dir}"/>
<param name="-newapidir" value="${jdiff.xml.dir}"/>
<param name="-javadocold" value="${jdiff.stable.javadoc}"/>
<param name="-javadocnew" value="../../api/"/>
<param name="-stats"/>
</doclet>
<classpath>
<path refid="javadoc-classpath" />
<pathelement path="${output.jarfile.core}" />
</classpath>
</javadoc>
<fail message="Failed to generate jdiff change docs">
<!-- JDiff errors do not fail the build on their own, so check explicitly that the changes directory was produced -->
<condition><not><available file="${jdiff.build.dir}/changes" type="dir" /></not></condition>
</fail>
</target>
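<!-- JDiff flow (a sketch): api-xml records the current public API into ${jdiff.xml.dir} as
     ${name}_${version}.xml, and api-report compares it against the ${jdiff.stable} baseline,
     writing the change report under ${jdiff.build.dir}; the explicit <fail> above is needed
     because JDiff errors alone do not break the build. -->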
<target name="ivy-init-dirs">
<mkdir dir="${build.ivy.dir}" />
<mkdir dir="${build.ivy.lib.dir}" />
<mkdir dir="${build.ivy.report.dir}" />
<mkdir dir="${build.ivy.maven.dir}" />
</target>
<target name="ivy-probe-antlib" >
<condition property="ivy.found">
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
</condition>
</target>
<target name="ivy-download" description="To download ivy" unless="offline">
<get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
</target>
<!--
To keep Ivy state from leaking across large projects, always load Ivy in the same classloader.
Loading is skipped when the antlib has already been loaded (see ivy-probe-antlib).
-->
<target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
<typedef uri="antlib:org.apache.ivy.ant" onerror="fail" loaderRef="ivyLoader">
<classpath>
<pathelement location="${ivy.jar}"/>
</classpath>
</typedef>
<fail>
<condition >
<not>
<typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
</not>
</condition>
You need Apache Ivy 2.0 or later from http://ant.apache.org/
It could not be loaded from ${ivy_repo_url}
</fail>
</target>
<target name="ivy-init" depends="ivy-init-antlib" >
<!-- Configure Ivy by reading the settings file.
If a settings file has already been loaded under this settings ID, that earlier configuration takes priority (override='false'). -->
<ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
</target>
<target name="ivy-resolve" depends="ivy-init" unless="ivy.resolved" description="Resolve Ivy dependencies">
<property name="ivy.resolved" value="true"/>
<echo>*** Ivy resolve with Hadoop ${hadoopversion}, Spark ${sparkversion} and HBase ${hbaseversion} ***</echo>
<ivy:resolve log="${loglevel}" settingsRef="${ant.project.name}.ivy.settings" conf="compile"/>
<ivy:report toDir="build/ivy/report"/>
</target>
<target name="ivy-compile" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for compile configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="compile"/>
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${ivy.lib.dir.spark}/[artifact]-[revision](-[classifier]).[ext]" conf="spark${sparkversion},hbase${hbaseversion}"/>
<ivy:cachepath pathid="compile.classpath" conf="compile"/>
<exec dir="${basedir}/shade/roaringbitmap" executable="mvn">
<arg line="clean package -Droaring.bitmap.version=${roaring-bitmap-shaded.version}"/>
</exec>
<copy file="${basedir}/shade/roaringbitmap/target/RoaringBitmap-shaded-${roaring-bitmap-shaded.version}.jar" todir="${ivy.lib.dir}"/>
</target>
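<!-- Note on ivy-compile: besides the Ivy retrieves, it shells out to "mvn clean package" in
     ${basedir}/shade/roaringbitmap and copies the shaded RoaringBitmap jar into ${ivy.lib.dir},
     so a Maven executable on the PATH is assumed for this target. -->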
<target name="ivy-test" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for test configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="test"/>
</target>
<target name="ivy-javadoc" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for javadoc configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="javadoc"/>
<ivy:cachepath pathid="javadoc.classpath" conf="javadoc"/>
</target>
<target name="ivy-releaseaudit" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for releaseaudit configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="releaseaudit"/>
<ivy:cachepath pathid="releaseaudit.classpath" conf="releaseaudit"/>
</target>
<target name="ivy-checkstyle" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for checkstyle configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="checkstyle"/>
<ivy:cachepath pathid="checkstyle.classpath" conf="checkstyle"/>
</target>
<target name="ivy-buildJar" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for buildJar configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="buildJar"/>
<ivy:cachepath pathid="buildJar.classpath" conf="buildJar"/>
</target>
<target name="ivy-jdiff" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for jdiff configuration">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" log="${loglevel}"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="jdiff"/>
<ivy:cachepath pathid="jdiff.classpath" conf="jdiff"/>
</target>
<target name="ivy-clean-cache" depends="ivy-init-antlib"
description="Clean the Ivy cache">
<ivy:cleancache />
</target>
<target name="jackson-pig-3039-test-download" description="To download jackson" unless="offline">
<get src="${jackson_core_repo_url}" dest="${jackson_core.jar}" usetimestamp="true"/>
<get src="${jackson_mapper_repo_url}" dest="${jackson_mapper.jar}" usetimestamp="true"/>
</target>
<target name="jackson-pig-3039-test-clean" description="Clean the jackson jar files">
<delete file="${jackson_core.jar}"/>
<delete file="${jackson_mapper.jar}"/>
</target>
<target name="ready-to-publish" depends="jar, ivy-resolve"/>
<target name="ivy-publish-local" depends="ready-to-publish,ivy-resolve">
<ivy:publish settingsRef="${ant.project.name}.ivy.settings"
resolver="local" pubrevision="${version}" overwrite="true"
artifactspattern="${build.dir}/${ivy.publish.pattern}"/>
</target>
<!-- This target mainly exercises the makepom task.
makepom does not depend on dependency resolution, since it only translates the ivy file into a Maven POM;
the mappings below translate ivy configurations into Maven scopes.
-->
<target name="makepom" depends="ivy-init">
<ivy:makepom settingsRef="${ant.project.name}.ivy.settings"
ivyfile="ivy.xml"
pomfile="${build.ivy.maven.dir}/generated.pom"
conf="compile,runtime,test">
<ivy:mapping conf="compile" scope="compile"/>
<ivy:mapping conf="test" scope="test"/>
<ivy:mapping conf="runtime" scope="run"/>
</ivy:makepom>
</target>
<target name="copy-jar-to-maven" depends="ready-to-publish">
<copy file="${output.jarfile.core}" tofile="${build.ivy.maven.jar}"/>
<checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
</target>
<target name="copypom" depends="set-version, ivy-init-dirs">
<presetdef name="expandingcopy">
<copy overwrite="true">
<filterchain>
<expandproperties/>
</filterchain>
</copy>
</presetdef>
<expandingcopy file="ivy/pig.pom" tofile="${build.ivy.maven.pom}"/>
<checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
</target>
<target name="maven-artifacts" depends="copy-jar-to-maven, copypom" />
<target name="published" depends="ivy-publish-local, maven-artifacts"/>
<target name="piggybank" depends="jar">
<ant antfile="contrib/piggybank/java/build.xml" inheritAll="false">
<propertyset id="hadoop"> <propertyref name="hadoopversion"/> </propertyset>
</ant>
</target>
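<!-- piggybank delegates to contrib/piggybank/java/build.xml with inheritAll="false"; only the
     hadoopversion property is explicitly forwarded via the propertyset above. -->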
<import file="./build-site.xml" optional="true"/>
</project>