<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns:ivy="antlib:org.apache.ivy.ant"
xmlns:artifact="urn:maven-artifact-ant"
name="hivecommon" default="jar">
<property name="hive.root" location="${basedir}/.."/>
<property file="${hive.root}/build.properties"/>
<property file="${user.home}/build.properties" />
<property file="${basedir}/build.properties" />
<property environment="env"/>
<property name="hive.conf.dir" value="${hive.root}/conf"/>
<property name="dist.dir" location="${hive.root}"/>
<property name="build.dir.hive" location="${hive.root}/build"/>
<property name="build.dir.hadoop" location="${build.dir.hive}/hadoopcore"/>
<property name="build.dir" location="${build.dir.hive}/${ant.project.name}"/>
<property name="build.classes" location="${build.dir}/classes"/>
<property name="build.encoding" value="ISO-8859-1"/>
<!-- Thrift codegen properties -->
<property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode --gen cpp --gen php --gen py --gen rb"/>
<property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
<property name="hadoop.conf.dir" location="${hadoop.root}/conf"/>
<!-- configuration needed for tests -->
<property name="test.src.dir" value="${basedir}/src/test"/>
<property name="test.src.data.dir" value="${hive.root}/data"/>
<property name="test.resources.dir" value="${basedir}/src/test/resources"/>
<property name="test.build.dir" value="${build.dir}/test"/>
<property name="test.log.dir" value="${test.build.dir}/logs"/>
<property name="test.data.dir" value="${test.build.dir}/data"/>
<property name="test.build.src" value="${test.build.dir}/src"/>
<property name="test.build.classes" value="${test.build.dir}/classes"/>
<property name="test.build.resources" value="${test.build.dir}/resources"/>
<property name="test.include" value="Test*"/>
<property name="test.classpath.id" value="test.classpath"/>
<property name="test.output" value="true"/>
<property name="test.junit.output.format" value="xml"/>
<property name="test.junit.output.usefile" value="true"/>
<property name="minimr.query.files" value="list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q"/>
<property name="minimr.query.negative.files" value="cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q" />
<property name="test.silent" value="true"/>
<property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>
<property name="test.serialize.qplan" value="false"/>
<property name="test.print.classpath" value="false"/>
<property name="test.lang" value="en_US.UTF-8"/>
<property name="hadoop.opts.23" value="-D mapreduce.framework.name=local" />
<property name="hadoop.opts.20" value="" />
<condition property="test.halt.on.failure" value="no" else="yes">
<equals arg1="${test.continue.on.failure}" arg2="true"/>
</condition>
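<!-- Build the test classpath, de-duplicating entries via the custom DistinctElementsClassPath task from the hive-anttasks jar. -->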
<target name="set-test-classpath">
<typedef name="distinctelementsclasspath" classname="org.apache.hadoop.hive.ant.DistinctElementsClassPath"
classpath="${build.dir.hive}/anttasks/hive-anttasks-${version}.jar:${build.ivy.lib.dir}/default/commons-collections-${commons-collections.version}.jar:${build.ivy.lib.dir}/default/commons-lang-${commons-lang.version}.jar"/>
<distinctelementsclasspath id="test.classpath">
<pathelement location="${test.build.classes}" />
<pathelement location="${test.build.resources}" />
<pathelement location="" />
<pathelement location="${test.src.data.dir}/conf"/>
<pathelement location="${hive.conf.dir}"/>
<pathelement location="${build.dir.hive}/beeline/test/classes"/>
<pathelement location="${build.dir.hive}/cli/test/classes"/>
<pathelement location="${build.dir.hive}/common/test/classes"/>
<pathelement location="${build.dir.hive}/hbase-handler/test/classes"/>
<pathelement location="${build.dir.hive}/hwi/test/classes"/>
<pathelement location="${build.dir.hive}/jdbc/test/classes"/>
<pathelement location="${build.dir.hive}/metastore/test/classes"/>
<pathelement location="${build.dir.hive}/hcatalog/test/classes"/>
<pathelement location="${build.dir.hive}/ql/test/classes"/>
<pathelement location="${build.dir.hive}/serde/test/classes"/>
<pathelement location="${build.dir.hive}/service/test/classes"/>
<pathelement location="${build.dir.hive}/shims/test/classes"/>
<!-- Include build/dist/lib on the classpath before Ivy and exclude hive jars from Ivy to make sure we get the local changes when we test Hive -->
<fileset dir="${build.dir.hive}/dist/lib" includes="*.jar" erroronmissingdir="false" excludes="**/hive_contrib*.jar,**/hive-contrib*.jar,**/lib*.jar"/>
<fileset dir="${hive.root}/testlibs" includes="*.jar"/>
<fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
<pathelement location="${build.classes}" />
<!-- test directory may contain hadoop jars used by tests only (e.g. mini cluster) -->
<fileset dir="${hive.root}/build/ivy/lib/test" includes="*.jar" erroronmissingdir="false"
excludes="**/hive_*.jar,**/hive-*.jar"/>
<fileset dir="${hive.root}/build/ivy/lib/test" includes="hive-testutils*.jar" Erroronmissingdir="false"/>
<!-- strip out Hadoop jars found anywhere other than the Hadoop shim directory -->
<fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" erroronmissingdir="false"
excludes="**/hive_*.jar,**/hive-*.jar,**/hadoop-*.jar" />
</distinctelementsclasspath>
</target>
<!-- include contrib on local classpath, but not on cluster -->
<!-- https://reviews.facebook.net/D2133#comment-47 -->
<path id="test.local.classpath">
<path refid="${test.classpath.id}"/>
<fileset dir="${hive.root}/build/ivy/lib/test" includes="hive-contrib*.jar" erroronmissingdir="false"/>
</path>
<loadproperties srcfile="${ivy.conf.dir}/libraries.properties"/>
<osfamily property="os.family"/>
<condition property="offline">
<istrue value="${is-offline}"/>
</condition>
<import file="build-offline.xml"/>
<!--this is the naming policy for artifacts we want pulled down-->
<property name="ivy.artifact.retrieve.pattern" value="[conf]/[artifact]-[revision](-[classifier]).[ext]"/>
<target name="ivy-init-settings">
<!-- Configure Ivy by reading in the settings file.
If a settings file has already been loaded under this settings ID, that one takes priority.
-->
<echo message="Project: ${ant.project.name}"/>
<ivy:settings id="${ant.project.name}.ivy.settings" file="${ivysettings.xml}"/>
</target>
<target name="ivy-resolve" depends="ivy-init-settings" unless="offline">
<echo message="Project: ${ant.project.name}"/>
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
conf="default" log="${ivyresolvelog}"/>
<ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"
graph="false" />
</target>
<target name="ivy-retrieve" depends="ivy-resolve"
description="Retrieve Ivy-managed artifacts">
<echo message="Project: ${ant.project.name}"/>
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyresolvelog}"/>
</target>
<target name="ivy-resolve-test" depends="ivy-init-settings" unless="offline">
<echo message="Project: ${ant.project.name}"/>
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
conf="test" log="${ivyresolvelog}"/>
</target>
<target name="ivy-retrieve-test" depends="ivy-resolve-test"
description="Retrieve Ivy-managed artifacts">
<echo message="Project: ${ant.project.name}"/>
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyresolvelog}" conf="test"/>
</target>
<target name="ivy-resolve-hadoop-shim" depends="ivy-init-settings" unless="offline">
<echo message="Project: ${ant.project.name}"/>
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
conf="${ivy.hadoop.shim.conf}" log="${ivyresolvelog}"/>
</target>
<target name="ivy-retrieve-hadoop-shim" depends="ivy-resolve-hadoop-shim"
description="Retrieve Ivy-managed artifacts">
<echo message="Project: ${ant.project.name}"/>
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
log="${ivyresolvelog}" conf="${ivy.hadoop.shim.conf}"/>
</target>
<!-- the common classpath: Hive build output, bundled libs, and the default Ivy dependencies -->
<path id="common-classpath">
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${build.dir.hive}" includes="*/*.jar"/>
<fileset dir="${hive.root}/lib" includes="*.jar"/>
<fileset dir="${build.ivy.lib.dir}/default" includes="junit*.jar" />
<fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
<fileset dir="${build.ivy.lib.dir}/default" includes="*.jar"
excludes="**/hadoop-*.jar"
erroronmissingdir="false"/>
</path>
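<!-- Compile classpath: classes from the other Hive modules, this module's lib/ jars, plus the common classpath. -->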
<path id="classpath">
<pathelement location="${build.dir.hive}/service/classes"/>
<pathelement location="${build.dir.hive}/common/classes"/>
<pathelement location="${build.dir.hive}/serde/classes"/>
<pathelement location="${build.dir.hive}/metastore/classes"/>
<pathelement location="${build.dir.hive}/hcatalog/classes"/>
<pathelement location="${build.dir.hive}/ql/classes"/>
<pathelement location="${build.dir.hive}/beeline/classes"/>
<pathelement location="${build.dir.hive}/cli/classes"/>
<pathelement location="${build.dir.hive}/shims/classes"/>
<pathelement location="${build.dir.hive}/hwi/classes"/>
<pathelement location="${build.dir.hive}/jdbc/classes"/>
<pathelement location="${build.dir.hive}/hbase-handler/classes"/>
<fileset dir="${basedir}" includes="lib/*.jar"/>
<path refid="common-classpath"/>
</path>
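<!-- Create the build and test output directories and copy test resources into the build tree. -->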
<target name="create-dirs">
<echo message="Project: ${ant.project.name}"/>
<mkdir dir="${build.dir.hive}"/>
<mkdir dir="${build.dir}"/>
<mkdir dir="${build.classes}"/>
<mkdir dir="${build.dir.hive}/jexl/classes"/>
<mkdir dir="${build.dir.hadoop}"/>
<mkdir dir="${test.build.dir}"/>
<mkdir dir="${test.build.src}"/>
<mkdir dir="${test.build.classes}"/>
<mkdir dir="${test.build.resources}"/>
<copy todir="${test.build.resources}" failonerror="false">
<fileset dir="${test.resources.dir}"/>
</copy>
</target>
<target name="init" depends="create-dirs">
<echo message="Project: ${ant.project.name}"/>
</target>
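<!-- Create the per-run test data and log directories. -->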
<target name="test-init" depends="set-test-classpath">
<echo message="Project: ${ant.project.name}"/>
<mkdir dir="${test.data.dir}"/>
<mkdir dir="${test.log.dir}/clientpositive"/>
<mkdir dir="${test.log.dir}/beelinepositive"/>
<mkdir dir="${test.log.dir}/clientnegative"/>
<mkdir dir="${test.log.dir}/positive"/>
<mkdir dir="${test.log.dir}/negative"/>
<mkdir dir="${test.data.dir}/warehouse"/>
<mkdir dir="${test.data.dir}/metadb"/>
</target>
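<!-- Empty hook; module builds that import this file can override it for project-specific setup. -->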
<target name="setup">
<echo message="Project: ${ant.project.name}"/>
</target>
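<!-- Compile the main sources against the shared classpath; anything under ${src.dir}/conf is copied alongside the classes. -->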
<target name="compile" depends="init, setup, ivy-retrieve">
<echo message="Project: ${ant.project.name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
includes="**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
deprecation="${javac.deprecation}"
includeantruntime="false">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
<classpath refid="classpath"/>
</javac>
<copy todir="${build.classes}" failonerror="false">
<fileset dir="${src.dir}/conf"/>
</copy>
</target>
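<!-- Package the compiled classes as hive-${ant.project.name}-${version}.jar and publish it to the local Ivy resolver. -->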
<target name="jar" depends="make-pom,compile">
<echo message="Project: ${ant.project.name}" />
<jar
jarfile="${build.dir}/hive-${ant.project.name}-${version}.jar"
basedir="${build.classes}">
<manifest>
<!-- Not putting these in their own manifest section, since that inserts
a new-line, which breaks the reading of the attributes. -->
<attribute name="Implementation-Title" value="Hive"/>
<attribute name="Implementation-Version" value="${version}"/>
<attribute name="Implementation-Vendor" value="Apache"/>
</manifest>
<metainf dir="${hive.root}" includes="LICENSE,NOTICE"/>
</jar>
<ivy:publish settingsRef="${ant.project.name}.ivy.settings"
resolver="local" pubrevision="${version}" overwrite="true"
artifactspattern="${build.dir}/${ivy.publish.pattern}"/>
</target>
<!-- target to compile tests -->
<target name="compile-test" depends="compile,ivy-retrieve-test,set-test-classpath">
<echo message="Project: ${ant.project.name}"/>
<javac
encoding="${build.encoding}"
srcdir="${test.src.dir}"
includes="org/apache/**/hive/**/*.java"
excludes="**/TestSerDe.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}"
includeantruntime="false">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
<classpath refid="test.classpath"/>
</javac>
<javac
encoding="${build.encoding}"
srcdir="${test.build.src}"
includes="org/apache/**/hive/**/*.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}"
includeantruntime="false">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
<classpath refid="test.classpath"/>
</javac>
<!-- Generate a classpath to have YARN use downloaded dependencies. -->
<property name="mrapp-classpath" refid="test.classpath" />
<echo file="${test.build.classes}/mrapp-generated-classpath" message="${mrapp-classpath}" />
</target>
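<!-- Bundle compiled test UDF and SerDe classes into auxiliary jars consumed by the query tests. -->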
<target name="test-jar" depends="compile-test">
<echo message="Project: ${ant.project.name}"/>
<delete file="${test.build.dir}/test-udfs.jar"/>
<jar jarfile="${test.build.dir}/test-udfs.jar">
<fileset dir="${test.build.classes}" includes="**/udf/*.class"/>
<fileset dir="${test.build.classes}" includes="**/udf/generic/*.class"/>
</jar>
<delete file="${test.build.dir}/test-serdes.jar"/>
<jar jarfile="${test.build.dir}/test-serdes.jar">
<fileset dir="${test.build.classes}" includes="**/serde2/*.class"/>
</jar>
</target>
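<!-- Give unset test properties (qfile, overwrite, clustermode, ...) empty or false defaults so later targets can reference them unconditionally. -->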
<target name="test-conditions">
<echo message="Project: ${ant.project.name}"/>
<condition property="qfile" value="">
<not>
<isset property="qfile"/>
</not>
</condition>
<condition property="qfile_regex" value="">
<not>
<isset property="qfile_regex"/>
</not>
</condition>
<condition property="qfile_negative_regex" value="">
<not>
<isset property="qfile_negative_regex"/>
</not>
</condition>
<condition property="overwrite" value="false">
<not>
<isset property="overwrite"/>
</not>
</condition>
<condition property="standalone" value="false">
<not>
<isset property="standalone"/>
</not>
</condition>
<condition property="disableserver" value="false">
<not>
<isset property="disableserver"/>
</not>
</condition>
<condition property="clustermode" value="">
<not>
<isset property="clustermode"/>
</not>
</condition>
<condition property="run_disabled" value="false">
<not>
<isset property="run_disabled"/>
</not>
</condition>
</target>
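<!-- Placeholder for test code generation; modules that generate test sources into ${test.build.src} presumably override this target. -->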
<target name="gen-test">
<echo message="Project: ${ant.project.name}"/>
</target>
<!-- use pfile:/// as the warehouse file system on Hadoop 0.20 for non-miniMR runs -->
<condition property="test.warehouse.scheme" value="pfile://" else="">
<not>
<equals arg1="${clustermode}" arg2="miniMR" />
</not>
</condition>
<property name="test.warehouse.dir" value="${test.warehouse.scheme}${build.dir}/test/data/warehouse"/>
<property name="mapred.job.tracker" value="local"/>
<property name="fs.default.name" value="file:///"/>
<!-- target to run the tests -->
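<!-- Typical invocations: "ant test" runs every ${test.include} (Test*) class on the test classpath;
"ant test -Dtestcase=SomeTest" runs only that test class, and -Doverwrite=true is passed
through to the tests as test.output.overwrite. -->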
<target name="test"
depends="test-conditions,gen-test,compile-test,test-jar,test-init">
<echo message="Project: ${ant.project.name}"/>
<property name="hadoop.testcp" refid="test.classpath"/>
<if>
<equals arg1="${hadoop.mr.rev}" arg2="23" />
<then>
<property name="hadoop.opts" value="${hadoop.opts.23}" />
<property name="test.dfs.mkdir" value="-mkdir -p"/>
</then>
<else>
<property name="hadoop.opts" value="${hadoop.opts.20}" />
<property name="test.dfs.mkdir" value="-mkdir"/>
</else>
</if>
<!-- Set OS-specific settings so the JUnit tests run on Unix as well as on Windows -->
<if>
<equals arg1="windows" arg2="${os.family}"/>
<then>
<property name="junit.script.extension" value=".cmd"/>
</then>
<else>
<property name="junit.script.extension" value=""/>
</else>
</if>
<if>
<equals arg1="${test.print.classpath}" arg2="true" />
<then>
<echo message="Test Classpath: ${hadoop.testcp}"/>
</then>
</if>
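<!-- Run the JUnit suite in a forked JVM; the Hadoop/Hive test environment is passed via env vars and system properties. -->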
<junit showoutput="${test.output}" printsummary="yes" haltonfailure="${test.halt.on.failure}"
fork="yes" maxmemory="${test.junit.maxmemory}" dir="${basedir}" timeout="${test.junit.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off">
<jvmarg value="-XX:+HeapDumpOnOutOfMemoryError"/>
<jvmarg value="-XX:HeapDumpPath=${hive.root}"/>
<env key="LANG" value="${test.lang}"/>
<env key="HIVE_HADOOP_TEST_CLASSPATH" value="${hadoop.testcp}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
<env key="HADOOP_CLASSPATH" path="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.dfs.mkdir" value="${test.dfs.mkdir}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
<sysproperty key="test.service.disable.server" value="${disableserver}"/>
<sysproperty key="log4j.configuration" value="file:///${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
<sysproperty key="hive.aux.jars.path" value="file:///${test.build.dir}/test-udfs.jar"/>
<sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
<sysproperty key="ql.test.results.clientpositive.dir" value="${ql.test.results.clientpositive.dir}"/>
<sysproperty key="test.log.dir" value="${test.log.dir}"/>
<sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
<sysproperty key="test.silent" value="${test.silent}"/>
<sysproperty key="test.tmp.dir" value="${build.dir}/tmp"/>
<sysproperty key="test.src.data.dir" value="${test.src.data.dir}"/>
<sysproperty key="test.warehouse.dir" value="${test.warehouse.dir}"/>
<sysproperty key="test.build.resources" value="${test.build.resources}"/>
<sysproperty key="mapred.job.tracker" value="${mapred.job.tracker}"/>
<sysproperty key="fs.default.name" value="${fs.default.name}"/>
<sysproperty key="build.dir" value="${build.dir}"/>
<sysproperty key="build.dir.hive" value="${build.dir.hive}"/>
<sysproperty key="build.ivy.lib.dir" value="${build.ivy.lib.dir}"/>
<sysproperty key="derby.version" value="${derby.version}"/>
<sysproperty key="hive.root" value="${hive.root}"/>
<sysproperty key="hive.version" value="${version}"/>
<sysproperty key="java.net.preferIPv4Stack" value="${java.net.preferIPv4Stack}"/>
<sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>
<sysproperty key="test.concurrency.num.threads" value="${test.concurrency.num.threads}"/>
<sysproperty key="hive.home" value="${hive.root}/build/dist"/>
<jvmarg line="${junit.jvm.args}"/>
<classpath refid="test.local.classpath"/>
<formatter type="${test.junit.output.format}" usefile="${test.junit.output.usefile}" />
<batchtest todir="${test.build.dir}" unless="testcase">
<fileset dir="${test.build.classes}"
includes="**/${test.include}.class"
excludes="**/TestSerDe.class,**/TestHiveMetaStore.class,**/TestBeeLineDriver.class,**/TestHiveServer2Concurrency.class,**/*$*.class,${test.junit.exclude}" />
</batchtest>
<batchtest todir="${test.build.dir}" if="testcase">
<fileset dir="${test.build.classes}" includes="**/${testcase}.class"/>
</batchtest>
<assertions>
<enable />
</assertions>
</junit>
<fail if="tests.failed">Tests failed!</fail>
</target>
<target name="clean-test">
<echo message="Project: ${ant.project.name}"/>
<delete dir="${test.build.dir}"/>
<delete dir="${build.dir.hive}/ql/tmp"/>
<delete dir="${build.dir.hive}/test"/>
</target>
<target name="clean">
<echo message="Project: ${ant.project.name}"/>
<delete dir="${build.dir}"/>
</target>
<target name="check-thrift-home">
<echo message="Project: ${ant.project.name}"/>
<condition property="thrift.home.defined">
<or>
<not>
<isset property="thrift.home"/>
</not>
<equals arg1="${thrift.home}" arg2="$${thrift.home}" trim="true"/>
</or>
</condition>
</target>
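<!-- Regenerate Thrift bindings for every .thrift file under if/ and if/test/; requires thrift.home to point at a Thrift installation. -->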
<target name="thriftif" depends="check-thrift-home">
<echo message="Project: ${ant.project.name}"/>
<delete dir="${thrift.gen.dir}"/>
<mkdir dir="${thrift.gen.dir}"/>
<for param="thrift.file">
<path>
<fileset dir="." includes="if/*.thrift,if/test/*.thrift" />
</path>
<sequential>
<echo message="Generating Thrift code for @{thrift.file}"/>
<exec executable="${thrift.home}/bin/thrift" failonerror="true" dir=".">
<arg line="${thrift.args} -I ${basedir}/include -I ${basedir}/.. -o ${thrift.gen.dir} @{thrift.file} " />
</exec>
</sequential>
</for>
</target>
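<!-- Set ivy.present if this module ships its own ivy.xml. -->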
<target name="check-ivy" depends="ivy-init-settings">
<echo message="Project: ${ant.project.name}"/>
<available file="${basedir}/ivy.xml" property="ivy.present"/>
</target>
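<!-- Generate a Maven POM from ivy.xml and splice in the description, license, and SCM sections. -->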
<target name="make-pom" if="ivy.present" depends="check-ivy,ivy-resolve">
<echo message="Project: ${ant.project.name}"/>
<echo> Writing POM to ${build.dir}/pom.xml</echo>
<ivy:makepom ivyfile="${basedir}/ivy.xml" pomfile="${build.dir}/pom.xml">
<mapping conf="default" scope="compile" />
<mapping conf="runtime" scope="compile" />
<mapping conf="compile" scope="compile" />
</ivy:makepom>
<replace file="${build.dir}/pom.xml">
<replacetoken>&lt;dependencies&gt;</replacetoken>
<replacevalue>
&lt;description&gt;Hive is a data warehouse infrastructure built on top of Hadoop. See
http://wiki.apache.org/hadoop/Hive for more information.&lt;/description&gt;
&lt;licenses&gt;
&lt;license&gt;
&lt;name&gt;The Apache Software License, Version 2.0&lt;/name&gt;
&lt;url&gt;http://www.apache.org/licenses/LICENSE-2.0.txt&lt;/url&gt;
&lt;distribution&gt;repo&lt;/distribution&gt;
&lt;/license&gt;
&lt;/licenses&gt;
&lt;scm&gt;
&lt;url&gt;http://svn.apache.org/repos/asf/hive/&lt;/url&gt;
&lt;/scm&gt;
&lt;dependencies&gt;
</replacevalue>
</replace>
</target>
</project>