| <?xml version="1.0"?> |
| |
| <!-- |
| Licensed to the Apache Software Foundation (ASF) under one or more |
| contributor license agreements. See the NOTICE file distributed with |
| this work for additional information regarding copyright ownership. |
| The ASF licenses this file to You under the Apache License, Version 2.0 |
| (the "License"); you may not use this file except in compliance with |
| the License. You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software |
| distributed under the License is distributed on an "AS IS" BASIS, |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and |
| limitations under the License. |
| --> |
| |
| <project name="Hadoop" default="compile" |
| xmlns:artifact="urn:maven-artifact-ant" |
| xmlns:ivy="antlib:org.apache.ivy.ant"> |
| |
  <!-- Load all the default properties, and any the user wants to  -->
  <!-- contribute (without having to type -D or edit this file).   -->
| <property file="${user.home}/build.properties" /> |
| <property file="${basedir}/build.properties" /> |
| |
| <property name="Name" value="Hadoop"/> |
| <property name="name" value="hadoop"/> |
| <property name="version" value="0.20.203.1-SNAPSHOT"/> |
| <property name="final.name" value="${name}-${version}"/> |
| <property name="test.final.name" value="${name}-test-${version}"/> |
| <property name="year" value="2009"/> |
| |
| <property name="core.final.name" value="${name}-core-${version}"/> |
| <property name="test.final.name" value="${name}-test-${version}"/> |
| <property name="examples.final.name" value="${name}-examples-${version}"/> |
| <property name="tools.final.name" value="${name}-tools-${version}"/> |
| <property name="ant.final.name" value="${name}-ant-${version}"/> |
| <property name="streaming.final.name" value="${name}-streaming-${version}"/> |
| |
| <property name="src.dir" value="${basedir}/src"/> |
| <property name="core.src.dir" value="${src.dir}/core"/> |
| <property name="mapred.src.dir" value="${src.dir}/mapred"/> |
| <property name="hdfs.src.dir" value="${src.dir}/hdfs"/> |
| <property name="native.src.dir" value="${basedir}/src/native"/> |
| <property name="examples.dir" value="${basedir}/src/examples"/> |
| <property name="anttasks.dir" value="${basedir}/src/ant"/> |
| <property name="lib.dir" value="${basedir}/lib"/> |
| <property name="conf.dir" value="${basedir}/conf"/> |
| <property name="contrib.dir" value="${basedir}/src/contrib"/> |
| <property name="docs.src" value="${basedir}/src/docs"/> |
| <property name="src.docs.cn" value="${basedir}/src/docs/cn"/> |
| <property name="changes.src" value="${docs.src}/changes"/> |
| <property name="c++.src" value="${basedir}/src/c++"/> |
| <property name="c++.utils.src" value="${c++.src}/utils"/> |
| <property name="c++.pipes.src" value="${c++.src}/pipes"/> |
| <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/> |
| <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/> |
| <property name="librecordio.src" value="${c++.src}/librecordio"/> |
| <property name="tools.src" value="${basedir}/src/tools"/> |
| |
| <property name="xercescroot" value=""/> |
| <property name="build.dir" value="${basedir}/build"/> |
| <property name="build.classes" value="${build.dir}/classes"/> |
| <property name="build.src" value="${build.dir}/src"/> |
| <property name="build.tools" value="${build.dir}/tools"/> |
| <property name="build.webapps" value="${build.dir}/webapps"/> |
| <property name="build.examples" value="${build.dir}/examples"/> |
| <property name="build.anttasks" value="${build.dir}/ant"/> |
| <property name="build.librecordio" value="${build.dir}/librecordio"/> |
  <!-- convert spaces to _ so that Mac OS doesn't break things -->
| <exec executable="sed" inputstring="${os.name}" |
| outputproperty="nonspace.os"> |
| <arg value="s/ /_/g"/> |
| </exec> |
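  <!-- e.g. an os.name of "Mac OS X" yields nonspace.os "Mac_OS_X" and a
       build.platform such as Mac_OS_X-x86_64-64 (illustrative values). -->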
| <property name="build.platform" |
| value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/> |
| <property name="jvm.arch" |
| value="${sun.arch.data.model}"/> |
| <property name="build.native" value="${build.dir}/native/${build.platform}"/> |
| <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/> |
| <property name="build.c++.utils" value="${build.c++}/utils"/> |
| <property name="build.c++.pipes" value="${build.c++}/pipes"/> |
| <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/> |
| <property name="build.c++.examples.pipes" |
| value="${build.c++}/examples/pipes"/> |
| <property name="build.docs" value="${build.dir}/docs"/> |
| <property name="build.docs.cn" value="${build.dir}/docs/cn"/> |
| <property name="build.javadoc" value="${build.docs}/api"/> |
| <property name="build.javadoc.dev" value="${build.docs}/dev-api"/> |
| <property name="build.encoding" value="ISO-8859-1"/> |
| <property name="install.c++" value="${build.dir}/c++/${build.platform}"/> |
| <property name="install.c++.examples" |
| value="${build.dir}/c++-examples/${build.platform}"/> |
| |
| <property name="test.src.dir" value="${basedir}/src/test"/> |
| <property name="test.lib.dir" value="${basedir}/src/test/lib"/> |
| <property name="test.build.dir" value="${build.dir}/test"/> |
| <property name="test.generated.dir" value="${test.build.dir}/src"/> |
| <property name="test.build.data" value="${test.build.dir}/data"/> |
| <property name="test.cache.data" value="${test.build.dir}/cache"/> |
| <property name="test.debug.data" value="${test.build.dir}/debug"/> |
| <property name="test.log.dir" value="${test.build.dir}/logs"/> |
| <property name="test.build.classes" value="${test.build.dir}/classes"/> |
| <property name="test.build.testjar" value="${test.build.dir}/testjar"/> |
| <property name="test.build.testshell" value="${test.build.dir}/testshell"/> |
| <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/> |
| <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/> |
| <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/> |
| <property name="test.include" value="Test*"/> |
| <property name="test.classpath.id" value="test.classpath"/> |
| <property name="test.output" value="no"/> |
| <property name="test.timeout" value="900000"/> |
| <property name="test.junit.output.format" value="plain"/> |
| <property name="test.junit.fork.mode" value="perTest" /> |
| <property name="test.junit.printsummary" value="yes" /> |
| <property name="test.junit.haltonfailure" value="no" /> |
| <property name="test.junit.maxmemory" value="512m" /> |
| <property name="test.tools.input.dir" value="${basedir}/src/test/tools/data"/> |
| |
| <property name="test.commit.tests.file" value="${test.src.dir}/commit-tests" /> |
| <property name="test.smoke.tests.file" value="${test.src.dir}/smoke-tests" /> |
| <property name="test.all.tests.file" value="${test.src.dir}/all-tests" /> |
| |
| <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/> |
| <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/> |
| |
| <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/> |
| <property name="web.src.dir" value="${basedir}/src/web"/> |
| <property name="src.webapps" value="${basedir}/src/webapps"/> |
| |
| <property name="javadoc.link.java" |
| value="http://java.sun.com/javase/6/docs/api/"/> |
| <property name="javadoc.packages" value="org.apache.hadoop.*"/> |
| <property name="javadoc.maxmemory" value="512m" /> |
| |
| <property name="dist.dir" value="${build.dir}/${final.name}"/> |
| |
| <property name="javac.debug" value="on"/> |
| <property name="javac.optimize" value="on"/> |
| <property name="javac.deprecation" value="off"/> |
| <property name="javac.version" value="1.6"/> |
| <property name="javac.args" value=""/> |
| <property name="javac.args.warnings" value="-Xlint:unchecked"/> |
| |
| <property name="clover.db.dir" location="${build.dir}/test/clover/db"/> |
| <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/> |
| |
| <property name="rat.reporting.classname" value="rat.Report"/> |
| |
| <property name="jdiff.build.dir" value="${build.docs}/jdiff"/> |
| <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/> |
| <property name="jdiff.stable" value="0.20.9"/> |
| <property name="jdiff.stable.javadoc" |
| value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/> |
| |
| <property name="scratch.dir" value="${user.home}/tmp"/> |
| <property name="svn.cmd" value="svn"/> |
| <property name="grep.cmd" value="grep"/> |
| <property name="patch.cmd" value="patch"/> |
| <property name="make.cmd" value="make"/> |
| |
| <property name="jsvc.build.dir" value="${build.dir}/jsvc" /> |
| <property name="jsvc.install.dir" value="${dist.dir}/bin" /> |
| <property name="jsvc.location" value="http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-i386.tar.gz" /> |
| <property name="jsvc.dest.name" value="jsvc.tar.gz" /> |
| |
| <!-- task-controller properties set here --> |
  <!-- Source directory from which configure is run and files are copied -->
| |
| <property name="c++.task-controller.src" |
| value="${basedir}/src/c++/task-controller" /> |
  <!-- Directory where autoconf files, temporary files, and source are
       staged for compilation -->
| <property name="build.c++.task-controller" |
| value="${build.c++}/task-controller" /> |
| <property name="task-controller.prefix.dir" value="${dist.dir}" /> |
| <!-- the configuration directory for the linux task controller --> |
| <property name="hadoop.conf.dir" value="/etc/hadoop"/> |
| |
| <!-- end of task-controller properties --> |
| |
  <!-- IVY properties set here -->
| <property name="ivy.dir" location="ivy" /> |
| <loadproperties srcfile="${ivy.dir}/libraries.properties"/> |
| <property name="mvnrepo" value="http://repo2.maven.org/maven2"/> |
| <property name="asfrepo" value="https://repository.apache.org"/> |
| <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/> |
| <property name="ivy_repo_url" |
| value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/> |
| <property name="ant_task.jar" |
| location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/> |
| <property name="tsk.org" value="/org/apache/maven/maven-ant-tasks/"/> |
| <property name="ant_task_repo_url" |
| value="${mvnrepo}${tsk.org}${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/> |
| <property name="repo" value="snapshots"/> |
| <property name="asfsnapshotrepo" |
| value="${asfrepo}/content/repositories/snapshots"/> |
| <property name="asfstagingrepo" |
| value="${asfrepo}/service/local/staging/deploy/maven2"/> |
| <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/> |
| <property name="ivy.org" value="org.apache.hadoop"/> |
| <property name="build.dir" location="build" /> |
| <property name="dist.dir" value="${build.dir}/${final.name}"/> |
| <property name="build.ivy.dir" location="${build.dir}/ivy" /> |
| <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib"/> |
| <property name="common.ivy.lib.dir" |
| location="${build.ivy.lib.dir}/${ant.project.name}/common"/> |
| <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/> |
| |
| <property name="hadoop-core.pom" location="${ivy.dir}/hadoop-core-pom.xml"/> |
| <property name="hadoop-core-pom-template.xml" |
| location="${ivy.dir}/hadoop-core-pom-template.xml"/> |
| <property name="hadoop-core.jar" location="${build.dir}/${core.final.name}.jar"/> |
| <property name="hadoop-test.pom" location="${ivy.dir}/hadoop-test-pom.xml"/> |
| <property name="hadoop-test-pom-template.xml" |
| location="${ivy.dir}/hadoop-test-pom-template.xml" /> |
| <property name="hadoop-test.jar" location="${build.dir}/${test.final.name}.jar"/> |
| <property name="hadoop-tools.pom" location="${ivy.dir}/hadoop-tools-pom.xml"/> |
| <property name="hadoop-tools-pom-template.xml" |
| location="${ivy.dir}/hadoop-tools-pom-template.xml" /> |
| <property name="hadoop-tools.jar" location="${build.dir}/${tools.final.name}.jar"/> |
| <property name="hadoop-examples.pom" location="${ivy.dir}/hadoop-examples-pom.xml"/> |
| <property name="hadoop-examples-pom-template.xml" |
| location="${ivy.dir}/hadoop-examples-pom-template.xml"/> |
| <property name="hadoop-examples.jar" |
| location="${build.dir}/${examples.final.name}.jar"/> |
| <property name="hadoop-streaming.pom" |
| location="${ivy.dir}/hadoop-streaming-pom.xml"/> |
| <property name="hadoop-streaming-pom-template.xml" |
| location="${ivy.dir}/hadoop-streaming-pom-template.xml"/> |
| <property name="hadoop-streaming.jar" |
| location="${build.dir}/contrib/streaming/${streaming.final.name}.jar"/> |
| |
  <!-- This is the naming policy for artifacts we want pulled down -->
| <property name="ivy.artifact.retrieve.pattern" |
| value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/> |
| |
  <!-- This is how artifacts that get built are named -->
| <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/> |
| |
| <!-- jdiff.home property set --> |
| <property name="jdiff.home" |
| value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/> |
| <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/> |
| <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/> |
| |
| <property name="clover.jar" location="${clover.home}/lib/clover.jar"/> |
| <available property="clover.present" file="${clover.jar}" /> |
| |
| <!-- check if clover reports should be generated --> |
| <condition property="clover.enabled"> |
| <and> |
| <isset property="run.clover"/> |
| <isset property="clover.present"/> |
| </and> |
| </condition> |
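  <!-- Clover instrumentation thus requires both passing -Drun.clover (any
       value) on the command line and a clover.jar under ${clover.home}/lib. -->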
| |
| <condition property="staging"> |
| <equals arg1="${repo}" arg2="staging"/> |
| </condition> |
| |
| <!-- the normal classpath --> |
| <path id="classpath"> |
| <pathelement location="${build.classes}"/> |
| <fileset dir="${lib.dir}"> |
| <include name="**/*.jar" /> |
| <exclude name="**/excluded/" /> |
| </fileset> |
| <pathelement location="${conf.dir}"/> |
| <path refid="ivy-common.classpath"/> |
| </path> |
| |
| <!-- the unit test classpath: uses test.src.dir for configuration --> |
| <path id="test.classpath"> |
| <pathelement location="${test.build.extraconf}"/> |
| <pathelement location="${test.build.classes}" /> |
| <pathelement location="${test.src.dir}"/> |
| <pathelement location="${build.dir}"/> |
| <pathelement location="${build.examples}"/> |
| <pathelement location="${build.tools}"/> |
| <pathelement path="${clover.jar}"/> |
| <fileset dir="${test.lib.dir}"> |
| <include name="**/*.jar"/> |
| <exclude name="**/excluded/"/> |
| </fileset> |
| <path refid="classpath"/> |
| </path> |
| |
| <!-- the cluster test classpath: uses conf.dir for configuration --> |
| <path id="test.cluster.classpath"> |
| <path refid="classpath"/> |
| <pathelement location="${test.build.classes}" /> |
| <pathelement location="${test.src.dir}"/> |
| <pathelement location="${build.dir}"/> |
| </path> |
| |
| <!-- ====================================================== --> |
| <!-- Macro definitions --> |
| <!-- ====================================================== --> |
| <macrodef name="macro_tar" description="Worker Macro for tar"> |
| <attribute name="param.destfile"/> |
| <element name="param.listofitems"/> |
| <sequential> |
| <tar compression="gzip" longfile="gnu" |
| destfile="@{param.destfile}"> |
| <param.listofitems/> |
| </tar> |
| </sequential> |
| </macrodef> |
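  <!-- Usage sketch (mirrors the "tar" target near the end of this file):
         <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
           <param.listofitems>
             <tarfileset dir="${build.dir}" mode="664"/>
           </param.listofitems>
         </macro_tar>
  -->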
| |
| <!-- ====================================================== --> |
| <!-- Stuff needed by all targets --> |
| <!-- ====================================================== --> |
| <target name="init" depends="ivy-retrieve-common"> |
| <mkdir dir="${build.dir}"/> |
| <mkdir dir="${build.classes}"/> |
| <mkdir dir="${build.tools}"/> |
| <mkdir dir="${build.src}"/> |
| <mkdir dir="${build.webapps}/task/WEB-INF"/> |
| <mkdir dir="${build.webapps}/job/WEB-INF"/> |
| <mkdir dir="${build.webapps}/history/WEB-INF"/> |
| <mkdir dir="${build.webapps}/hdfs/WEB-INF"/> |
| <mkdir dir="${build.webapps}/datanode/WEB-INF"/> |
| <mkdir dir="${build.webapps}/secondary/WEB-INF"/> |
| <mkdir dir="${build.examples}"/> |
| <mkdir dir="${build.anttasks}"/> |
| <mkdir dir="${build.dir}/c++"/> |
| |
| <mkdir dir="${test.build.dir}"/> |
| <mkdir dir="${test.build.classes}"/> |
| <mkdir dir="${test.build.testjar}"/> |
| <mkdir dir="${test.build.testshell}"/> |
| <mkdir dir="${test.build.extraconf}"/> |
| <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/> |
| <touch millis="0" file="${touch.temp.file}"> |
| <fileset dir="${conf.dir}" includes="**/*.template"/> |
| <fileset dir="${contrib.dir}" includes="**/*.template"/> |
| </touch> |
| <delete file="${touch.temp.file}"/> |
    <!-- copy the static webapp files (JSPs are compiled separately) -->
| <copy todir="${build.webapps}"> |
| <fileset dir="${src.webapps}"> |
| <exclude name="**/*.jsp" /> |
| </fileset> |
| </copy> |
| |
| <copy todir="${conf.dir}" verbose="true"> |
| <fileset dir="${conf.dir}" includes="**/*.template"/> |
| <mapper type="glob" from="*.template" to="*"/> |
| </copy> |
| |
| <copy todir="${contrib.dir}" verbose="true"> |
| <fileset dir="${contrib.dir}" includes="**/*.template"/> |
| <mapper type="glob" from="*.template" to="*"/> |
| </copy> |
| |
| <exec executable="sh"> |
| <arg line="src/saveVersion.sh ${version} ${build.dir}"/> |
| </exec> |
| |
| <exec executable="sh"> |
| <arg line="src/fixFontsPath.sh ${src.docs.cn}"/> |
| </exec> |
| </target> |
| |
| <import file="${test.src.dir}/aop/build/aop.xml"/> |
| |
| <!-- ====================================================== --> |
| <!-- Compile the Java files --> |
| <!-- ====================================================== --> |
| <target name="record-parser" depends="init" if="javacc.home"> |
| <javacc |
| target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj" |
| outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated" |
| javacchome="${javacc.home}" /> |
| </target> |
| |
| <target name="compile-rcc-compiler" depends="init, record-parser"> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${core.src.dir}" |
| includes="org/apache/hadoop/record/compiler/**/*.java" |
| destdir="${build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args}"/> |
| <classpath refid="classpath"/> |
| </javac> |
| |
| <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask"> |
| <classpath refid="classpath" /> |
| </taskdef> |
| </target> |
| |
| <target name="compile-core-classes" depends="init, compile-rcc-compiler"> |
| <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" > |
| <classpath refid="test.classpath"/> |
| </taskdef> |
| <!-- Compile Java files (excluding JSPs) checking warnings --> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${core.src.dir}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="classpath"/> |
| </javac> |
| |
| <copy todir="${build.classes}"> |
| <fileset dir="${core.src.dir}" includes="**/*.properties"/> |
| <fileset dir="${core.src.dir}" includes="core-default.xml"/> |
| </copy> |
| |
| </target> |
| |
| <target name="compile-mapred-classes" depends="compile-core-classes,compile-hdfs-classes"> |
| <jsp-compile |
| uriroot="${src.webapps}/task" |
| outputdir="${build.src}" |
| package="org.apache.hadoop.mapred" |
| webxml="${build.webapps}/task/WEB-INF/web.xml"> |
| </jsp-compile> |
| |
    <!-- Generate Java sources from the history webapp JSPs -->
| <jsp-compile |
| uriroot="${src.webapps}/history" |
| outputdir="${build.src}" |
| package="org.apache.hadoop.mapred" |
| webxml="${build.webapps}/history/WEB-INF/web.xml"> |
| </jsp-compile> |
| |
| <copy todir="${build.webapps}/job"> |
| <fileset dir="${src.webapps}/job" includes="**/*.jsp"/> |
| <fileset dir="${src.webapps}/history" includes="**/*.jsp"/> |
| </copy> |
| |
| <jsp-compile |
| uriroot="${build.webapps}/job" |
| outputdir="${build.src}" |
| package="org.apache.hadoop.mapred" |
| webxml="${build.webapps}/job/WEB-INF/web.xml"> |
| </jsp-compile> |
| |
| <!-- Compile Java files (excluding JSPs) checking warnings --> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${mapred.src.dir};${build.src}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="classpath"/> |
| </javac> |
| |
| <copy todir="${build.classes}"> |
| <fileset dir="${mapred.src.dir}" includes="**/*.properties"/> |
| <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/> |
| </copy> |
| </target> |
| |
| <target name="compile-hdfs-classes" depends="compile-core-classes"> |
| <jsp-compile |
| uriroot="${src.webapps}/hdfs" |
| outputdir="${build.src}" |
| package="org.apache.hadoop.hdfs.server.namenode" |
| webxml="${build.webapps}/hdfs/WEB-INF/web.xml"> |
| </jsp-compile> |
| |
| <jsp-compile |
| uriroot="${src.webapps}/datanode" |
| outputdir="${build.src}" |
| package="org.apache.hadoop.hdfs.server.datanode" |
| webxml="${build.webapps}/datanode/WEB-INF/web.xml"> |
| </jsp-compile> |
| |
| <!-- Compile Java files (excluding JSPs) checking warnings --> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${hdfs.src.dir};${build.src}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="classpath"/> |
| </javac> |
| |
| <copy todir="${build.classes}"> |
| <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/> |
| <fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/> |
| </copy> |
| </target> |
| |
| <target name="compile-tools" depends="init"> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${tools.src}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${build.tools}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="classpath"/> |
| </javac> |
| |
| <copy todir="${build.tools}"> |
| <fileset |
| dir="${tools.src}" |
| includes="**/*.properties" |
| /> |
| </copy> |
| </target> |
| |
| <target name="compile-native"> |
| <antcall target="compile-core-native"> |
| <param name="compile.native" value="true"/> |
| </antcall> |
| </target> |
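  <!-- The compile-native target above simply re-enters compile-core-native
       with compile.native=true, keeping the native (JNI) build opt-in; run
       it as "ant compile-native". -->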
| |
| <target name="compile-core-native" depends="compile-core-classes" |
| if="compile.native"> |
| |
| <mkdir dir="${build.native}/lib"/> |
| <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/> |
| <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/> |
| <mkdir dir="${build.native}/src/org/apache/hadoop/security"/> |
| |
| <javah |
| classpath="${build.classes}" |
| destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib" |
| force="yes" |
| verbose="yes" |
| > |
| <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" /> |
| <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" /> |
| </javah> |
| |
| <javah |
| classpath="${build.classes}" |
| destdir="${build.native}/src/org/apache/hadoop/io/nativeio" |
| force="yes" |
| verbose="yes" |
| > |
| <class name="org.apache.hadoop.io.nativeio.NativeIO" /> |
| </javah> |
| <javah |
| classpath="${build.classes}" |
| destdir="${build.native}/src/org/apache/hadoop/security" |
| force="yes" |
| verbose="yes" |
| > |
| <class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" /> |
| </javah> |
| |
| <javah |
| classpath="${build.classes}" |
| destdir="${build.native}/src/org/apache/hadoop/security" |
| force="yes" |
| verbose="yes" |
| > |
| <class name="org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping" /> |
| </javah> |
| |
| <exec dir="${build.native}" executable="sh" failonerror="true"> |
| <env key="OS_NAME" value="${os.name}"/> |
| <env key="OS_ARCH" value="${os.arch}"/> |
| <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/> |
| <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/> |
| <arg line="${native.src.dir}/configure"/> |
| </exec> |
| |
| <exec dir="${build.native}" executable="${make.cmd}" failonerror="true"> |
| <env key="OS_NAME" value="${os.name}"/> |
| <env key="OS_ARCH" value="${os.arch}"/> |
| <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/> |
| <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/> |
| </exec> |
| |
| <exec dir="${build.native}" executable="sh" failonerror="true"> |
| <arg line="${build.native}/libtool --mode=install cp ${build.native}/libhadoop.la ${build.native}/lib"/> |
| </exec> |
| |
| </target> |
| |
| <target name="compile-core" |
| depends="clover,compile-core-classes,compile-mapred-classes, |
| compile-hdfs-classes,compile-core-native,compile-c++" |
| description="Compile core only"> |
| </target> |
| |
| <target name="compile-contrib" depends="compile-core,tools-jar,compile-c++-libhdfs"> |
| <subant target="compile"> |
| <property name="version" value="${version}"/> |
| <fileset file="${contrib.dir}/build.xml"/> |
| </subant> |
| </target> |
| |
| <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib"> |
| </target> |
| |
| <target name="compile-examples" |
| depends="compile-core,compile-tools,compile-c++-examples"> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${examples.dir}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${build.examples}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath> |
| <path refid="classpath"/> |
| <pathelement location="${build.tools}"/> |
| </classpath> |
| </javac> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Make hadoop.jar --> |
| <!-- ================================================================== --> |
| <!-- --> |
| <!-- ================================================================== --> |
| <target name="jar" depends="compile-core" description="Make hadoop.jar"> |
| <tar compression="gzip" destfile="${build.classes}/bin.tgz"> |
| <tarfileset dir="bin" mode="755"/> |
| </tar> |
| <property name="jar.properties.list" |
| value="commons-logging.properties, log4j.properties, hadoop-metrics.properties"/> |
| <jar jarfile="${build.dir}/${core.final.name}.jar" |
| basedir="${build.classes}"> |
| <manifest> |
| <section name="org/apache/hadoop"> |
| <attribute name="Implementation-Title" value="Hadoop"/> |
| <attribute name="Implementation-Version" value="${version}"/> |
| <attribute name="Implementation-Vendor" value="Apache"/> |
| </section> |
| </manifest> |
| <fileset dir="${conf.dir}" includes="${jar.properties.list}" /> |
| <fileset file="${jar.extra.properties.list}" /> |
| <zipfileset dir="${build.webapps}" prefix="webapps"/> |
| </jar> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Make the Hadoop examples jar. --> |
| <!-- ================================================================== --> |
| <!-- --> |
| <!-- ================================================================== --> |
| <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar."> |
| <macro-jar-examples |
| build.dir="${build.dir}" |
| basedir="${build.examples}"> |
| </macro-jar-examples> |
| </target> |
| |
| <macrodef name="macro-jar-examples"> |
| <attribute name="build.dir" /> |
| <attribute name="basedir" /> |
| <sequential> |
| <jar jarfile="@{build.dir}/${examples.final.name}.jar" |
| basedir="@{basedir}"> |
| <manifest> |
| <attribute name="Main-Class" |
| value="org/apache/hadoop/examples/ExampleDriver"/> |
| </manifest> |
| </jar> |
| </sequential> |
| </macrodef> |
| |
| <target name="tools-jar" depends="jar, compile-tools" |
| description="Make the Hadoop tools jar."> |
| <jar jarfile="${build.dir}/${tools.final.name}.jar" |
| basedir="${build.tools}"> |
| <manifest> |
| <attribute name="Main-Class" |
| value="org/apache/hadoop/examples/ExampleDriver"/> |
| </manifest> |
| </jar> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop) --> |
| <!-- ================================================================== --> |
| <!-- --> |
| <!-- ================================================================== --> |
| <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop)"> |
| <jar jarfile="${build.dir}/hadoop-metrics-dev-${version}.jar" |
| basedir="${build.classes}"> |
| <include name="**/metrics2/*.class" /> |
| <include name="**/metrics2/util/*.class" /> |
| </jar> |
| </target> |
| |
| <target name="generate-test-records" depends="compile-rcc-compiler"> |
| <recordcc destdir="${test.generated.dir}"> |
| <fileset dir="${test.src.dir}" |
| includes="**/*.jr" /> |
| </recordcc> |
| </target> |
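  <!-- The *.jr files above are Hadoop record definitions; the recordcc task
       (defined in compile-rcc-compiler) generates Java sources from them,
       which compile-core-test then compiles from ${test.generated.dir}. -->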
| |
| <!-- ================================================================== --> |
| <!-- Compile test code --> |
| <!-- ================================================================== --> |
| <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records"> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${test.generated.dir}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${test.build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args}" /> |
| <classpath refid="test.classpath"/> |
| </javac> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${test.src.dir}" |
| includes="org/apache/hadoop/**/*.java" |
| destdir="${test.build.classes}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="test.classpath"/> |
| </javac> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${test.src.dir}/testjar" |
| includes="*.java" |
| destdir="${test.build.testjar}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}" /> |
| <classpath refid="test.classpath"/> |
| </javac> |
| <delete file="${test.build.testjar}/testjob.jar"/> |
| <jar jarfile="${test.build.testjar}/testjob.jar" |
| basedir="${test.build.testjar}"> |
| </jar> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${test.src.dir}/testshell" |
| includes="*.java" |
| destdir="${test.build.testshell}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args} ${javac.args.warnings}"/> |
| <classpath refid="test.classpath"/> |
| </javac> |
| <delete file="${test.build.testshell}/testshell.jar"/> |
| <jar jarfile="${test.build.testshell}/testshell.jar" |
| basedir="${test.build.testshell}"> |
| </jar> |
| |
| <delete dir="${test.cache.data}"/> |
| <mkdir dir="${test.cache.data}"/> |
| <delete dir="${test.debug.data}"/> |
| <mkdir dir="${test.debug.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/> |
| <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Make hadoop-test.jar --> |
| <!-- ================================================================== --> |
| <!-- --> |
| <!-- ================================================================== --> |
| <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar"> |
| <jar jarfile="${build.dir}/${test.final.name}.jar" |
| basedir="${test.build.classes}"> |
| <manifest> |
| <attribute name="Main-Class" |
| value="org/apache/hadoop/test/AllTestDriver"/> |
| <section name="org/apache/hadoop"> |
| <attribute name="Implementation-Title" value="Hadoop"/> |
| <attribute name="Implementation-Version" value="${version}"/> |
| <attribute name="Implementation-Vendor" value="Apache"/> |
| </section> |
| </manifest> |
| </jar> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Fault injection customization section. |
| These targets ought to be copied over to other projects and modified |
| as needed --> |
| <!-- ================================================================== --> |
| <target name="-classes-compilation" depends="compile-core-classes, |
| compile-hdfs-classes, compile-mapred-classes, compile-core-test"/> |
| <target name="run-test-core-fault-inject" depends="injectfaults" |
| description="Run full set of the unit tests with fault injection"> |
| <macro-run-tests-fault-inject target.name="test-core" |
| testcasesonly="false"/> |
| </target> |
| |
| <target name="jar-test-fault-inject" depends="injectfaults" |
| description="Make hadoop-test-fi.jar"> |
| <macro-jar-test-fault-inject |
| target.name="jar-test" |
| jar.final.name="test.final.name" |
| jar.final.value="${test.final.name}-fi" /> |
| </target> |
| |
| <target name="jar-fault-inject" depends="injectfaults" |
| description="Make hadoop-fi.jar"> |
| <macro-jar-fault-inject |
| target.name="jar" |
| build.dir="${build-fi.dir}" |
| jar.final.name="final.name" |
| jar.final.value="${final.name}-fi" /> |
| </target> |
| |
  <!-- This target is not included in the top-level list of targets because
  it serves the special purpose of "regression" testing non-FI tests in an
  FI environment -->
| <target name="run-fault-inject-with-testcaseonly" depends="injectfaults"> |
| <fail unless="testcase">Can't run this target without -Dtestcase setting! |
| </fail> |
| <macro-run-tests-fault-inject target.name="test-core" |
| testcasesonly="true"/> |
| </target> |
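  <!-- Example invocation (TestFoo is a placeholder class name):
         ant run-fault-inject-with-testcaseonly -Dtestcase=TestFoo -->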
| <!-- ================================================================== --> |
| <!-- End of Fault injection customization section --> |
| <!-- ================================================================== --> |
| |
| <condition property="tests.notestcase"> |
| <and> |
| <isfalse value="${test.fault.inject}"/> |
| <not> |
| <isset property="testcase"/> |
| </not> |
| </and> |
| </condition> |
| <condition property="tests.notestcase.fi"> |
| <and> |
| <not> |
| <isset property="testcase" /> |
| </not> |
| <istrue value="${test.fault.inject}" /> |
| </and> |
| </condition> |
| <condition property="tests.testcase"> |
| <and> |
| <isfalse value="${test.fault.inject}" /> |
| <isset property="testcase" /> |
| </and> |
| </condition> |
| <condition property="tests.testcase.fi"> |
| <and> |
| <istrue value="${test.fault.inject}" /> |
| <isset property="testcase" /> |
| </and> |
| </condition> |
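  <!-- The four tests.* flags above choose which <batchtest> fires inside
       macro-test-runner, based on whether -Dtestcase was given and whether
       fault injection (test.fault.inject) is enabled. -->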
| <!-- ================================================================== --> |
| <!-- Define exclude lists for different kinds of testing --> |
| <!-- ================================================================== --> |
| <patternset id="empty.exclude.list.id" /> |
| <patternset id="commit.smoke.exclude.list.id"> |
| <excludesfile name="${test.commit.tests.file}"/> |
| <excludesfile name="${test.smoke.tests.file}"/> |
| </patternset> |
| |
| <!-- ================================================================== --> |
| <!-- Run unit tests --> |
| <!-- ================================================================== --> |
| <macrodef name="macro-test-runner"> |
| <attribute name="test.file" /> |
| <attribute name="classpath" /> |
| <attribute name="test.dir" /> |
| <attribute name="fileset.dir" /> |
| <attribute name="hadoop.conf.dir.deployed" default="" /> |
| <attribute name="test.krb5.conf" default="" /> |
| <attribute name="test.krb5.conf.filename" default="" /> |
| <attribute name="exclude.list.id" default="empty.exclude.list.id" /> |
| <sequential> |
| <delete file="${test.build.dir}/testsfailed"/> |
| <delete dir="@{test.dir}/data" /> |
| <mkdir dir="@{test.dir}/data" /> |
| <delete dir="@{test.dir}/logs" /> |
| <mkdir dir="@{test.dir}/logs" /> |
| <copy file="${test.src.dir}/hadoop-policy.xml" |
| todir="@{test.dir}/extraconf" /> |
| <copy file="${test.src.dir}/fi-site.xml" |
| todir="@{test.dir}/extraconf" /> |
| <junit showoutput="${test.output}" |
| printsummary="${test.junit.printsummary}" |
| haltonfailure="${test.junit.haltonfailure}" |
| fork="yes" |
| forkmode="${test.junit.fork.mode}" |
| maxmemory="${test.junit.maxmemory}" |
| dir="${basedir}" |
| timeout="${test.timeout}" |
| errorProperty="tests.failed" |
| failureProperty="tests.failed"> |
| <sysproperty key="test.build.data" value="${test.build.data}" /> |
| <sysproperty key="test.tools.input.dir" |
| value="${test.tools.input.dir}" /> |
| <sysproperty key="test.cache.data" value="${test.cache.data}" /> |
| <sysproperty key="test.debug.data" value="${test.debug.data}" /> |
| <sysproperty key="hadoop.log.dir" value="${test.log.dir}" /> |
| <sysproperty key="test.src.dir" value="${test.src.dir}" /> |
| <sysproperty key="taskcontroller-path" value="${taskcontroller-path}" /> |
| <sysproperty key="taskcontroller-ugi" value="${taskcontroller-ugi}" /> |
| <sysproperty key="test.build.extraconf" |
| value="@{test.dir}/extraconf" /> |
| <sysproperty key="@{test.krb5.conf}" |
| value="@{test.krb5.conf.filename}"/> |
| <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" /> |
| <sysproperty key="java.library.path" |
| value="${build.native}/lib:${lib.dir}/native/${build.platform}:${lib.file.path}" /> |
| <sysproperty key="install.c++.examples" |
| value="${install.c++.examples}" /> |
| <sysproperty key="testjar" |
| value="@{test.dir}/testjar" /> |
| <!-- System properties that are specifically set for system tests --> |
| <sysproperty key="test.system.hdrc.deployed.hadoopconfdir" |
| value="@{hadoop.conf.dir.deployed}" /> |
| <!-- set io.compression.codec.lzo.class in the child jvm only if it is set --> |
| <syspropertyset dynamic="no"> |
| <propertyref name="io.compression.codec.lzo.class" /> |
| </syspropertyset> |
| <!-- set compile.c++ in the child jvm only if it is set --> |
| <syspropertyset dynamic="no"> |
| <propertyref name="compile.c++" /> |
| </syspropertyset> |
| <classpath refid="@{classpath}" /> |
| <syspropertyset id="FaultProbabilityProperties"> |
| <propertyref regex="fi.*" /> |
| </syspropertyset> |
| <formatter type="${test.junit.output.format}" /> |
| <batchtest todir="@{test.dir}" if="tests.notestcase"> |
| <fileset dir="@{fileset.dir}" |
| excludes="**/${test.exclude}.java aop/** system/**"> |
| <patternset> |
| <includesfile name="@{test.file}"/> |
| </patternset> |
| <patternset refid="@{exclude.list.id}"/> |
| </fileset> |
| </batchtest> |
| <batchtest todir="${test.build.dir}" if="tests.notestcase.fi"> |
| <fileset dir="${test.src.dir}/aop" |
| includes="**/${test.include}.java" |
| excludes="**/${test.exclude}.java" /> |
| </batchtest> |
| <batchtest todir="@{test.dir}" if="tests.testcase"> |
| <fileset dir="@{fileset.dir}" |
| includes="**/${testcase}.java" excludes="aop/** system/**"/> |
| </batchtest> |
| <batchtest todir="${test.build.dir}" if="tests.testcase.fi"> |
| <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java" /> |
| </batchtest> |
      <!-- The following batch is for the special case where non-FI tests
           need to be executed against an FI environment -->
| <batchtest todir="${test.build.dir}" if="tests.testcaseonly"> |
| <fileset dir="${test.src.dir}" includes="**/${testcase}.java" /> |
| </batchtest> |
| </junit> |
| <antcall target="checkfailure"/> |
| </sequential> |
| </macrodef> |
| |
| <target name="test-core" depends="test-commit, test-smoke, |
| test-core-excluding-commit-and-smoke, |
| test-core-all-withtestcaseonly, jar-test" |
| description="Run core unit tests"> |
| </target> |
| |
| <target name="test-core-all-withtestcaseonly" depends="jar-test" if="testcase"> |
| <macro-test-runner test.file="${test.all.tests.file}" |
| classpath="${test.classpath.id}" |
| test.dir="${test.build.dir}" |
| fileset.dir="${test.src.dir}" |
| test.krb5.conf="java.security.krb5.conf" |
| test.krb5.conf.filename="${test.src.dir}/krb5.conf" |
| > |
| </macro-test-runner> |
| </target> |
| |
| <target name="test-core-excluding-commit-and-smoke" depends="jar-test" |
| unless="testcase"> |
| <macro-test-runner test.file="${test.all.tests.file}" |
| classpath="${test.classpath.id}" |
| test.dir="${test.build.dir}" |
| fileset.dir="${test.src.dir}" |
| test.krb5.conf="java.security.krb5.conf" |
| test.krb5.conf.filename="${test.src.dir}/krb5.conf" |
| exclude.list.id="commit.smoke.exclude.list.id" |
| > |
| </macro-test-runner> |
| </target> |
| |
| <target name="test-commit" depends="jar-test" |
| description="Run approx 10-minute set of unit tests prior to commiting" |
| unless="testcase"> |
| <macro-test-runner test.file="${test.commit.tests.file}" |
| classpath="${test.classpath.id}" |
| test.dir="${test.build.dir}" |
| fileset.dir="${test.src.dir}" |
| test.krb5.conf="java.security.krb5.conf" |
| test.krb5.conf.filename="${test.src.dir}/krb5.conf" |
| > |
| </macro-test-runner> |
| </target> |
| |
| <target name="test-smoke" depends="jar-test" |
| description="Run approx 30-minute set of functional tests prior to |
| guarantee that the build is not DOA" unless="testcase"> |
| <macro-test-runner test.file="${test.smoke.tests.file}" |
| classpath="${test.classpath.id}" |
| test.dir="${test.build.dir}" |
| fileset.dir="${test.src.dir}" |
| test.krb5.conf="java.security.krb5.conf" |
| test.krb5.conf.filename="${test.src.dir}/krb5.conf" |
| > |
| </macro-test-runner> |
| </target> |
| |
| <target name="checkfailure" if="tests.failed"> |
| <touch file="${test.build.dir}/testsfailed"/> |
| <fail unless="continueOnFailure">Tests failed!</fail> |
| </target> |
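  <!-- The top-level "test" target sets continueOnFailure, so a core-test
       failure only touches the marker file above and the contrib tests still
       run before the final <fail>. -->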
| |
| <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests"> |
| <subant target="test"> |
| <property name="version" value="${version}"/> |
| <property name="clover.jar" value="${clover.jar}"/> |
| <fileset file="${contrib.dir}/build.xml"/> |
| </subant> |
| </target> |
| |
| <target name="test" description="Run core, contrib, fault injection tests"> |
| <delete file="${test.build.dir}/testsfailed"/> |
| <property name="continueOnFailure" value="true"/> |
| <antcall target="test-core"/> |
| <antcall target="test-contrib"/> |
| <available file="${test.build.dir}/testsfailed" property="testsfailed"/> |
| <fail if="testsfailed">Tests failed!</fail> |
| </target> |
| |
| <!-- Run all unit tests, not just Test*, and use non-test configuration. --> |
| <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration."> |
| <antcall target="test"> |
| <param name="test.include" value="*"/> |
| <param name="test.classpath.id" value="test.cluster.classpath"/> |
| </antcall> |
| </target> |
| |
| <target name="nightly" depends="test, tar"> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Run optional third-party tool targets --> |
| <!-- ================================================================== --> |
| <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets"> |
| <taskdef resource="checkstyletask.properties"> |
| <classpath refid="checkstyle-classpath"/> |
| </taskdef> |
| |
| <mkdir dir="${test.build.dir}"/> |
| |
| <checkstyle config="${test.src.dir}/checkstyle.xml" |
| failOnViolation="false"> |
| <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/> |
| <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/> |
| <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/> |
| <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/> |
| </checkstyle> |
| |
| <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl" |
| in="${test.build.dir}/checkstyle-errors.xml" |
| out="${test.build.dir}/checkstyle-errors.html"/> |
| </target> |
| |
| <target name="check-for-checkstyle"> |
| <available property="checkstyle.present" resource="checkstyletask.properties"> |
| <classpath refid="checkstyle-classpath"/> |
| </available> |
| </target> |
| |
| <property name="findbugs.home" value=""/> |
| <target name="findbugs" depends="check-for-findbugs, tar" if="findbugs.present" description="Run findbugs if present"> |
| <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/> |
| <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/> |
| <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/> |
| <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/> |
| <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask" |
| classpath="${findbugs.home}/lib/findbugs-ant.jar" /> |
| |
| <mkdir dir="${findbugs.out.dir}"/> |
| |
| <findbugs home="${findbugs.home}" output="xml:withMessages" |
| outputFile="${findbugs.report.xmlfile}" effort="max" |
| excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M"> |
| <auxClasspath> |
| <fileset dir="${lib.dir}"> |
| <include name="**/*.jar"/> |
| </fileset> |
| <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common"> |
| <include name="**/*.jar"/> |
| </fileset> |
| </auxClasspath> |
| <sourcePath path="${core.src.dir}"/> |
| <sourcePath path="${mapred.src.dir}"/> |
| <sourcePath path="${hdfs.src.dir}"/> |
| <sourcePath path="${examples.dir}" /> |
| <sourcePath path="${tools.src}" /> |
| <sourcePath path="${basedir}/src/contrib/streaming/src/java" /> |
| <class location="${build.dir}/${core.final.name}.jar" /> |
| <class location="${build.dir}/${examples.final.name}.jar" /> |
| <class location="${build.dir}/${tools.final.name}.jar" /> |
| <class location="${build.dir}/contrib/streaming/${streaming.final.name}.jar" /> |
| </findbugs> |
| |
| <xslt style="${findbugs.home}/src/xsl/default.xsl" |
| in="${findbugs.report.xmlfile}" |
| out="${findbugs.report.htmlfile}"/> |
| </target> |
| |
| <target name="check-for-findbugs"> |
| <available property="findbugs.present" |
| file="${findbugs.home}/lib/findbugs.jar" /> |
| </target> |
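  <!-- Example invocation (path is illustrative):
         ant findbugs -Dfindbugs.home=/opt/findbugs
       findbugs is skipped when ${findbugs.home}/lib/findbugs.jar is absent. -->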
| |
| |
| <!-- ================================================================== --> |
| <!-- Documentation --> |
| <!-- ================================================================== --> |
| |
| <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home"> |
| <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest" |
| failonerror="true"> |
| <env key="JAVA_HOME" value="${java5.home}"/> |
| </exec> |
| <copy todir="${build.docs}"> |
| <fileset dir="${docs.src}/build/site/" /> |
| </copy> |
| <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/> |
| <style basedir="${core.src.dir}" destdir="${build.docs}" |
| includes="core-default.xml" style="conf/configuration.xsl"/> |
| <style basedir="${hdfs.src.dir}" destdir="${build.docs}" |
| includes="hdfs-default.xml" style="conf/configuration.xsl"/> |
| <style basedir="${mapred.src.dir}" destdir="${build.docs}" |
| includes="mapred-default.xml" style="conf/configuration.xsl"/> |
| <antcall target="changes-to-html"/> |
| <antcall target="cn-docs"/> |
| </target> |
| |
| <target name="cn-docs" depends="forrest.check, init" |
| description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." |
| if="forrest.home"> |
| <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true"> |
| <env key="LANG" value="en_US.utf8"/> |
| <env key="JAVA_HOME" value="${java5.home}"/> |
| </exec> |
| <copy todir="${build.docs.cn}"> |
| <fileset dir="${src.docs.cn}/build/site/" /> |
| </copy> |
| <style basedir="${core.src.dir}" destdir="${build.docs.cn}" |
| includes="core-default.xml" style="conf/configuration.xsl"/> |
| <style basedir="${hdfs.src.dir}" destdir="${build.docs.cn}" |
| includes="hdfs-default.xml" style="conf/configuration.xsl"/> |
| <style basedir="${mapred.src.dir}" destdir="${build.docs.cn}" |
| includes="mapred-default.xml" style="conf/configuration.xsl"/> |
| <antcall target="changes-to-html"/> |
| </target> |
| |
| <target name="forrest.check" unless="forrest.home" depends="java5.check"> |
| <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=<base of Apache Forrest installation> to Ant on the command-line." /> |
| </target> |
| |
| <target name="java5.check" unless="java5.home"> |
| <fail message="'java5.home' is not defined. Forrest requires Java 5. Please pass -Djava5.home=<base of Java 5 distribution> to Ant on the command-line." /> |
| </target> |
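  <!-- Example invocation (paths are illustrative):
         ant docs -Dforrest.home=/opt/apache-forrest -Djava5.home=/usr/java5 -->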
| |
| <target name="javadoc-dev" description="Generate javadoc for hadoop developers"> |
| <mkdir dir="${build.javadoc.dev}"/> |
| <javadoc |
| overview="${core.src.dir}/overview.html" |
| packagenames="org.apache.hadoop.*" |
| destdir="${build.javadoc.dev}" |
| author="true" |
| version="true" |
| use="true" |
| windowtitle="${Name} ${version} API" |
| doctitle="${Name} ${version} Developer API" |
| bottom="Copyright &copy; ${year} The Apache Software Foundation" |
| maxmemory="${javadoc.maxmemory}" |
| > |
| <packageset dir="${core.src.dir}"/> |
| <packageset dir="${mapred.src.dir}"/> |
| <packageset dir="${hdfs.src.dir}"/> |
| <packageset dir="${examples.dir}"/> |
| |
| <packageset dir="src/contrib/streaming/src/java"/> |
| <packageset dir="src/contrib/data_join/src/java"/> |
| <packageset dir="src/contrib/index/src/java"/> |
| |
| <link href="${javadoc.link.java}"/> |
| |
| <classpath > |
| <path refid="classpath" /> |
| <fileset dir="src/contrib/"> |
| <include name="*/lib/*.jar" /> |
| </fileset> |
| <pathelement path="${java.class.path}"/> |
| </classpath> |
| |
| <group title="Core" packages="org.apache.*"/> |
| <group title="Examples" packages="org.apache.hadoop.examples*"/> |
| |
| <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/> |
| <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/> |
| <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/> |
| |
| </javadoc> |
| </target> |
| |
| <target name="javadoc" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc"> |
| <mkdir dir="${build.javadoc}"/> |
| <javadoc |
| overview="${core.src.dir}/overview.html" |
| packagenames="org.apache.hadoop.*" |
| destdir="${build.javadoc}" |
| author="true" |
| version="true" |
| use="true" |
| windowtitle="${Name} ${version} API" |
| doctitle="${Name} ${version} API" |
| bottom="Copyright &copy; ${year} The Apache Software Foundation" |
| maxmemory="${javadoc.maxmemory}" |
| > |
| <packageset dir="${core.src.dir}"/> |
| <packageset dir="${mapred.src.dir}"/> |
| <packageset dir="${examples.dir}"/> |
| |
| <packageset dir="src/contrib/streaming/src/java"/> |
| <packageset dir="src/contrib/data_join/src/java"/> |
| <packageset dir="src/contrib/index/src/java"/> |
| <packageset dir="src/contrib/failmon/src/java/"/> |
| |
| <link href="${javadoc.link.java}"/> |
| |
| <classpath > |
| <path refid="classpath" /> |
| <fileset dir="src/contrib/"> |
| <include name="*/lib/*.jar" /> |
| </fileset> |
| <path refid="javadoc-classpath"/> |
| <pathelement path="${java.class.path}"/> |
| <pathelement location="${build.tools}"/> |
| </classpath> |
| |
| <group title="Core" packages="org.apache.*"/> |
| <group title="Examples" packages="org.apache.hadoop.examples*"/> |
| |
| <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/> |
| <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/> |
| <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/> |
| <group title="contrib: FailMon" packages="org.apache.hadoop.contrib.failmon*"/> |
| </javadoc> |
| </target> |
| |
| <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null"> |
| <javadoc maxmemory="${javadoc.maxmemory}"> |
| <doclet name="jdiff.JDiff" |
| path="${jdiff.jar}:${xerces.jar}"> |
| <param name="-apidir" value="${jdiff.xml.dir}"/> |
| <param name="-apiname" value="hadoop ${version}"/> |
| </doclet> |
| <packageset dir="src/core"/> |
| <packageset dir="src/mapred"/> |
| <packageset dir="src/tools"/> |
| <classpath > |
| <path refid="classpath" /> |
| <path refid="jdiff-classpath" /> |
| <pathelement path="${java.class.path}"/> |
| </classpath> |
| </javadoc> |
| </target> |
| |
| <target name="write-null"> |
| <exec executable="touch"> |
| <arg value="${jdiff.home}/Null.java"/> |
| </exec> |
| </target> |
| |
| <target name="api-report" depends="ivy-retrieve-jdiff,api-xml"> |
| <mkdir dir="${jdiff.build.dir}"/> |
| <javadoc sourcepath="src/core,src/hdfs,src,mapred,src/tools" |
| destdir="${jdiff.build.dir}" |
| sourceFiles="${jdiff.home}/Null.java" |
| maxmemory="${javadoc.maxmemory}"> |
| <doclet name="jdiff.JDiff" |
| path="${jdiff.jar}:${xerces.jar}"> |
| <param name="-oldapi" value="hadoop ${jdiff.stable}"/> |
| <param name="-newapi" value="hadoop ${version}"/> |
| <param name="-oldapidir" value="${jdiff.xml.dir}"/> |
| <param name="-newapidir" value="${jdiff.xml.dir}"/> |
| <param name="-javadocold" value="${jdiff.stable.javadoc}"/> |
| <param name="-javadocnew" value="../../api/"/> |
| <param name="-stats"/> |
| </doclet> |
| <classpath > |
| <path refid="classpath" /> |
| <path refid="jdiff-classpath"/> |
| <pathelement path="${java.class.path}"/> |
| </classpath> |
| </javadoc> |
| </target> |
| |
| <target name="changes-to-html" description="Convert CHANGES.txt into an html file"> |
| <mkdir dir="${build.docs}"/> |
| <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true"> |
| <arg value="${changes.src}/changes2html.pl"/> |
| </exec> |
| <copy todir="${build.docs}"> |
| <fileset dir="${changes.src}" includes="*.css"/> |
| </copy> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- D I S T R I B U T I O N --> |
| <!-- ================================================================== --> |
| <!-- --> |
| <!-- ================================================================== --> |
| <target name="package" depends="compile, jar, javadoc, docs, cn-docs, api-report, examples, tools-jar, jar-test, ant-tasks, package-librecordio, jsvc" |
| description="Build distribution"> |
| <mkdir dir="${dist.dir}"/> |
| <mkdir dir="${dist.dir}/lib"/> |
| <mkdir dir="${dist.dir}/contrib"/> |
| <mkdir dir="${dist.dir}/bin"/> |
| <mkdir dir="${dist.dir}/docs"/> |
| <mkdir dir="${dist.dir}/docs/api"/> |
| <mkdir dir="${dist.dir}/docs/jdiff"/> |
| |
| <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true"> |
| <fileset dir="${common.ivy.lib.dir}"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/lib" includeEmptyDirs="false"> |
| <fileset dir="lib"> |
| <exclude name="**/native/**"/> |
| </fileset> |
| </copy> |
| |
| <exec dir="${dist.dir}" executable="sh" failonerror="true"> |
| <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/> |
| <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/> |
| <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/> |
| <arg line="${native.src.dir}/packageNativeHadoop.sh"/> |
| </exec> |
| |
| <subant target="package"> |
| <!--Pass down the version in case it's needed again, and the target |
| distribution directory so contribs know where to install to.--> |
| <property name="version" value="${version}"/> |
| <property name="dist.dir" value="${dist.dir}"/> |
| <fileset file="${contrib.dir}/build.xml"/> |
| </subant> |
| |
| <copy todir="${dist.dir}/webapps"> |
| <fileset dir="${build.webapps}"/> |
| </copy> |
| |
| <copy todir="${dist.dir}"> |
| <fileset file="${build.dir}/${name}-*-${version}.jar"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/bin"> |
| <fileset dir="bin"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/conf"> |
| <fileset dir="${conf.dir}" excludes="**/*.template"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/docs"> |
| <fileset dir="${build.docs}"/> |
| </copy> |
| |
| <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/> |
| |
| <copy todir="${dist.dir}/ivy"> |
| <fileset dir="ivy"/> |
| </copy> |
| |
| <copy todir="${dist.dir}"> |
| <fileset dir="."> |
| <include name="*.txt" /> |
| </fileset> |
| </copy> |
| |
| <copy todir="${dist.dir}/src" includeEmptyDirs="true"> |
| <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/c++" includeEmptyDirs="false"> |
| <fileset dir="${build.dir}/c++"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/" file="build.xml"/> |
| |
| <chmod perm="ugo+x" type="file" parallel="false"> |
| <fileset dir="${dist.dir}/bin"/> |
| <fileset dir="${dist.dir}/src/contrib/"> |
| <include name="*/bin/*" /> |
| </fileset> |
| <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/> |
| </chmod> |
| <chmod perm="ugo+x" type="file"> |
| <fileset dir="${dist.dir}/src/c++/pipes/debug"/> |
| </chmod> |
| |
| </target> |
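| <!-- Usage sketch: "ant package" assembles the full distribution tree |
| (jars, docs, conf, bin, src, c++ and contrib) under ${dist.dir}. --> |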
| |
| <!-- ================================================================== --> |
| <!-- Make release tarball --> |
| <!-- ================================================================== --> |
| <target name="tar" depends="package" description="Make release tarball"> |
| <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz"> |
| <param.listofitems> |
| <tarfileset dir="${build.dir}" mode="664"> |
| <exclude name="${final.name}/bin/*" /> |
| <exclude name="${final.name}/contrib/*/bin/*" /> |
| <exclude name="${final.name}/src/contrib/ec2/bin/*" /> |
| <exclude name="${final.name}/src/contrib/ec2/bin/image/*" /> |
| <include name="${final.name}/**" /> |
| </tarfileset> |
| <tarfileset dir="${build.dir}" mode="755"> |
| <include name="${final.name}/bin/*" /> |
| <include name="${final.name}/contrib/*/bin/*" /> |
| <include name="${final.name}/src/contrib/ec2/bin/*" /> |
| <include name="${final.name}/src/contrib/ec2/bin/image/*" /> |
| </tarfileset> |
| </param.listofitems> |
| </macro_tar> |
| </target> |
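| <!-- Usage sketch: "ant tar" runs package and then rolls the distribution |
| into ${build.dir}/${final.name}.tar.gz, marking the bin/ scripts 755. --> |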
| |
| <target name="bin-package" depends="compile, jar, examples, tools-jar, jar-test, ant-tasks, package-librecordio, jsvc" |
| description="assembles artifacts for binary target"> |
| <mkdir dir="${dist.dir}"/> |
| <mkdir dir="${dist.dir}/lib"/> |
| <mkdir dir="${dist.dir}/contrib"/> |
| <mkdir dir="${dist.dir}/bin"/> |
| |
| <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true"> |
| <fileset dir="${common.ivy.lib.dir}"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/lib" includeEmptyDirs="false"> |
| <fileset dir="lib"> |
| <exclude name="**/native/**"/> |
| </fileset> |
| </copy> |
| |
| <exec dir="${dist.dir}" executable="sh" failonerror="true"> |
| <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/> |
| <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/> |
| <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/> |
| <arg line="${native.src.dir}/packageNativeHadoop.sh"/> |
| </exec> |
| |
| <subant target="package"> |
| <!--Pass down the version in case it's needed again, and the target |
| distribution directory so contribs know where to install to.--> |
| <property name="version" value="${version}"/> |
| <property name="dist.dir" value="${dist.dir}"/> |
| <fileset file="${contrib.dir}/build.xml"/> |
| </subant> |
| |
| <copy todir="${dist.dir}/webapps"> |
| <fileset dir="${build.webapps}"/> |
| </copy> |
| |
| <copy todir="${dist.dir}"> |
| <fileset file="${build.dir}/${name}-*-${version}.jar"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/bin"> |
| <fileset dir="bin"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/conf"> |
| <fileset dir="${conf.dir}" excludes="**/*.template"/> |
| </copy> |
| |
| <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/> |
| |
| <copy todir="${dist.dir}/ivy"> |
| <fileset dir="ivy"/> |
| </copy> |
| |
| <copy todir="${dist.dir}"> |
| <fileset dir="."> |
| <include name="*.txt" /> |
| </fileset> |
| </copy> |
| |
| <copy todir="${dist.dir}/c++" includeEmptyDirs="false"> |
| <fileset dir="${build.dir}/c++"/> |
| </copy> |
| |
| <copy todir="${dist.dir}/" file="build.xml"/> |
| |
| <chmod perm="ugo+x" type="file" parallel="false"> |
| <fileset dir="${dist.dir}/bin"/> |
| </chmod> |
| </target> |
| |
| <target name="binary-system" depends="bin-package, jar-system, jar-test-system" |
| description="make system test package for deployment"> |
| <copy todir="${system-test-build-dir}/${final.name}"> |
| <fileset dir="${dist.dir}"> |
| </fileset> |
| </copy> |
| <copy todir="${system-test-build-dir}/${final.name}" |
| file="${system-test-build-dir}/${core.final.name}.jar" overwrite="true"/> |
| <copy todir="${system-test-build-dir}/${final.name}" |
| file="${system-test-build-dir}/${test.final.name}.jar" overwrite="true"/> |
| <macro_tar |
| param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz"> |
| <param.listofitems> |
| <tarfileset dir="${system-test-build-dir}" mode="664"> |
| <exclude name="${final.name}/bin/*" /> |
| <exclude name="${final.name}/src/**" /> |
| <exclude name="${final.name}/docs/**" /> |
| <include name="${final.name}/**" /> |
| </tarfileset> |
| <tarfileset dir="${build.dir}" mode="755"> |
| <include name="${final.name}/bin/*" /> |
| </tarfileset> |
| </param.listofitems> |
| </macro_tar> |
| </target> |
| |
| <target name="binary" depends="bin-package" description="Make tarball without source and documentation"> |
| <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz"> |
| <param.listofitems> |
| <tarfileset dir="${build.dir}" mode="664"> |
| <exclude name="${final.name}/bin/*" /> |
| <exclude name="${final.name}/src/**" /> |
| <exclude name="${final.name}/docs/**" /> |
| <include name="${final.name}/**" /> |
| </tarfileset> |
| <tarfileset dir="${build.dir}" mode="755"> |
| <include name="${final.name}/bin/*" /> |
| </tarfileset> |
| </param.listofitems> |
| </macro_tar> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- Perform audit activities for the release --> |
| <!-- ================================================================== --> |
| <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit" description="Release Audit activities"> |
| <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]."/> |
| <java classname="${rat.reporting.classname}" fork="true"> |
| <classpath refid="releaseaudit-classpath"/> |
| <arg value="${build.dir}/${final.name}"/> |
| </java> |
| </target> |
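| <!-- Usage sketch: "ant releaseaudit" builds the package and then runs the |
| release audit tool (${rat.reporting.classname}) over ${build.dir}/${final.name}. --> |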
| |
| <!-- ================================================================== --> |
| <!-- Clean. Delete the build files, and their directories --> |
| <!-- ================================================================== --> |
| <target name="clean" depends="clean-contrib, clean-sign, clean-fi" description="Clean. Delete the build files, and their directories"> |
| <delete dir="${build.dir}"/> |
| <delete dir="${docs.src}/build"/> |
| <delete dir="${src.docs.cn}/build"/> |
| <delete file="${basedir}/ivy/hadoop-core-pom.xml"/> |
| <delete file="${basedir}/ivy/hadoop-test-pom.xml"/> |
| <delete file="${basedir}/ivy/hadoop-examples-pom.xml"/> |
| <delete file="${basedir}/ivy/hadoop-tools-pom.xml"/> |
| <delete file="${basedir}/ivy/hadoop-streaming-pom.xml"/> |
| </target> |
| |
| <target name="clean-sign" description="Clean. Delete .asc files"> |
| <delete> |
| <fileset dir="." includes="**/**/*.asc"/> |
| </delete> |
| </target> |
| |
| <target name="veryclean" depends="clean" description="Delete mvn ant task jar and ivy ant taks jar"> |
| <delete file="${ant_task.jar}"/> |
| <delete file="${ivy.jar}"/> |
| </target> |
| |
| |
| <!-- ================================================================== --> |
| <!-- Clean contrib target; also invoked via the clean target's depends --> |
| <!-- Using subant instead of ant as a workaround for Ant bug 30569 --> |
| <!-- ================================================================== --> |
| <target name="clean-contrib"> |
| <subant target="clean"> |
| <fileset file="src/contrib/build.xml"/> |
| </subant> |
| </target> |
| |
| <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs"> |
| <delete dir="${test.libhdfs.dir}"/> |
| <mkdir dir="${test.libhdfs.dir}"/> |
| <mkdir dir="${test.libhdfs.dir}/logs"/> |
| <mkdir dir="${test.libhdfs.dir}/hdfs/name"/> |
| |
| <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true"> |
| <env key="OS_NAME" value="${os.name}"/> |
| <env key="OS_ARCH" value="${os.arch}"/> |
| <env key="JVM_ARCH" value="${jvm.arch}"/> |
| <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/> |
| <env key="HADOOP_HOME" value="${basedir}"/> |
| <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/> |
| <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/> |
| <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/> |
| <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/> |
| <env key="LIB_DIR" value="${common.ivy.lib.dir}"/> |
| <arg value="test"/> |
| </exec> |
| </target> |
| |
| <!-- ================================================================== --> |
| <!-- librecordio targets. --> |
| <!-- ================================================================== --> |
| |
| <target name="compile-librecordio" depends="init" if="librecordio" > |
| <mkdir dir="${build.librecordio}"/> |
| <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true"> |
| <env key="XERCESCROOT" value="${xercescroot}"/> |
| <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/> |
| </exec> |
| </target> |
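| <!-- Usage sketch (paths are examples only): the librecordio targets run only |
| when the librecordio property is set, and the make step reads XERCESCROOT |
| from ${xercescroot}, e.g.: |
| ant compile-librecordio -Dlibrecordio=true -Dxercescroot=/usr/local/xerces-c |
| --> |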
| |
| <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio"> |
| <delete dir="${librecordio.test.dir}"/> |
| <mkdir dir="${librecordio.test.dir}"/> |
| <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true"> |
| <env key="HADOOP_HOME" value="${basedir}"/> |
| <env key="XERCESCROOT" value="${xercescroot}"/> |
| <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/> |
| <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/> |
| <arg value="all"/> |
| </exec> |
| </target> |
| |
| <target name="package-librecordio" depends="compile-librecordio" if="librecordio"> |
| <mkdir dir="${dist.dir}/librecordio"/> |
| <copy todir="${dist.dir}/librecordio"> |
| <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false"> |
| <exclude name="**/tests/**"/> |
| <exclude name="*.so"/> |
| <exclude name="*.o"/> |
| </fileset> |
| </copy> |
| <chmod perm="ugo+x" type="file"> |
| <fileset dir="${dist.dir}/librecordio"/> |
| </chmod> |
| </target> |
| |
| <target name="create-c++-configure" depends="init" if="compile.c++"> |
| <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes" |
| failonerror="yes"> |
| <arg value="-if"/> |
| </exec> |
| <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes" |
| failonerror="yes"> |
| <arg value="-if"/> |
| </exec> |
| <exec executable="autoreconf" dir="${c++.examples.pipes.src}" |
| searchpath="yes" failonerror="yes"> |
| <arg value="-if"/> |
| </exec> |
| <antcall target="create-c++-configure-libhdfs"/> |
| </target> |
| |
| <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs"> |
| <exec executable="autoreconf" dir="${c++.libhdfs.src}" |
| searchpath="yes" failonerror="yes"> |
| <arg value="-if"/> |
| </exec> |
| </target> |
| |
| <target name="check-c++-makefiles" depends="init" if="compile.c++"> |
| <condition property="need.c++.utils.makefile"> |
| <not> <available file="${build.c++.utils}/Makefile"/> </not> |
| </condition> |
| <condition property="need.c++.pipes.makefile"> |
| <not> <available file="${build.c++.pipes}/Makefile"/> </not> |
| </condition> |
| <condition property="need.c++.examples.pipes.makefile"> |
| <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not> |
| </condition> |
| </target> |
| |
| <target name="check-c++-libhdfs"> |
| <condition property="islibhdfs"> |
| <and> |
| <isset property="compile.c++"/> |
| <isset property="libhdfs"/> |
| </and> |
| </condition> |
| </target> |
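| <!-- Note: Ant's if= only tests that a property is set, not its value, so any |
| value enables these c++ targets. Sketch (values are examples only): |
| ant compile-c++-libhdfs -Dcompile.c++=true -Dlibhdfs=true |
| --> |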
| |
| <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs"> |
| <condition property="need.c++.libhdfs.makefile"> |
| <not> <available file="${build.c++.libhdfs}/Makefile"/> </not> |
| </condition> |
| </target> |
| |
| <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs" |
| if="need.c++.libhdfs.makefile"> |
| <mkdir dir="${build.c++.libhdfs}"/> |
| <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/> |
| <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}" |
| failonerror="yes"> |
| <env key="ac_cv_func_malloc_0_nonnull" value="yes"/> |
| <env key="JVM_ARCH" value="${jvm.arch}"/> |
| <arg value="--prefix=${install.c++}"/> |
| </exec> |
| </target> |
| |
| <target name="create-c++-utils-makefile" depends="check-c++-makefiles" |
| if="need.c++.utils.makefile"> |
| <mkdir dir="${build.c++.utils}"/> |
| <chmod file="${c++.utils.src}/configure" perm="ugo+x"/> |
| <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}" |
| failonerror="yes"> |
| <arg value="--prefix=${install.c++}"/> |
| </exec> |
| </target> |
| |
| <target name="compile-c++-utils" depends="create-c++-utils-makefile" |
| if="compile.c++"> |
| <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes" |
| failonerror="yes"> |
| <arg value="install"/> |
| </exec> |
| </target> |
| |
| <target name="create-c++-pipes-makefile" depends="check-c++-makefiles" |
| if="need.c++.pipes.makefile"> |
| <mkdir dir="${build.c++.pipes}"/> |
| <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/> |
| <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}" |
| failonerror="yes"> |
| <arg value="--prefix=${install.c++}"/> |
| </exec> |
| </target> |
| |
| <target name="compile-c++-pipes" |
| depends="create-c++-pipes-makefile,compile-c++-utils" |
| if="compile.c++"> |
| <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes" |
| failonerror="yes"> |
| <arg value="install"/> |
| </exec> |
| </target> |
| |
| <target name="compile-c++" |
| depends="compile-c++-pipes"/> |
| |
| <target name="create-c++-examples-pipes-makefile" |
| depends="check-c++-makefiles" |
| if="need.c++.examples.pipes.makefile"> |
| <mkdir dir="${build.c++.examples.pipes}"/> |
| <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/> |
| <exec executable="${c++.examples.pipes.src}/configure" |
| dir="${build.c++.examples.pipes}" |
| failonerror="yes"> |
| <arg value="--prefix=${install.c++.examples}"/> |
| <arg value="--with-hadoop-utils=${install.c++}"/> |
| <arg value="--with-hadoop-pipes=${install.c++}"/> |
| </exec> |
| </target> |
| |
| <target name="compile-c++-examples-pipes" |
| depends="create-c++-examples-pipes-makefile,compile-c++-pipes" |
| if="compile.c++"> |
| <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes" |
| failonerror="yes"> |
| <arg value="install"/> |
| </exec> |
| </target> |
| |
| <target name="compile-c++-examples" |
| depends="compile-c++-examples-pipes"/> |
| |
| <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs"> |
| <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes" |
| failonerror="yes"> |
| <env key="ac_cv_func_malloc_0_nonnull" value="yes"/> |
| <env key="JVM_ARCH" value="${jvm.arch}"/> |
| <arg value="install"/> |
| </exec> |
| </target> |
| |
| |
| |
| <target name="compile-ant-tasks" depends="compile-core"> |
| <javac |
| encoding="${build.encoding}" |
| srcdir="${anttasks.dir}" |
| includes="org/apache/hadoop/ant/**/*.java" |
| destdir="${build.anttasks}" |
| debug="${javac.debug}" |
| optimize="${javac.optimize}" |
| target="${javac.version}" |
| source="${javac.version}" |
| deprecation="${javac.deprecation}"> |
| <compilerarg line="${javac.args}"/> |
| <classpath refid="classpath"/> |
| </javac> |
| </target> |
| |
| <target name="ant-tasks" depends="jar, compile-ant-tasks"> |
| <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml" |
| todir="${build.anttasks}/org/apache/hadoop/ant"/> |
| <jar destfile="${build.dir}/${ant.final.name}.jar"> |
| <fileset dir="${build.anttasks}"/> |
| </jar> |
| </target> |
| |
| |
| |
| <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line."/> |
| |
| <target name="clover.setup" if="clover.enabled"> |
| <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/> |
| <mkdir dir="${clover.db.dir}"/> |
| <clover-setup initString="${clover.db.dir}/hadoop_coverage.db"> |
| <fileset dir="${src.dir}" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/> |
| <testsources dir="${test.src.dir}" /> |
| </clover-setup> |
| </target> |
| |
| <target name="clover.info" unless="clover.present"> |
| <echo> |
| Clover not found. Code coverage reports disabled. |
| </echo> |
| </target> |
| |
| <target name="clover.check"> |
| <fail unless="clover.present"> |
| ################################################################## |
| Clover not found. |
| Please specify -Dclover.home=&lt;base of clover installation&gt; |
| on the command line. |
| ################################################################## |
| </fail> |
| </target> |
| |
| <target name="generate-clover-reports" depends="clover.check, clover"> |
| <mkdir dir="${clover.report.dir}"/> |
| <clover-report> |
| <current outfile="${clover.report.dir}" title="${final.name}"> |
| <format type="html"/> |
| </current> |
| </clover-report> |
| <clover-report> |
| <current outfile="${clover.report.dir}/clover.xml" title="${final.name}"> |
| <format type="xml"/> |
| </current> |
| </clover-report> |
| </target> |
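| <!-- Usage sketch (the clover.home path is an example only): |
| ant generate-clover-reports -Dclover.home=/opt/clover -Drun.clover=true |
| writes HTML and XML coverage reports into ${clover.report.dir}. --> |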
| |
| <target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present"> |
| <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." /> |
| </target> |
| |
| <target name="patch.check" unless="patch.file"> |
| <fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." /> |
| </target> |
| |
| <target name="test-patch" depends="patch.check,findbugs.check,forrest.check"> |
| <exec executable="bash" failonerror="true"> |
| <arg value="${basedir}/src/test/bin/test-patch.sh"/> |
| <arg value="DEVELOPER"/> |
| <arg value="${patch.file}"/> |
| <arg value="${scratch.dir}"/> |
| <arg value="${svn.cmd}"/> |
| <arg value="${grep.cmd}"/> |
| <arg value="${patch.cmd}"/> |
| <arg value="${findbugs.home}"/> |
| <arg value="${forrest.home}"/> |
| <arg value="${basedir}"/> |
| <arg value="${java5.home}"/> |
| </exec> |
| </target> |
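| <!-- Usage sketch (all paths below are examples only): |
| ant test-patch -Dpatch.file=/tmp/fix.patch -Dscratch.dir=/tmp/scratch |
| -Dfindbugs.home=/opt/findbugs -Dforrest.home=/opt/forrest |
| The remaining arguments (svn.cmd, grep.cmd, patch.cmd, java5.home) must |
| likewise resolve to valid values when not already defined. --> |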
| |
| <target name="hudson-test-patch" depends="findbugs.check,forrest.check"> |
| <exec executable="bash" failonerror="true"> |
| <arg value="${basedir}/src/test/bin/test-patch.sh"/> |
| <arg value="HUDSON"/> |
| <arg value="${scratch.dir}"/> |
| <arg value="${support.dir}"/> |
| <arg value="${ps.cmd}"/> |
| <arg value="${wget.cmd}"/> |
| <arg value="${jiracli.cmd}"/> |
| <arg value="${svn.cmd}"/> |
| <arg value="${grep.cmd}"/> |
| <arg value="${patch.cmd}"/> |
| <arg value="${findbugs.home}"/> |
| <arg value="${forrest.home}"/> |
| <arg value="${eclipse.home}"/> |
| <arg value="${python.home}"/> |
| <arg value="${basedir}"/> |
| <arg value="${trigger.url}"/> |
| <arg value="${jira.passwd}"/> |
| <arg value="${java5.home}"/> |
| </exec> |
| </target> |
| |
| <target name="eclipse-files" depends="init" |
| description="Generate files for Eclipse"> |
| <pathconvert property="eclipse.project"> |
| <path path="${basedir}"/> |
| <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/> |
| </pathconvert> |
| <copy todir="." overwrite="true"> |
| <fileset dir=".eclipse.templates"> |
| <exclude name="**/README.txt"/> |
| </fileset> |
| <filterset> |
| <filter token="PROJECT" value="${eclipse.project}"/> |
| </filterset> |
| </copy> |
| </target> |
| |
| <target name="ivy-init-dirs"> |
| <mkdir dir="${build.ivy.dir}" /> |
| <mkdir dir="${build.ivy.lib.dir}" /> |
| <mkdir dir="${build.ivy.report.dir}" /> |
| </target> |
| |
| <target name="ivy-probe-antlib" > |
| <condition property="ivy.found"> |
| <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/> |
| </condition> |
| </target> |
| |
| <target name="ivy-download" description="To download ivy" unless="offline"> |
| <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/> |
| </target> |
| |
| <!-- |
| To avoid Ivy leaking state across big projects, always load Ivy in the same classloader. |
| Note that loading is skipped when Ivy is already present (see the ivy.found probe above). |
| --> |
| <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found"> |
| <typedef uri="antlib:org.apache.ivy.ant" onerror="fail" |
| loaderRef="ivyLoader"> |
| <classpath> |
| <pathelement location="${ivy.jar}"/> |
| </classpath> |
| </typedef> |
| <fail > |
| <condition > |
| <not> |
| <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/> |
| </not> |
| </condition> |
| You need Apache Ivy 2.0 or later from http://ant.apache.org/ |
| It could not be loaded from ${ivy_repo_url} |
| </fail> |
| </target> |
| |
| |
| <target name="ivy-init" depends="ivy-init-antlib" > |
| |
| <!--Configure Ivy by reading in the settings file. |
| If a settings file has already been loaded into this settings ID, that one takes priority. |
| --> |
| <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/> |
| </target> |
| |
| <target name="ivy-resolve" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/> |
| </target> |
| |
| <target name="ivy-resolve-javadoc" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/> |
| </target> |
| |
| <target name="ivy-resolve-releaseaudit" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/> |
| </target> |
| |
| <target name="ivy-resolve-test" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" /> |
| </target> |
| |
| <target name="ivy-resolve-common" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" /> |
| </target> |
| |
| <target name="ivy-resolve-jdiff" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" /> |
| </target> |
| |
| <target name="ivy-resolve-checkstyle" depends="ivy-init"> |
| <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/> |
| </target> |
| |
| <target name="ivy-retrieve" depends="ivy-resolve" |
| description="Retrieve Ivy-managed artifacts"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| </target> |
| |
| <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle" |
| description="Retrieve Ivy-managed artifacts for the checkstyle configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/> |
| </target> |
| |
| <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff" |
| description="Retrieve Ivy-managed artifacts for the javadoc configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/> |
| </target> |
| |
| <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc" |
| description="Retrieve Ivy-managed artifacts for the javadoc configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/> |
| </target> |
| |
| <target name="ivy-retrieve-test" depends="ivy-resolve-test" |
| description="Retrieve Ivy-managed artifacts for the test configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| <ivy:cachepath pathid="test.classpath" conf="test"/> |
| </target> |
| |
| <target name="ivy-retrieve-common" depends="ivy-resolve-common" |
| description="Retrieve Ivy-managed artifacts for the compile configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/> |
| <ivy:cachepath pathid="ivy-common.classpath" conf="common"/> |
| </target> |
| |
| <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit" |
| description="Retrieve Ivy-managed artifacts for the compile configurations"> |
| <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" |
| pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" /> |
| <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/> |
| <available classname="${rat.reporting.classname}" |
| classpathref="releaseaudit-classpath" property="rat.present" value="true"/> |
| </target> |
| |
| <target name="ivy-report" depends="ivy-resolve-releaseaudit" |
| description="Generate"> |
| <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/> |
| <echo> |
| Reports generated: ${build.ivy.report.dir} |
| </echo> |
| </target> |
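| <!-- Usage sketch: "ant ivy-report" resolves the dependencies and writes Ivy's |
| HTML dependency reports into ${build.ivy.report.dir}. --> |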
| |
| <target name="ant-task-download" description="To download mvn-ant-task"> |
| <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/> |
| </target> |
| |
| <target name="mvn-taskdef" depends="ant-task-download"> |
| <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/> |
| <typedef resource="org/apache/maven/artifact/ant/antlib.xml" |
| uri="urn:maven-artifact-ant" |
| classpathref="mvn-ant-task.classpath"/> |
| </target> |
| |
| <target name="mvn-install" depends="mvn-taskdef,bin-package,set-version" |
| description="To install hadoop core and test jars to local filesystem's m2 cache"> |
| <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/> |
| <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/> |
| <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/> |
| <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/> |
| <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/> |
| |
| <artifact:install file="${hadoop-core.jar}"> |
| <pom refid="hadoop.core"/> |
| </artifact:install> |
| <artifact:install file="${hadoop-test.jar}"> |
| <pom refid="hadoop.test"/> |
| </artifact:install> |
| <artifact:install file="${hadoop-tools.jar}"> |
| <pom refid="hadoop.tools"/> |
| </artifact:install> |
| <artifact:install file="${hadoop-examples.jar}"> |
| <pom refid="hadoop.examples"/> |
| </artifact:install> |
| <artifact:install file="${hadoop-streaming.jar}"> |
| <pom refid="hadoop.streaming"/> |
| </artifact:install> |
| </target> |
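| <!-- Usage sketch: "ant mvn-install" regenerates the POMs via set-version and |
| installs the core, test, tools, examples and streaming jars into the local |
| Maven (m2) cache. --> |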
| |
| <target name="mvn-deploy" depends="mvn-taskdef, bin-package, set-version, signanddeploy, simpledeploy" |
| description="To deploy hadoop core and test jar's to apache maven repository"/> |
| |
| <target name="signanddeploy" if="staging" depends="sign"> |
| <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/> |
| <artifact:pom file="${hadoop-test.pom}" id="hadoop.core.test"/> |
| <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/> |
| <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/> |
| <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/> |
| <artifact:install-provider artifactId="wagon-http" |
| version="${wagon-http.version}"/> |
| <artifact:deploy file="${hadoop-core.jar}"> |
| <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/> |
| <pom refid="hadoop.core"/> |
| <attach file="${hadoop-core.jar}.asc" type="jar.asc"/> |
| <attach file="${hadoop-core.pom}.asc" type="pom.asc"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-test.jar}"> |
| <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/> |
| <pom refid="hadoop.core.test"/> |
| <attach file="${hadoop-test.jar}.asc" type="jar.asc"/> |
| <attach file="${hadoop-test.pom}.asc" type="pom.asc"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-tools.jar}"> |
| <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/> |
| <pom refid="hadoop.tools"/> |
| <attach file="${hadoop-tools.jar}.asc" type="jar.asc"/> |
| <attach file="${hadoop-tools.pom}.asc" type="pom.asc"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-examples.jar}"> |
| <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/> |
| <pom refid="hadoop.examples"/> |
| <attach file="${hadoop-examples.jar}.asc" type="jar.asc"/> |
| <attach file="${hadoop-examples.pom}.asc" type="pom.asc"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-streaming.jar}"> |
| <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/> |
| <pom refid="hadoop.streaming"/> |
| <attach file="${hadoop-streaming.jar}.asc" type="jar.asc"/> |
| <attach file="${hadoop-streaming.pom}.asc" type="pom.asc"/> |
| </artifact:deploy> |
| </target> |
| |
| <target name="sign" depends="clean-sign" if="staging"> |
| <input message="password:>" addproperty="gpg.passphrase"> |
| <handler classname="org.apache.tools.ant.input.SecureInputHandler" /> |
| </input> |
| <macrodef name="sign-artifact" description="Signs the artifact"> |
| <attribute name="input.file"/> |
| <attribute name="output.file" default="@{input.file}.asc"/> |
| <attribute name="gpg.passphrase"/> |
| <sequential> |
| <echo>Signing @{input.file} Sig File: @{output.file}</echo> |
| <exec executable="gpg" > |
| <arg value="--armor"/> |
| <arg value="--output"/> |
| <arg value="@{output.file}"/> |
| <arg value="--passphrase"/> |
| <arg value="@{gpg.passphrase}"/> |
| <arg value="--detach-sig"/> |
| <arg value="@{input.file}"/> |
| </exec> |
| </sequential> |
| </macrodef> |
| <sign-artifact input.file="${hadoop-core.jar}" |
| output.file="${hadoop-core.jar}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-test.jar}" |
| output.file="${hadoop-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-tools.jar}" |
| output.file="${hadoop-tools.jar}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-examples.jar}" |
| output.file="${hadoop-examples.jar}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-streaming.jar}" |
| output.file="${hadoop-streaming.jar}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-core.pom}" |
| output.file="${hadoop-core.pom}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-test.pom}" |
| output.file="${hadoop-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-tools.pom}" |
| output.file="${hadoop-tools.pom}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-examples.pom}" |
| output.file="${hadoop-examples.pom}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| <sign-artifact input.file="${hadoop-streaming.pom}" |
| output.file="${hadoop-streaming.pom}.asc" gpg.passphrase="${gpg.passphrase}"/> |
| </target> |
| |
| <target name="simpledeploy" unless="staging"> |
| <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/> |
| <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/> |
| <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/> |
| <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/> |
| <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/> |
| |
| <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/> |
| <artifact:deploy file="${hadoop-core.jar}"> |
| <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/> |
| <pom refid="hadoop.core"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-test.jar}"> |
| <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/> |
| <pom refid="hadoop.test"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-examples.jar}"> |
| <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/> |
| <pom refid="hadoop.examples"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-tools.jar}"> |
| <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/> |
| <pom refid="hadoop.tools"/> |
| </artifact:deploy> |
| <artifact:deploy file="${hadoop-streaming.jar}"> |
| <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/> |
| <pom refid="hadoop.streaming"/> |
| </artifact:deploy> |
| </target> |
| |
| <target name="set-version"> |
| <delete file="${hadoop-core.pom}"/> |
| <delete file="${hadoop-test.pom}"/> |
| <delete file="${hadoop-examples.pom}"/> |
| <delete file="${hadoop-tools.pom}"/> |
| <delete file="${hadoop-streaming.pom}"/> |
| <copy file="${hadoop-core-pom-template.xml}" tofile="${hadoop-core.pom}"/> |
| <copy file="${hadoop-test-pom-template.xml}" tofile="${hadoop-test.pom}"/> |
| <copy file="${hadoop-examples-pom-template.xml}" tofile="${hadoop-examples.pom}"/> |
| <copy file="${hadoop-tools-pom-template.xml}" tofile="${hadoop-tools.pom}"/> |
| <copy file="${hadoop-streaming-pom-template.xml}" tofile="${hadoop-streaming.pom}"/> |
| <replaceregexp byline="true"> |
| <regexp pattern="@version"/> |
| <substitution expression="${version}"/> |
| <fileset dir="${basedir}/ivy"> |
| <include name="hadoop-core-pom.xml"/> |
| <include name="hadoop-test-pom.xml"/> |
| <include name="hadoop-tools-pom.xml"/> |
| <include name="hadoop-examples-pom.xml"/> |
| <include name="hadoop-streaming-pom.xml"/> |
| </fileset> |
| </replaceregexp> |
| </target> |
| |
| <!-- taskcontroller targets --> |
| <target name="task-controller" depends="init"> |
| <exec executable="autoreconf" |
| dir="${c++.task-controller.src}" |
| searchpath="yes" failonerror="yes"> |
| <arg value="-i"/> |
| </exec> |
| <mkdir dir="${build.c++.task-controller}" /> |
| <exec executable="${c++.task-controller.src}/configure" |
| dir="${build.c++.task-controller}"> |
| <arg value="--prefix=${task-controller.prefix.dir}"/> |
| <env key="CFLAGS" |
| value="-DHADOOP_CONF_DIR=${hadoop.conf.dir}"/> |
| </exec> |
| <!-- delete main in case HADOOP_CONF_DIR is different --> |
| <delete file="${build.c++.task-controller}/impl/main.o" |
| quiet="true" failonerror="false"/> |
| <exec executable="make" |
| dir="${build.c++.task-controller}" |
| searchpath="yes" failonerror="yes"> |
| <arg value="install"/> |
| </exec> |
| </target> |
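| <!-- Usage sketch (property values are examples only): |
| ant task-controller -Dhadoop.conf.dir=/etc/hadoop -Dtask-controller.prefix.dir=/usr/local |
| The configure step bakes HADOOP_CONF_DIR into CFLAGS, which is why main.o is |
| deleted first whenever the conf dir may have changed. --> |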
| |
| <target name="test-task-controller" depends="init,task-controller"> |
| <exec executable="make" |
| dir="${build.c++.task-controller}" |
| searchpath="yes" failonerror="yes"> |
| <arg value="check"/> |
| </exec> |
| </target> |
| |
| <!-- end of task-controller targets --> |
| |
| <target name="jsvc" > |
| <mkdir dir="${jsvc.build.dir}" /> |
| <get src="${jsvc.location}" dest="${jsvc.build.dir}/${jsvc.dest.name}" /> |
| |
| <untar compression="gzip" src="${jsvc.build.dir}/${jsvc.dest.name}" dest="${jsvc.build.dir}" /> |
| |
| <copy file="${jsvc.build.dir}/jsvc" todir="${jsvc.install.dir}" verbose="true" /> |
| <chmod perm="ugo+x" type="file"> |
| <fileset file="${jsvc.install.dir}/jsvc"/> |
| </chmod> |
| </target> |
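| <!-- Usage sketch: "ant jsvc" fetches the jsvc archive from ${jsvc.location}, |
| unpacks it into ${jsvc.build.dir} and installs an executable copy of jsvc |
| into ${jsvc.install.dir}. --> |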
| |
| </project> |