<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="aspects"
xmlns:artifact="urn:maven-artifact-ant">
<!-- The following are duplications and have to be customized elsewhere too -->
<!-- TODO this version has to be updated synchronously with Ivy -->
<property name="aspectversion" value="1.6.5"/>
<!-- TODO this has to be changed synchronously with the version property in build.xml -->
<!-- this works around test-patch setting its own 'version' property -->
<property name="project.version" value="0.23.12-SNAPSHOT"/>
<!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
<property file="${basedir}/build.properties"/>
<!-- Properties related to system fault injection and tests -->
<property name="system-test-build-dir" value="${build-fi.dir}/system"/>
<!-- This variable is set by the respective injection targets -->
<property name="hadoop.instrumented.jar" value=""/>
<!-- Properties specifically for system fault-injections and system tests -->
<property name="herriot.suffix" value="instrumented"/>
<property name="instrumented.final.name"
value="${name}-${herriot.suffix}-${version}"/>
<property name="hadoop-hdfs-instrumented.pom"
location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}.xml" />
<property name="hadoop-hdfs-instrumented-test.pom"
location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}-test.xml" />
<property name="hadoop-hdfs-instrumented.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
<property name="hadoop-hdfs-instrumented-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
<property name="hadoop-hdfs-instrumented-test.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
<property name="hadoop-hdfs-instrumented-test-sources.jar"
location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
<!-- All Fault Injection (FI) related targets are located in this section -->
<target name="clean-fi">
<delete dir="${build-fi.dir}"/>
</target>
<!-- Weaving aspects in place
Later one can run 'ant jar-fault-inject' to create
a Hadoop jar file with the instrumented classes
-->
<!-- Target -classes-compilation has to be defined in build.xml and
needs to depend on the classes compilation and test classes compilation
targets. This is a poor man's parametrization of targets; a sketch of
such a hook follows. -->
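<!-- A minimal sketch of the expected hook, assuming build.xml compiles
main and test classes via targets named 'compile' and 'compile-hdfs-test'
(these names are assumptions; adjust to the actual compilation targets):
<target name="-classes-compilation"
        depends="compile, compile-hdfs-test"/>
-->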
<target name="-compile-fault-inject" depends="-classes-compilation" >
<!-- AspectJ task definition -->
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
<pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
<path id="aspect.path">
<pathelement location="${hadoop.instrumented.jar}"/>
</path>
<iajc
encoding="${build.encoding}"
srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/classification/tools/**/*, org/apache/hadoop/record/**/*"
destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}"
fork="true"
maxmem="256m">
<aspectpath refid="aspect.path"/>
<classpath refid="test.classpath"/>
</iajc>
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
<filterchain>
<linecontainsregexp>
<regexp pattern='iajc.*warning'/>
</linecontainsregexp>
</filterchain>
</loadfile>
<fail if="injection.failure">
Broken binding of advices: ${line.separator}${injection.failure}
</fail>
<echo message="Weaving of aspects is finished"/>
</target>
<!-- Classpath for running system tests -->
<path id="test.system.classpath">
<pathelement location="${hadoop.conf.dir.deployed}" />
<pathelement location="${system-test-build-dir}/test/extraconf" />
<pathelement location="${system-test-build-dir}/test/classes" />
<pathelement location="${system-test-build-dir}/classes" />
<pathelement location="${test.src.dir}" />
<pathelement location="${build-fi.dir}" />
<pathelement location="${build-fi.dir}/tools" />
<pathelement path="${clover.jar}" />
<fileset dir="${system-test-build-dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
</fileset>
<path refid="classpath" />
</path>
<!-- ================ -->
<!-- run system tests -->
<!-- ================ -->
<target name="test-system" depends="init, ivy-retrieve-system"
description="Run system tests">
<subant buildpath="build.xml" target="jar-test-system"/>
<macro-test-runner test.file="${test.hdfs.all.tests.file}"
suite.type="system/test"
classpath="test.system.classpath"
test.dir="${system-test-build-dir}/test"
fileset.dir="${test.src.dir}"
hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
</macro-test-runner>
</target>
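<!-- Example invocation, a sketch: the deployed configuration directory is
expected from the caller, e.g.
ant test-system -Dhadoop.conf.dir.deployed=/etc/hadoop/conf
(the path is illustrative only) -->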
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
<weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
src.dir="${test.src.dir}/aop"
aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/common/hadoop-common-${project.version}.jar">
</weave-injectfault-aspects>
</target>
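<!-- Typical usage (a sketch; 'jar-fault-inject' is assumed to be provided
by build.xml, as the weaving comment above suggests):
ant injectfaults
ant jar-fault-inject
-->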
<!-- =============================================================== -->
<!-- Create the instrumented hadoop-hdfs jar required to be deployed on -->
<!-- a cluster for system tests -->
<!-- =============================================================== -->
<target name="jar-system"
depends="inject-system-faults"
description="Make hadoop-hdfs-instrumented.jar with system injections.">
<macro-jar-fault-inject target.name="jar"
build.dir="${system-test-build-dir}"
jar.final.name="final.name"
jar.final.value="${instrumented.final.name}">
</macro-jar-fault-inject>
<jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
update="yes">
<fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
<fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
</jar>
</target>
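<!-- Example (a sketch): 'ant jar-system' leaves
${system-test-build-dir}/hadoop-hdfs-instrumented-<version>.jar and the
matching sources jar ready for cluster deployment. -->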
<target name="jar-test-system" depends="inject-system-faults, compile-test-system"
description="Make hadoop-hdfs-instrumented-test.jar with system injections.">
<subant buildpath="build.xml" target="-do-jar-test">
<property name="build.dir" value="${system-test-build-dir}"/>
<property name="test.hdfs.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
<property name="test.hdfs.build.classes"
value="${system-test-build-dir}/test/classes"/>
</subant>
<jar jarfile="${hadoop-hdfs-instrumented-test-sources.jar}">
<fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
</jar>
</target>
<target name="compile-test-system" description="Compiles system tests">
<subant buildpath="build.xml" target="-compile-test-system.wrapper">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
</target>
<target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-hdfs, ivy-retrieve-system">
<macro-compile-hdfs-test
target.dir="${system-test-build-dir}/test/classes"
source.dir="${test.src.dir}/system/test"
dest.dir="${system-test-build-dir}/test/classes"
classpath="test.system.classpath"/>
</target>
<macrodef name="weave-injectfault-aspects">
<attribute name="dest.dir" />
<attribute name="src.dir" />
<attribute name="aspects.jar"/>
<attribute name="base.build.dir" default="${build-fi.dir}"/>
<sequential>
<subant buildpath="build.xml" target="-compile-fault-inject"
output="${compile-inject.output}">
<property name="build.dir" value="@{base.build.dir}" />
<property name="src.dir.path" value="@{src.dir}" />
<property name="dest.dir" value="@{dest.dir}" />
<property name="hadoop.instrumented.jar" value="@{aspects.jar}"/>
</subant>
</sequential>
</macrodef>
<target name="inject-system-faults"
description="Inject system faults">
<mkdir dir="${system-test-build-dir}"/>
<delete file="${compile-inject.output}"/>
<subant buildpath="build.xml" target="ivy-retrieve-system">
<property name="build.dir" value="${system-test-build-dir}"/>
</subant>
<weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
aspects.jar="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar"
base.build.dir="${system-test-build-dir}">
</weave-injectfault-aspects>
</target>
<macrodef name="macro-run-tests-fault-inject">
<attribute name="target.name" />
<attribute name="testcasesonly" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="test.fault.inject" value="yes"/>
<property name="test.include" value="TestFi*"/>
<!-- This one is needed for the special "regression" target only -->
<property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
</subant>
</sequential>
</macrodef>
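<!-- A sketch of a caller (the target names are assumptions; similar
wiring typically lives in build.xml):
<target name="run-test-hdfs-fault-inject" depends="injectfaults">
<macro-run-tests-fault-inject target.name="run-test-hdfs"
testcasesonly="false"/>
</target>
-->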
<!-- ================================================================== -->
<!-- Make hadoop-fi.jar including all Fault injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
<attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
</subant>
</sequential>
</macrodef>
<!-- ================================================================== -->
<!-- Make test jar files including all Fault Injected artifacts -->
<!-- ================================================================== -->
<macrodef name="macro-jar-test-fault-inject">
<attribute name="target.name" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
<property name="build.dir" value="${build-fi.dir}"/>
<property name="@{jar.final.name}"
value="@{jar.final.value}"/>
</subant>
</sequential>
</macrodef>
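<!-- A sketch of a caller (attribute values are assumptions; the actual
wiring is expected in build.xml):
<macro-jar-test-fault-inject target.name="jar-test"
jar.final.name="test.final.name"
jar.final.value="${name}-test-fi-${version}"/>
-->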
<!-- End of Fault Injection (FI) related section -->
<!-- Start of cluster controller binary target -->
<property name="runAs.src"
value ="${test.src.dir}/system/c++/runAs"/>
<property name="runAs.build.dir"
value="${system-test-build-dir}/c++-build"/>
<property name="runAs.configure.script"
value="${runAs.build.dir}/configure"/>
<target name="init-runAs-build">
<condition property="runAs.parameters.passed">
<not>
<equals arg1="${run-as.hadoop.home.dir}"
arg2="$${run-as.hadoop.home.dir}"/>
</not>
</condition>
<fail unless="runAs.parameters.passed"
message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
<mkdir dir="${runAs.build.dir}"/>
<copy todir="${runAs.build.dir}" overwrite="true">
<fileset dir="${runAs.src}" includes="**/*"/>
</copy>
<chmod perm="+x" file="${runAs.configure.script}"/>
</target>
<target name="configure-runAs"
depends="init-runAs-build">
<exec executable="${runAs.configure.script}"
dir="${runAs.build.dir}" failonerror="true">
<arg value="--with-home=${run-as.hadoop.home.dir}"/>
</exec>
</target>
<target name="run-as" depends="configure-runAs">
<exec executable="${make.cmd}" dir="${runAs.build.dir}"
searchpath="yes" failonerror="yes">
<arg value="all" />
</exec>
</target>
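<!-- Example (a sketch; the home path is illustrative only):
ant run-as -Drun-as.hadoop.home.dir=/usr/local/hadoop
-->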
<!-- End of cluster controller binary target -->
<!-- Install Herriot artifacts into the local Maven repository -->
<target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
<artifact:pom file="${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
id="hadoop.hdfs.${herriot.suffix}.test"/>
<artifact:install file="${hadoop-hdfs-instrumented.jar}">
<pom refid="hadoop.hdfs.${herriot.suffix}"/>
<attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
</artifact:install>
<artifact:install file="${hadoop-hdfs-instrumented-test.jar}">
<pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
<attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
</artifact:install>
</target>
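<!-- Note: targets whose names start with a dash cannot be invoked from
the Ant command line; -mvn-system-install and -mvn-system-deploy are
expected to be wired into public install/deploy targets in build.xml
(an assumption based on the naming convention). -->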
<target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
<property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
<artifact:pom file="${hadoop-hdfs-instrumented.pom}"
id="hadoop.hdfs.${herriot.suffix}"/>
<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
id="hadoop.hdfs.${herriot.suffix}.test"/>
<artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
<artifact:deploy file="${hadoop-hdfs-instrumented.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.hdfs.${herriot.suffix}"/>
<attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
</artifact:deploy>
<artifact:deploy file="${hadoop-hdfs-instrumented-test.jar}">
<remoteRepository id="apache.snapshots.https" url="${repourl}"/>
<pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
<attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
</artifact:deploy>
</target>
<!-- End of Maven -->
</project>