Merge trunk into the federation branch (HDFS-1052); merge is complete.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/branches/HDFS-1052@1090029 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES.txt b/CHANGES.txt
index 8a57aed..320d934 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -560,9 +560,6 @@
HDFS-1426. Remove unused method BlockInfo#listCount. (hairong)
- HDFS-1456. Provide builder for constructing instances of MiniDFSCluster.
- (jghoman)
-
HDFS-1472. Allow programmatic access to fsck output.
(Ramkumar Vadali via dhruba)
@@ -620,6 +617,9 @@
HDFS-1582. Remove auto-generated native build files. (rvs via eli)
+ HDFS-1456. Provide builder for constructing instances of MiniDFSCluster.
+ (jghoman)
+
OPTIMIZATIONS
HDFS-1140. Speedup INode.getPathComponents. (Dmytro Molkov via shv)
@@ -777,6 +777,7 @@
HDFS-1625. Ignore disk space values in TestDataNodeMXBean. (szetszwo)
+Release 0.21.1 - Unreleased
HDFS-1466. TestFcHdfsSymlink relies on /tmp/test not existing. (eli)
HDFS-874. TestHDFSFileContextMainOperations fails on weirdly
@@ -1445,9 +1446,6 @@
HDFS-806. Add new unit tests to the 10-mins 'run-commit-test' target (cos)
- HDFS-1411. Correct backup node startup command in hdfs user guide.
- (Ching-Shen Chen via shv)
-
OPTIMIZATIONS
HDFS-946. NameNode should not return full path name when listing a
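Note on the relocated HDFS-1456 entry above: the builder it refers to replaces the old many-argument MiniDFSCluster constructors. A hedged sketch of the pattern as it appears in the test hunks further down; only numDataNodes(), build(), waitActive(), getFileSystem() and shutdown() are taken from this patch or from well-known MiniDFSCluster methods, the rest of the class is illustrative.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class MiniDFSClusterBuilderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // HDFS-1456: builder replaces the old multi-argument MiniDFSCluster constructors.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(1)   // one in-process datanode
        .build();
    try {
      cluster.waitActive();                      // block until NN and DN report in
      FileSystem fs = cluster.getFileSystem();   // client FileSystem bound to the mini cluster
      System.out.println("Cluster up, default block size = " + fs.getDefaultBlockSize());
    } finally {
      cluster.shutdown();
    }
  }
}
```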
diff --git a/build.xml b/build.xml
index f5e898d..80ab204 100644
--- a/build.xml
+++ b/build.xml
@@ -81,7 +81,6 @@
<property name="test.cache.data" value="${test.build.dir}/cache"/>
<property name="test.debug.data" value="${test.build.dir}/debug"/>
<property name="test.log.dir" value="${test.build.dir}/logs"/>
- <property name="test.build.classes" value="${test.build.dir}/classes"/>
<property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
<property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
<property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
@@ -96,7 +95,7 @@
<property name="test.junit.maxmemory" value="512m" />
<property name="test.conf.dir" value="${build.dir}/test/conf" />
- <property name="test.hdfs.build.classes" value="${test.build.dir}/classes"/>
+ <property name="test.hdfs.build.classes" value="${test.build.dir}/hdfs/classes"/>
<property name="test.hdfs.commit.tests.file" value="${test.src.dir}/commit-tests" />
<property name="test.hdfs.smoke.tests.file" value="${test.src.dir}/smoke-tests" />
@@ -239,7 +238,7 @@
<!-- the cluster test classpath: uses conf.dir for configuration -->
<path id="test.cluster.classpath">
<path refid="classpath"/>
- <pathelement location="${test.build.classes}" />
+ <pathelement location="${test.hdfs.build.classes}" />
<pathelement location="${test.src.dir}"/>
<pathelement location="${build.dir}"/>
</path>
@@ -273,7 +272,7 @@
<mkdir dir="${build.dir}/c++"/>
<mkdir dir="${test.build.dir}"/>
- <mkdir dir="${test.build.classes}"/>
+ <mkdir dir="${test.hdfs.build.classes}"/>
<mkdir dir="${test.build.extraconf}"/>
<tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
<touch millis="0" file="${touch.temp.file}">
@@ -459,9 +458,6 @@
<target name="jar-test" depends="jar-hdfs-test" description="Make hadoop-test.jar"/>
<target name="jar-hdfs-test" depends="compile-hdfs-test" description="Make hadoop-hdfs-test.jar">
- <copy todir="${test.build.classes}">
- <fileset dir="${test.hdfs.build.classes}"/>
- </copy>
<subant buildpath="build.xml" target="-do-jar-test">
</subant>
<jar jarfile="${hadoop-hdfs-test-sources.jar}">
@@ -472,7 +468,7 @@
<target name="-do-jar-test">
<jar jarfile="${build.dir}/${test.hdfs.final.name}.jar"
- basedir="${test.build.classes}">
+ basedir="${test.hdfs.build.classes}">
<manifest>
<attribute name="Main-Class"
value="org/apache/hadoop/test/HdfsTestDriver"/>
diff --git a/src/contrib/build-contrib.xml b/src/contrib/build-contrib.xml
index 8907851..d68146e 100644
--- a/src/contrib/build-contrib.xml
+++ b/src/contrib/build-contrib.xml
@@ -96,7 +96,6 @@
<!-- the unit test classpath -->
<path id="test.classpath">
<pathelement location="${build.test}" />
- <pathelement location="${hadoop.root}/build/test/classes"/>
<pathelement location="${hadoop.root}/build/test/core/classes"/>
<pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
<pathelement location="${hadoop.root}/build/test/mapred/classes"/>
diff --git a/src/contrib/hdfsproxy/bin/hdfsproxy b/src/contrib/hdfsproxy/bin/hdfsproxy
index 1b1e597..779c65b 100755
--- a/src/contrib/hdfsproxy/bin/hdfsproxy
+++ b/src/contrib/hdfsproxy/bin/hdfsproxy
@@ -87,8 +87,8 @@
if [ -d "$HDFSPROXY_HOME/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build
fi
-if [ -d "$HDFSPROXY_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/classes
+if [ -d "$HDFSPROXY_HOME/build/test/hdfs/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/hdfs/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
diff --git a/src/contrib/hdfsproxy/bin/proxy-util b/src/contrib/hdfsproxy/bin/proxy-util
index d0ec35e..987afcb 100644
--- a/src/contrib/hdfsproxy/bin/proxy-util
+++ b/src/contrib/hdfsproxy/bin/proxy-util
@@ -88,8 +88,8 @@
if [ -d "$HDFSPROXY_HOME/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build
fi
-if [ -d "$HDFSPROXY_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/classes
+if [ -d "$HDFSPROXY_HOME/build/test/hdfs/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/hdfs/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
diff --git a/src/contrib/hdfsproxy/build.xml b/src/contrib/hdfsproxy/build.xml
index d50e833..20ec56c 100644
--- a/src/contrib/hdfsproxy/build.xml
+++ b/src/contrib/hdfsproxy/build.xml
@@ -474,7 +474,7 @@
<path id="test.classpath">
<pathelement location="${proxy.conf.test}" />
<pathelement location="${test.build.dir}" />
- <pathelement location="${hadoop.root}/build/test/classes"/>
+ <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
<!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
<pathelement location="${hadoop.root}/conf"/>
<pathelement location="${hadoop.root}/build"/>
diff --git a/src/test/aop/build/aop.xml b/src/test/aop/build/aop.xml
index 75e3563..e2728ba 100644
--- a/src/test/aop/build/aop.xml
+++ b/src/test/aop/build/aop.xml
@@ -171,7 +171,7 @@
<subant buildpath="build.xml" target="-do-jar-test">
<property name="build.dir" value="${system-test-build-dir}"/>
<property name="test.hdfs.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
- <property name="test.build.classes"
+ <property name="test.hdfs.build.classes"
value="${system-test-build-dir}/test/classes"/>
</subant>
<jar jarfile="${hadoop-hdfs-instrumented-test-sources.jar}">
@@ -185,22 +185,23 @@
</subant>
</target>
- <target name="-compile-test-system.wrapper" depends="ivy-retrieve-common, ivy-retrieve-system">
+ <target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-system">
<macro-compile-hdfs-test
target.dir="${system-test-build-dir}/test/classes"
source.dir="${test.src.dir}/system/test"
dest.dir="${system-test-build-dir}/test/classes"
- classpath="test.classpath"/>
+ classpath="test.system.classpath"/>
</target>
<macrodef name="weave-injectfault-aspects">
<attribute name="dest.dir" />
<attribute name="src.dir" />
<attribute name="aspects.jar"/>
+ <attribute name="base.build.dir" default="${build-fi.dir}"/>
<sequential>
<subant buildpath="build.xml" target="-compile-fault-inject"
output="${compile-inject.output}">
- <property name="build.dir" value="${build-fi.dir}" />
+ <property name="build.dir" value="@{base.build.dir}" />
<property name="src.dir.path" value="@{src.dir}" />
<property name="dest.dir" value="@{dest.dir}" />
<property name="hadoop.instrumented.jar" value="@{aspects.jar}"/>
@@ -210,15 +211,15 @@
<target name="inject-system-faults"
description="Inject system faults">
- <property name="build-fi.dir" value="${system-test-build-dir}" />
- <mkdir dir="${build-fi.dir}"/>
+ <mkdir dir="${system-test-build-dir}"/>
<delete file="${compile-inject.output}"/>
<subant buildpath="build.xml" target="ivy-retrieve-system">
- <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="build.dir" value="${system-test-build-dir}"/>
</subant>
<weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
- aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar">
+ aspects.jar="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar"
+ base.build.dir="${system-test-build-dir}">
</weave-injectfault-aspects>
</target>
diff --git a/src/test/hdfs/org/apache/hadoop/hdfs/TestClientBlockVerification.java b/src/test/hdfs/org/apache/hadoop/hdfs/TestClientBlockVerification.java
index 9ea30f8..ca9aed3 100644
--- a/src/test/hdfs/org/apache/hadoop/hdfs/TestClientBlockVerification.java
+++ b/src/test/hdfs/org/apache/hadoop/hdfs/TestClientBlockVerification.java
@@ -20,7 +20,6 @@
import java.util.List;
-import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.fs.Path;
@@ -31,8 +30,6 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.never;
-import static org.junit.Assert.*;
-
public class TestClientBlockVerification {
static BlockReaderTestUtil util = null;
diff --git a/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java b/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java
index 7112150..3fd7872 100644
--- a/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java
+++ b/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java
@@ -51,7 +51,6 @@
private FileSystem fs;
private MiniDFSCluster cluster;
private Configuration conf;
- private String dataDir;
@Before
public void setUp() throws Exception {
@@ -60,7 +59,6 @@
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
fs = cluster.getFileSystem();
- dataDir = cluster.getDataDirectory();
}
@After
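The TestDiskError hunk above only drops the unused dataDir field; the setUp body otherwise stays as shown. For completeness, a hedged fragment of the companion tearDown (its body is not part of this hunk and is assumed), plus the getter the deleted line used, should a later test need the path again.

```java
// Fragment only; assumes the fields and setUp shown in the hunk above.
@After
public void tearDown() throws Exception {
  if (cluster != null) {
    cluster.shutdown();   // stop the mini cluster after each test (assumed body)
  }
}

// The getter from the deleted line remains available if a test needs the location:
// String dataDir = cluster.getDataDirectory();
```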
diff --git a/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java b/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
index eaf0a93..f371daf 100644
--- a/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
+++ b/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
@@ -28,7 +28,6 @@
import java.net.InetSocketAddress;
import java.nio.channels.FileChannel;
import java.security.PrivilegedExceptionAction;
-import java.util.Collection;
import java.util.Random;
import java.util.regex.Pattern;
diff --git a/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStartup.java b/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStartup.java
index 76a8e02..d5ac37a 100644
--- a/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStartup.java
+++ b/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStartup.java
@@ -230,11 +230,13 @@
sd = it.next();
if(sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) {
- File imf = img.getStorage().getStorageFile(sd, NameNodeFile.IMAGE);
+ img.getStorage();
+ File imf = NNStorage.getStorageFile(sd, NameNodeFile.IMAGE);
LOG.info("--image file " + imf.getAbsolutePath() + "; len = " + imf.length() + "; expected = " + expectedImgSize);
assertEquals(expectedImgSize, imf.length());
} else if(sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) {
- File edf = img.getStorage().getStorageFile(sd, NameNodeFile.EDITS);
+ img.getStorage();
+ File edf = NNStorage.getStorageFile(sd, NameNodeFile.EDITS);
LOG.info("-- edits file " + edf.getAbsolutePath() + "; len = " + edf.length() + "; expected = " + expectedEditsSize);
assertEquals(expectedEditsSize, edf.length());
} else {
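In the TestStartup hunk, getStorageFile is now reached statically through NNStorage instead of through the FSImage's storage instance; the leftover img.getStorage(); statements compile but discard their result, so they appear to be merge leftovers. A fragment of the checking loop in isolation; the iterator setup, logging, and the expected sizes are assumed from the surrounding test, not shown in the hunk.

```java
// Fragment reusing only identifiers visible in the hunk; imports as in TestStartup.
Iterator<StorageDirectory> it = img.getStorage().dirIterator();  // assumed setup
while (it.hasNext()) {
  StorageDirectory sd = it.next();
  if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) {
    // Static accessor replaces img.getStorage().getStorageFile(sd, NameNodeFile.IMAGE).
    File imf = NNStorage.getStorageFile(sd, NameNodeFile.IMAGE);
    assertEquals(expectedImgSize, imf.length());
  } else if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) {
    File edf = NNStorage.getStorageFile(sd, NameNodeFile.EDITS);
    assertEquals(expectedEditsSize, edf.length());
  }
}
```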