HADOOP-9368. Add timeouts to new tests in branch-trunk-win. Contributed by Arpit Agarwal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-trunk-win@1453157 13f79535-47bb-0310-9956-ffa450edef68
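
For reference, the @Test (timeout = ...) attribute added throughout this patch is JUnit 4's per-test timeout: the runner executes the annotated method on a separate thread and fails the test if it has not finished within the given number of milliseconds, so a single hung test can no longer stall the whole suite. A minimal sketch of the pattern (the class and method names below are illustrative, not part of this patch):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class TimeoutExample {
      // Fails automatically if the body runs longer than 30 seconds.
      @Test (timeout = 30000)
      public void testCompletesQuickly() {
        assertEquals(4, 2 + 2);
      }

      // The timeout attribute composes with an expected exception,
      // as in the TestMRApps changes below.
      @Test (timeout = 120000, expected = IllegalArgumentException.class)
      public void testRejectsBadInput() {
        throw new IllegalArgumentException("bad input");
      }
    }

Two caveats apply: JUnit 4 only honors @Test on public void methods that take no parameters, and on classes that still extend the JUnit 3 TestCase (such as TestPath below) the annotations take effect only when the class is run with a JUnit 4 runner.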
diff --git a/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt b/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt
index 9e58c9f..965bad4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt
@@ -101,6 +101,9 @@
   HADOOP-9232. JniBasedUnixGroupsMappingWithFallback fails on Windows
   with UnsatisfiedLinkError. (Ivan Mitic via suresh)
 
+  HADOOP-9368. Add timeouts to new tests in branch-trunk-win.
+  (Arpit Agarwal via suresh)
+
 Patch equivalent to trunk committed to branch-trunk-win
 
   HADOOP-8924. Add maven plugin alternative to shell script to save
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
index d3e380f..ca9de83 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
@@ -43,7 +43,7 @@
     fc = FileContext.getFileContext();
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFileContextResolveAfs() throws IOException {
     Configuration conf = new Configuration();
     localFs = FileSystem.get(conf);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
index 3c0051e..7208117 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
@@ -129,7 +129,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListFiles() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -156,7 +156,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListAPI() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -204,7 +204,7 @@
     Assert.assertTrue(!partitioned.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDelete() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDelete(del);
@@ -219,7 +219,7 @@
    * (b) symlink to dir only and not the dir pointed to by symlink.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteSymlinks() throws IOException {
     setupDirs();
     
@@ -249,7 +249,7 @@
    * (b) dangling symlink to directory properly
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteDanglingSymlinks() throws IOException {
     setupDirs();
     // delete the directory tmp to make tmpDir a dangling link to dir tmp and
@@ -276,7 +276,7 @@
     Assert.assertEquals(3, del.list().length);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteContents() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDeleteContents(del);
@@ -392,7 +392,7 @@
         zlink.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
     if(Shell.WINDOWS) {
       // windows Dir.setWritable(false) does not work for directories
@@ -404,7 +404,7 @@
     validateAndSetWritablePermissions(true, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
@@ -473,7 +473,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
     if(Shell.WINDOWS) {
       // windows Dir.setWritable(false) does not work for directories
@@ -485,7 +485,7 @@
     validateAndSetWritablePermissions(true, ret);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
@@ -493,7 +493,7 @@
     validateAndSetWritablePermissions(false, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testCopyMergeSingleDirectory() throws IOException {
     setupDirs();
     boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
@@ -552,7 +552,7 @@
    * and that directory sizes are not added to the final calculated size
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetDU() throws IOException {
     setupDirs();
 
@@ -563,7 +563,7 @@
     Assert.assertEquals(expected, du);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSymlink() throws Exception {
     Assert.assertFalse(del.exists());
     del.mkdirs();
@@ -598,7 +598,7 @@
   /**
    * Test that rename on a symlink works as expected.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testSymlinkRenameTo() throws Exception {
     Assert.assertFalse(del.exists());
     del.mkdirs();
@@ -630,7 +630,7 @@
   /**
    * Test that deletion of a symlink works as expected.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testSymlinkDelete() throws Exception {
     Assert.assertFalse(del.exists());
     del.mkdirs();
@@ -654,7 +654,7 @@
   /**
    * Test that length on a symlink works as expected.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testSymlinkLength() throws Exception {
     Assert.assertFalse(del.exists());
     del.mkdirs();
@@ -720,7 +720,7 @@
     Assert.assertTrue(testFile.length() == 8);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUntar() throws IOException {
     String tarGzFileName = System.getProperty("test.cache.data",
         "build/test/cache") + "/test-untar.tgz";
@@ -733,7 +733,7 @@
     doUntarAndVerify(new File(tarFileName), untarDir);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCreateJarWithClassPath() throws Exception {
     // setup test directory for files
     Assert.assertFalse(tmp.exists());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
index 8faabcd..d64292b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
@@ -121,7 +121,7 @@
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws Exception {
     Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists");
 
@@ -177,7 +177,7 @@
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChown() throws Exception {
     Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists");
 
@@ -233,7 +233,7 @@
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChgrp() throws Exception {
     Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists");
 
@@ -278,7 +278,7 @@
     change(1, null, "admin", f2, f7);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
       throws Exception {
     Configuration conf = new Configuration();
@@ -310,7 +310,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRmWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -331,7 +331,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRmForceWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -350,7 +350,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testInvalidDefaultFS() throws Exception {
     // if default fs doesn't exist or is invalid, but the path provided in 
     // arguments is valid - fsshell should work
@@ -381,7 +381,7 @@
     
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testInterrupt() throws Exception {
     MyFsShell shell = new MyFsShell();
     shell.setConf(new Configuration());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
index 0a7c637..ab887b9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
@@ -129,7 +129,7 @@
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void test0() throws Exception {
     if (isWindows) return;
     String dir0 = buildBufferDir(ROOT, 0);
@@ -151,7 +151,7 @@
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
     if (isWindows) return;
     String dir1 = buildBufferDir(ROOT, 1);
@@ -171,7 +171,7 @@
   /** Two buffer dirs. Both do not exist but on a RW disk.
    * Check if tmp dirs are allocated in a round-robin
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
     if (isWindows) return;
     String dir2 = buildBufferDir(ROOT, 2);
@@ -197,7 +197,7 @@
    * Later disk1 becomes read-only.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
     if (isWindows) return;
     String dir3 = buildBufferDir(ROOT, 3);
@@ -235,7 +235,7 @@
    * @throws Exception
    */
   static final int TRIALS = 100;
-  @Test
+  @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
     if (isWindows) return;
     String dir5 = buildBufferDir(ROOT, 5);
@@ -272,7 +272,7 @@
    * directory. With checkAccess true, the directory should not be created.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testLocalPathForWriteDirCreation() throws IOException {
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
@@ -303,7 +303,7 @@
    * Test when mapred.local.dir not configured and called
    * getLocalPathForWrite
    */
-  @Test
+  @Test (timeout = 30000)
   public void testShouldNotthrowNPE() throws Exception {
     Configuration conf1 = new Configuration();
     try {
@@ -322,7 +322,7 @@
    * are mistakenly created from fully qualified path strings.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -344,7 +344,7 @@
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -369,7 +369,7 @@
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
     assumeTrue(!isWindows);
     
@@ -417,7 +417,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRemoveContext() throws IOException {
     String dir = buildBufferDir(ROOT, 0);
     try {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
index 92bbd9d..7a5843a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
@@ -17,6 +17,7 @@
  */
 
 package org.apache.hadoop.fs;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.net.URI;
@@ -32,6 +33,7 @@
 import static org.junit.Assert.fail;
 
 public class TestPath extends TestCase {
+  @Test (timeout = 30000)
   public void testToString() {
     toStringTest("/");
     toStringTest("/foo");
@@ -64,6 +66,7 @@
     assertEquals(pathString, new Path(pathString).toString());
   }
 
+  @Test (timeout = 30000)
   public void testNormalize() throws URISyntaxException {
     assertEquals("", new Path(".").toString());
     assertEquals("..", new Path("..").toString());
@@ -85,6 +88,7 @@
     }
   }
 
+  @Test (timeout = 30000)
   public void testIsAbsolute() {
     assertTrue(new Path("/").isAbsolute());
     assertTrue(new Path("/foo").isAbsolute());
@@ -97,6 +101,7 @@
     }
   }
 
+  @Test (timeout = 30000)
   public void testParent() {
     assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
     assertEquals(new Path("foo"), new Path("foo/bar").getParent());
@@ -107,6 +112,7 @@
     }
   }
 
+  @Test (timeout = 30000)
   public void testChild() {
     assertEquals(new Path("."), new Path(".", "."));
     assertEquals(new Path("/"), new Path("/", "."));
@@ -126,10 +132,12 @@
     }
   }
   
+  @Test (timeout = 30000)
   public void testEquals() {
     assertFalse(new Path("/").equals(new Path("/foo")));
   }
 
+  @Test (timeout = 30000)
   public void testDots() {
     // Test Path(String) 
     assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz");
@@ -168,6 +176,7 @@
   }
 
   /** Test that Windows paths are correctly handled */
+  @Test (timeout = 5000)
   public void testWindowsPaths() throws URISyntaxException, IOException {
     if (!Path.WINDOWS) {
       return;
@@ -180,6 +189,7 @@
   }
 
   /** Test invalid paths on Windows are correctly rejected */
+  @Test (timeout = 5000)
   public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
     if (!Path.WINDOWS) {
       return;
@@ -199,17 +209,20 @@
   }
 
   /** Test Path objects created from other Path objects */
+  @Test (timeout = 30000)
   public void testChildParentResolution() throws URISyntaxException, IOException {
     Path parent = new Path("foo1://bar1/baz1");
     Path child  = new Path("foo2://bar2/baz2");
     assertEquals(child, new Path(parent, child));
   }
   
+  @Test (timeout = 30000)
   public void testScheme() throws java.io.IOException {
     assertEquals("foo:/bar", new Path("foo:/","/bar").toString()); 
     assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString()); 
   }
 
+  @Test (timeout = 30000)
   public void testURI() throws URISyntaxException, IOException {
     URI uri = new URI("file:///bar#baz");
     Path path = new Path(uri);
@@ -232,6 +245,7 @@
   }
 
   /** Test URIs created from Path objects */
+  @Test (timeout = 30000)
   public void testPathToUriConversion() throws URISyntaxException, IOException {
     // Path differs from URI in that it ignores the query part..
     assertEquals(new URI(null, null, "/foo?bar", null, null),  new Path("/foo?bar").toUri());
@@ -252,6 +266,7 @@
   }
 
   /** Test reserved characters in URIs (and therefore Paths) */
+  @Test (timeout = 30000)
   public void testReservedCharacters() throws URISyntaxException, IOException {
     // URI encodes the path
     assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
@@ -273,6 +288,7 @@
     assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
   }
   
+  @Test (timeout = 30000)
   public void testMakeQualified() throws URISyntaxException {
     URI defaultUri = new URI("hdfs://host1/dir1");
     URI wd         = new URI("hdfs://host2/dir2");
@@ -286,6 +302,7 @@
                  new Path("file").makeQualified(defaultUri, new Path(wd)));
  }
 
+  @Test (timeout = 30000)
   public void testGetName() {
     assertEquals("", new Path("/").getName());
     assertEquals("foo", new Path("foo").getName());
@@ -295,12 +312,14 @@
     assertEquals("bar", new Path("hdfs://host/foo/bar").getName());
   }
   
+  @Test (timeout = 30000)
   public void testAvroReflect() throws Exception {
     AvroTestUtil.testReflect
       (new Path("foo"),
        "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
   }
 
+  @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
     // This test is not meaningful on Windows where * is disallowed in file name.
     if (Shell.WINDOWS) return;
@@ -361,6 +380,7 @@
     assertEquals(new Path(testRoot, "*/f"), stats[0].getPath());
   }
 
+  @Test (timeout = 30000)
   public void testMergePaths() {
     assertEquals(new Path("/foo/bar"),
       Path.mergePaths(new Path("/foo"),
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
index a545a11..320a79e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
@@ -61,7 +61,7 @@
     fs.close();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithDirStringAndConf() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -74,7 +74,7 @@
     checkPathData(dirString, item);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnqualifiedUriContents() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -85,7 +85,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testQualifiedUriContents() throws Exception {
     String dirString = fs.makeQualified(new Path("d1")).toString();
     PathData item = new PathData(dirString, conf);
@@ -96,7 +96,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCwdContents() throws Exception {
     String dirString = Path.CUR_DIR;
     PathData item = new PathData(dirString, conf);
@@ -107,7 +107,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testToFile() throws Exception {
     PathData item = new PathData(".", conf);
     assertEquals(new File(testDir.toString()), item.toFile());
@@ -117,7 +117,7 @@
     assertEquals(new File(testDir + "/d1/f1"), item.toFile());
   }
 
-  @Test
+  @Test (timeout = 5000)
   public void testToFileRawWindowsPaths() throws Exception {
     if (!Path.WINDOWS) {
       return;
@@ -146,7 +146,7 @@
     assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
   }
 
-  @Test
+  @Test (timeout = 5000)
   public void testInvalidWindowsPath() throws Exception {
     if (!Path.WINDOWS) {
       return;
@@ -166,7 +166,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testAbsoluteGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
     assertEquals(
@@ -175,7 +175,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
     assertEquals(
@@ -184,7 +184,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlobBack() throws Exception {
     fs.setWorkingDirectory(new Path("d1"));
     PathData[] items = PathData.expandAsGlob("../d2/*", conf);
@@ -194,7 +194,7 @@
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithStringAndConfForBuggyPath() throws Exception {
     String dirString = "file:///tmp";
     Path tmpDir = new Path(dirString);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
index 293d0d4..0c8a6ac 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
@@ -46,7 +46,7 @@
   /**
    * Tests whether binary Avro data files are displayed correctly.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDisplayForAvroFiles() throws Exception {
     // Create a small Avro data file on the local file system.
     createAvroFile(generateWeatherAvroBinaryData());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index 08d3490..0602d30 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -62,7 +62,7 @@
     TEST_DIR.mkdirs();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFstat() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -88,7 +88,7 @@
    * NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe
    * implementation of getpwuid_r.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testMultiThreadedFstat() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -134,7 +134,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFstatClosedFd() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -151,7 +151,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSetFilePointer() throws Exception {
     if (!Path.WINDOWS) {
       return;
@@ -198,7 +198,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCreateFile() throws Exception {
     if (!Path.WINDOWS) {
       return;
@@ -240,7 +240,7 @@
 
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testOpenMissingWithoutCreate() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -258,7 +258,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testOpenWithCreate() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -292,7 +292,7 @@
    * Test that opens and closes a file 10000 times - this would crash with
    * "Too many open files" if we leaked fds using this access pattern.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFDDoesntLeak() throws IOException {
     if (Path.WINDOWS) {
       return;
@@ -313,7 +313,7 @@
   /**
    * Test basic chmod operation
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -338,7 +338,7 @@
   }
 
 
-  @Test
+  @Test (timeout = 30000)
   public void testPosixFadvise() throws Exception {
     if (Path.WINDOWS) {
       return;
@@ -381,7 +381,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSyncFileRange() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testSyncFileRange"));
@@ -416,7 +416,7 @@
     assertEquals(expected, perms.toShort());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testGetUserName() throws IOException {
     if (Path.WINDOWS) {
       return;
@@ -425,7 +425,7 @@
     assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testGetGroupName() throws IOException {
     if (Path.WINDOWS) {
       return;
@@ -434,7 +434,7 @@
     assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRenameTo() throws Exception {
     final File TEST_DIR = new File(new File(
         System.getProperty("test.build.data","build/test/data")), "renameTest");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index b8a95de..12f4b31 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -91,17 +91,17 @@
     UserGroupInformation.setLoginUser(null);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSimpleLogin() throws IOException {
     tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testTokenLogin() throws IOException {
     tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testProxyLogin() throws IOException {
     tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false);
   }
@@ -130,7 +130,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGetRealAuthenticationMethod() {
     UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
     ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
@@ -141,7 +141,7 @@
     assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
   }
   /** Test login method */
-  @Test
+  @Test (timeout = 30000)
   public void testLogin() throws Exception {
     // login from unix
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
@@ -168,7 +168,7 @@
    * given user name - get all the groups.
    * Needs to happen before creating the test users
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetServerSideGroups() throws IOException,
                                                InterruptedException {
     // get the user name
@@ -228,7 +228,7 @@
   }
 
   /** test constructor */
-  @Test
+  @Test (timeout = 30000)
   public void testConstructor() throws Exception {
     UserGroupInformation ugi = 
       UserGroupInformation.createUserForTesting("user2/cron@HADOOP.APACHE.ORG", 
@@ -254,7 +254,7 @@
     assertTrue(gotException);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testEquals() throws Exception {
     UserGroupInformation uugi = 
       UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
@@ -272,7 +272,7 @@
     assertEquals(uugi.hashCode(), ugi3.hashCode());
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testEqualsWithRealUser() throws Exception {
     UserGroupInformation realUgi1 = UserGroupInformation.createUserForTesting(
         "RealUser", GROUP_NAMES);
@@ -285,7 +285,7 @@
     assertFalse(remoteUgi.equals(proxyUgi1));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGettingGroups() throws Exception {
     UserGroupInformation uugi = 
       UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
@@ -295,7 +295,7 @@
   }
 
   @SuppressWarnings("unchecked") // from Mockito mocks
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testAddToken() throws Exception {
     UserGroupInformation ugi = 
         UserGroupInformation.createRemoteUser("someone"); 
@@ -333,7 +333,7 @@
   }
 
   @SuppressWarnings("unchecked") // from Mockito mocks
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testGetCreds() throws Exception {
     UserGroupInformation ugi = 
         UserGroupInformation.createRemoteUser("someone"); 
@@ -359,7 +359,7 @@
   }
 
   @SuppressWarnings("unchecked") // from Mockito mocks
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testAddCreds() throws Exception {
     UserGroupInformation ugi = 
         UserGroupInformation.createRemoteUser("someone"); 
@@ -384,7 +384,7 @@
     assertSame(secret, ugi.getCredentials().getSecretKey(secretKey));
   }
 
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testGetCredsNotSame()
       throws Exception {
     UserGroupInformation ugi = 
@@ -412,7 +412,7 @@
   }
 
   @SuppressWarnings("unchecked") // from Mockito mocks
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testAddNamedToken() throws Exception {
     UserGroupInformation ugi = 
         UserGroupInformation.createRemoteUser("someone"); 
@@ -433,7 +433,7 @@
   }
 
   @SuppressWarnings("unchecked") // from Mockito mocks
-  @Test
+  @Test (timeout = 30000)
   public <T extends TokenIdentifier> void testUGITokens() throws Exception {
     UserGroupInformation ugi = 
       UserGroupInformation.createUserForTesting("TheDoctor", 
@@ -479,7 +479,7 @@
     assertTrue(otherSet.contains(t2));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testTokenIdentifiers() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
         "TheDoctor", new String[] { "TheTARDIS" });
@@ -507,7 +507,7 @@
     assertEquals(2, otherSet.size());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testTestAuthMethod() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
     // verify the reverse mappings works
@@ -519,7 +519,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testUGIAuthMethod() throws Exception {
     final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
     final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
@@ -535,7 +535,7 @@
     });
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testUGIAuthMethodInRealUser() throws Exception {
     final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
     UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
@@ -570,7 +570,7 @@
     Assert.assertEquals(proxyUgi3, proxyUgi4);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testLoginObjectInSubject() throws Exception {
     UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
     UserGroupInformation anotherUgi = new UserGroupInformation(loginUgi
@@ -583,7 +583,7 @@
     Assert.assertTrue(login1 == login2);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testLoginModuleCommit() throws Exception {
     UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
     User user1 = loginUgi.getSubject().getPrincipals(User.class).iterator()
@@ -617,7 +617,7 @@
    * with it, but that Subject was not created by Hadoop (ie it has no
    * associated User principal)
    */
-  @Test
+  @Test (timeout = 30000)
   public void testUGIUnderNonHadoopContext() throws Exception {
     Subject nonHadoopSubject = new Subject();
     Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction<Void>() {
@@ -631,7 +631,7 @@
   }
 
   /** Test hasSufficientTimeElapsed method */
-  @Test
+  @Test (timeout = 30000)
   public void testHasSufficientTimeElapsed() throws Exception {
     // Make hasSufficientTimeElapsed public
     Method method = UserGroupInformation.class
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index bf64af5..7dcc4ae 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -36,19 +36,23 @@
   final FsPermission defaultPerm = new FsPermission("755");
   final FsPermission invalidPerm = new FsPermission("000");
 
-  @Test public void testMkdirs_dirExists() throws Throwable {
+  @Test (timeout = 30000)
+  public void testMkdirs_dirExists() throws Throwable {
     _mkdirs(true, defaultPerm, defaultPerm);
   }
 
-  @Test public void testMkdirs_noDir() throws Throwable {
+  @Test (timeout = 30000)
+  public void testMkdirs_noDir() throws Throwable {
     _mkdirs(false, defaultPerm, defaultPerm);
   }
 
-  @Test public void testMkdirs_dirExists_badUmask() throws Throwable {
+  @Test (timeout = 30000)
+  public void testMkdirs_dirExists_badUmask() throws Throwable {
     _mkdirs(true, defaultPerm, invalidPerm);
   }
 
-  @Test public void testMkdirs_noDir_badUmask() throws Throwable {
+  @Test (timeout = 30000)
+  public void testMkdirs_noDir_badUmask() throws Throwable {
     _mkdirs(false, defaultPerm, invalidPerm);
   }
 
@@ -79,23 +83,28 @@
     }
   }
 
-  @Test public void testCheckDir_normal() throws Throwable {
+  @Test (timeout = 30000)
+  public void testCheckDir_normal() throws Throwable {
     _checkDirs(true, new FsPermission("755"), true);
   }
 
-  @Test public void testCheckDir_notDir() throws Throwable {
+  @Test (timeout = 30000)
+  public void testCheckDir_notDir() throws Throwable {
     _checkDirs(false, new FsPermission("000"), false);
   }
 
-  @Test public void testCheckDir_notReadable() throws Throwable {
+  @Test (timeout = 30000)
+  public void testCheckDir_notReadable() throws Throwable {
     _checkDirs(true, new FsPermission("000"), false);
   }
 
-  @Test public void testCheckDir_notWritable() throws Throwable {
+  @Test (timeout = 30000)
+  public void testCheckDir_notWritable() throws Throwable {
     _checkDirs(true, new FsPermission("444"), false);
   }
 
-  @Test public void testCheckDir_notListable() throws Throwable {
+  @Test (timeout = 30000)
+  public void testCheckDir_notListable() throws Throwable {
     _checkDirs(true, new FsPermission("666"), false);   // not listable
   }
 
@@ -131,27 +140,27 @@
    * permission for result of mapper.
    */
 
-  @Test
+  @Test (timeout = 30000)
   public void testCheckDir_normal_local() throws Throwable {
     _checkDirs(true, "755", true);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCheckDir_notDir_local() throws Throwable {
     _checkDirs(false, "000", false);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCheckDir_notReadable_local() throws Throwable {
     _checkDirs(true, "000", false);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCheckDir_notWritable_local() throws Throwable {
     _checkDirs(true, "444", false);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCheckDir_notListable_local() throws Throwable {
     _checkDirs(true, "666", false);
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
index d8a12b2..4f06a31 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
@@ -46,7 +46,7 @@
   final private static String ESCAPED_STR_WITH_BOTH2 = 
     "\\,A\\\\\\,\\,B\\\\\\\\\\,";
   
-  @Test
+  @Test (timeout = 30000)
   public void testEscapeString() throws Exception {
     assertEquals(NULL_STR, StringUtils.escapeString(NULL_STR));
     assertEquals(EMPTY_STR, StringUtils.escapeString(EMPTY_STR));
@@ -60,7 +60,7 @@
         StringUtils.escapeString(STR_WITH_BOTH2));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testSplit() throws Exception {
     assertEquals(NULL_STR, StringUtils.split(NULL_STR));
     String[] splits = StringUtils.split(EMPTY_STR);
@@ -90,7 +90,7 @@
     assertEquals(ESCAPED_STR_WITH_BOTH2, splits[0]);    
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testSimpleSplit() throws Exception {
     final String[] TO_TEST = {
         "a/b/c",
@@ -106,7 +106,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnescapeString() throws Exception {
     assertEquals(NULL_STR, StringUtils.unEscapeString(NULL_STR));
     assertEquals(EMPTY_STR, StringUtils.unEscapeString(EMPTY_STR));
@@ -138,7 +138,7 @@
         StringUtils.unEscapeString(ESCAPED_STR_WITH_BOTH2));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testTraditionalBinaryPrefix() throws Exception {
     //test string2long(..)
     String[] symbol = {"k", "m", "g", "t", "p", "e"};
@@ -264,7 +264,7 @@
     assertEquals("0.5430%", StringUtils.formatPercent(0.00543, 4));
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testJoin() {
     List<String> s = new ArrayList<String>();
     s.add("a");
@@ -276,7 +276,7 @@
     assertEquals("a:b:c", StringUtils.join(":", s.subList(0, 3)));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGetTrimmedStrings() throws Exception {
     String compactDirList = "/spindle1/hdfs,/spindle2/hdfs,/spindle3/hdfs";
     String spacedDirList = "/spindle1/hdfs, /spindle2/hdfs, /spindle3/hdfs";
@@ -298,7 +298,7 @@
     assertArrayEquals(emptyArray, estring);
   } 
 
-  @Test
+  @Test (timeout = 30000)
   public void testCamelize() {
     // common use cases
     assertEquals("Map", StringUtils.camelize("MAP"));
@@ -334,7 +334,7 @@
     assertEquals("Zz", StringUtils.camelize("zZ"));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testStringToURI() {
     String[] str = new String[] { "file://" };
     try {
@@ -345,7 +345,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSimpleHostName() {
     assertEquals("Should return hostname when FQDN is specified",
             "hadoop01",
@@ -358,7 +358,7 @@
             StringUtils.simpleHostname("10.10.5.68"));
   }
 
-  @Test
+  @Test (timeout = 5000)
   public void testReplaceTokensShellEnvVars() {
     Pattern pattern = StringUtils.SHELL_ENV_VAR_PATTERN;
     Map<String, String> replacements = new HashMap<String, String>();
@@ -381,7 +381,7 @@
       pattern, replacements));
   }
 
-  @Test
+  @Test (timeout = 5000)
   public void testReplaceTokensWinEnvVars() {
     Pattern pattern = StringUtils.WIN_ENV_VAR_PATTERN;
     Map<String, String> replacements = new HashMap<String, String>();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index f5fe12b..29140db 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -68,7 +68,7 @@
     return b.toString();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testLs() throws IOException {
     if (!Shell.WINDOWS) {
       // Not supported on non-Windows platforms
@@ -102,7 +102,7 @@
     assertFalse(testFile.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testGroups() throws IOException {
     if (!Shell.WINDOWS) {
       // Not supported on non-Windows platforms
@@ -227,7 +227,7 @@
     assertTrue(FileUtil.fullyDelete(a));
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testBasicChmod() throws IOException {
     if (!Shell.WINDOWS) {
       // Not supported on non-Windows platforms
@@ -283,7 +283,7 @@
     assertTrue(aExe.delete());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws IOException {
     if (!Shell.WINDOWS) {
       // Not supported on non-Windows platforms
@@ -320,7 +320,7 @@
     assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testChown() throws IOException {
     if (!Shell.WINDOWS) {
       // Not supported on non-Windows platforms
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index e60d881..12f1568 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -103,7 +103,7 @@
     System.out.println(Thread.currentThread().getStackTrace()[2] + " " + s);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testZeroSizeFile() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -146,7 +146,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRecrusiveRm() throws IOException {
 	  Configuration conf = new HdfsConfiguration();
 	  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -172,7 +172,7 @@
     }
   }
     
-  @Test
+  @Test (timeout = 30000)
   public void testDu() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -222,7 +222,8 @@
     }
                                   
   }
-  @Test
+
+  @Test (timeout = 30000)
   public void testPut() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -321,7 +322,7 @@
 
 
   /** check command error outputs and exit statuses. */
-  @Test
+  @Test (timeout = 30000)
   public void testErrOutPut() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = null;
@@ -471,7 +472,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testURIPaths() throws Exception {
     Configuration srcConf = new HdfsConfiguration();
     Configuration dstConf = new HdfsConfiguration();
@@ -564,7 +565,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testText() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = null;
@@ -680,7 +681,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCopyToLocal() throws IOException {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -778,7 +779,7 @@
     return path;
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCount() throws Exception {
     Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -945,7 +946,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFilePermissions() throws IOException {
     Configuration conf = new HdfsConfiguration();
     
@@ -1011,7 +1012,7 @@
   /**
    * Tests various options of DFSShell.
    */
-  @Test
+  @Test (timeout = 120000)
   public void testDFSShell() throws IOException {
     Configuration conf = new HdfsConfiguration();
     /* This tests some properties of ChecksumFileSystem as well.
@@ -1391,7 +1392,7 @@
     String run(int exitcode, String... options) throws IOException;
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRemoteException() throws Exception {
     UserGroupInformation tmpUGI = 
       UserGroupInformation.createUserForTesting("tmpname", new String[] {"mygroup"});
@@ -1435,7 +1436,7 @@
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGet() throws IOException {
     DFSTestUtil.setLogLevel2All(FSInputChecker.LOG);
 
@@ -1524,7 +1525,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testLsr() throws Exception {
     final Configuration conf = new HdfsConfiguration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -1582,7 +1583,7 @@
    * and return -1 exit code.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testInvalidShell() throws Exception {
     Configuration conf = new Configuration(); // default FS (non-DFS)
     DFSAdmin admin = new DFSAdmin();
@@ -1592,7 +1593,7 @@
   }
 
   // force Copy Option is -f
-  @Test
+  @Test (timeout = 30000)
   public void testCopyCommandsWithForceOption() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
@@ -1719,7 +1720,7 @@
    * Test that the server trash configuration is respected when
    * the client configuration is not set.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testServerConfigRespected() throws Exception {
     deleteFileUsingTrash(true, false);
   }
@@ -1728,7 +1729,7 @@
    * Test that server trash configuration is respected even when the
    * client configuration is set.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testServerConfigRespectedWithClient() throws Exception {
     deleteFileUsingTrash(true, true);
   }
@@ -1737,7 +1738,7 @@
    * Test that the client trash configuration is respected when
    * the server configuration is not set.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testClientConfigRespected() throws Exception {
     deleteFileUsingTrash(false, true);
   }
@@ -1745,7 +1746,7 @@
   /**
    * Test that trash is disabled by default.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testNoTrashConfig() throws Exception {
     deleteFileUsingTrash(false, false);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
index 724a0c2..c1aa9d1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
@@ -151,7 +151,7 @@
   /**
    * Test that that writes to an incomplete block are available to a reader
    */
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockRead()
     throws IOException {
     // create a new file in the root, write data, do no close
@@ -174,7 +174,7 @@
    * would result in too small a buffer to do the buffer-copy needed
    * for partial chunks.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockPacketBufferOverrun() throws IOException {
     // check that / exists
     Path path = new Path("/");
@@ -200,7 +200,7 @@
   // use a small block size and a large write so that DN is busy creating
   // new blocks.  This makes it almost 100% sure we can reproduce
   // case of client getting a DN that hasn't yet created the blocks
-  @Test
+  @Test (timeout = 30000)
   public void testImmediateReadOfNewFile()
     throws IOException {
     final int blockSize = 64 * 1024;
@@ -277,12 +277,12 @@
 
   // for some reason, using tranferTo evokes the race condition more often
   // so test separately
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockCRCErrorTransferTo() throws IOException {
     runTestUnfinishedBlockCRCError(true, SyncType.SYNC, DEFAULT_WRITE_SIZE);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockCRCErrorTransferToVerySmallWrite()
     throws IOException {
     runTestUnfinishedBlockCRCError(true, SyncType.SYNC, SMALL_WRITE_SIZE);
@@ -290,18 +290,17 @@
 
   // fails due to issue w/append, disable 
   @Ignore
-  @Test
   public void _testUnfinishedBlockCRCErrorTransferToAppend()
     throws IOException {
     runTestUnfinishedBlockCRCError(true, SyncType.APPEND, DEFAULT_WRITE_SIZE);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockCRCErrorNormalTransfer() throws IOException {
     runTestUnfinishedBlockCRCError(false, SyncType.SYNC, DEFAULT_WRITE_SIZE);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite()
     throws IOException {
     runTestUnfinishedBlockCRCError(false, SyncType.SYNC, SMALL_WRITE_SIZE);
@@ -309,7 +308,6 @@
 
   // fails due to issue w/append, disable 
   @Ignore
-  @Test
   public void _testUnfinishedBlockCRCErrorNormalTransferAppend()
     throws IOException {
     runTestUnfinishedBlockCRCError(false, SyncType.APPEND, DEFAULT_WRITE_SIZE);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
index 16bb8a1..566be8b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
@@ -38,7 +38,7 @@
 
   private static final Log LOG = LogFactory.getLog(TestMapReduceChildJVM.class);
 
-  @Test
+  @Test (timeout = 30000)
   public void testCommandLine() throws Exception {
 
     MyMRApp app = new MyMRApp(1, 0, true, this.getClass().getName(), true);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index 7cd488c..05497cc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -78,25 +78,29 @@
     fs.delete(p, true);
   }
 
-  @Test public void testJobIDtoString() {
+  @Test (timeout = 120000)
+  public void testJobIDtoString() {
     JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
     jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
     assertEquals("job_0_0000", MRApps.toString(jid));
   }
 
-  @Test public void testToJobID() {
+  @Test (timeout = 120000)
+  public void testToJobID() {
     JobId jid = MRApps.toJobID("job_1_1");
     assertEquals(1, jid.getAppId().getClusterTimestamp());
     assertEquals(1, jid.getAppId().getId());
     assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field
   }
 
-  @Test(expected=IllegalArgumentException.class) public void testJobIDShort() {
+  @Test (timeout = 120000, expected=IllegalArgumentException.class)
+  public void testJobIDShort() {
     MRApps.toJobID("job_0_0_0");
   }
 
   //TODO_get.set
-  @Test public void testTaskIDtoString() {
+  @Test (timeout = 120000)
+  public void testTaskIDtoString() {
     TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
     tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
     tid.getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
@@ -111,7 +115,8 @@
     assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
   }
 
-  @Test public void testToTaskID() {
+  @Test (timeout = 120000)
+  public void testToTaskID() {
     TaskId tid = MRApps.toTaskID("task_1_2_r_3");
     assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp());
     assertEquals(2, tid.getJobId().getAppId().getId());
@@ -123,16 +128,19 @@
     assertEquals(TaskType.MAP, tid.getTaskType());
   }
 
-  @Test(expected=IllegalArgumentException.class) public void testTaskIDShort() {
+  @Test(timeout = 120000, expected=IllegalArgumentException.class) 
+  public void testTaskIDShort() {
     MRApps.toTaskID("task_0_0000_m");
   }
 
-  @Test(expected=IllegalArgumentException.class) public void testTaskIDBadType() {
+  @Test(timeout = 120000, expected=IllegalArgumentException.class) 
+  public void testTaskIDBadType() {
     MRApps.toTaskID("task_0_0000_x_000000");
   }
 
   //TODO_get.set
-  @Test public void testTaskAttemptIDtoString() {
+  @Test (timeout = 120000)
+  public void testTaskAttemptIDtoString() {
     TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
     taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class));
     taid.getTaskId().setTaskType(TaskType.MAP);
@@ -141,7 +149,8 @@
     assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
   }
 
-  @Test public void testToTaskAttemptID() {
+  @Test (timeout = 120000)
+  public void testToTaskAttemptID() {
     TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
     assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp());
     assertEquals(1, taid.getTaskId().getJobId().getAppId().getId());
@@ -150,11 +159,13 @@
     assertEquals(3, taid.getId());
   }
 
-  @Test(expected=IllegalArgumentException.class) public void testTaskAttemptIDShort() {
+  @Test(timeout = 120000, expected=IllegalArgumentException.class) 
+  public void testTaskAttemptIDShort() {
     MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
   }
 
-  @Test public void testGetJobFileWithUser() {
+  @Test (timeout = 120000)
+  public void testGetJobFileWithUser() {
     Configuration conf = new Configuration();
     conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging");
     String jobFile = MRApps.getJobFile(conf, "dummy-user", 
@@ -164,7 +175,8 @@
         "/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile);
   }
 
-  @Test public void testSetClasspath() throws IOException {
+  @Test (timeout = 120000)
+  public void testSetClasspath() throws IOException {
     Job job = Job.getInstance();
     Map<String, String> environment = new HashMap<String, String>();
     MRApps.setClasspath(environment, job.getConfiguration());
@@ -187,7 +199,8 @@
     assertTrue(environment.get("CLASSPATH").contains(mrAppClasspath));
   }
   
-  @Test public void testSetClasspathWithArchives () throws IOException {
+  @Test (timeout = 120000)
+  public void testSetClasspathWithArchives () throws IOException {
     File testTGZ = new File(testWorkDir, "test.tgz");
     FileOutputStream out = new FileOutputStream(testTGZ);
     out.write(0);
@@ -211,7 +224,8 @@
     assertTrue(environment.get("CLASSPATH").contains("testTGZ"));
   }
 
- @Test public void testSetClasspathWithUserPrecendence() {
+ @Test (timeout = 120000)
+ public void testSetClasspathWithUserPrecendence() {
     Configuration conf = new Configuration();
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
     Map<String, String> env = new HashMap<String, String>();
@@ -229,7 +243,8 @@
       env_str.startsWith(expectedClasspath));
   }
 
-  @Test public void testSetClasspathWithNoUserPrecendence() {
+  @Test (timeout = 120000)
+  public void testSetClasspathWithNoUserPrecendence() {
     Configuration conf = new Configuration();
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);
     Map<String, String> env = new HashMap<String, String>();
@@ -248,7 +263,8 @@
       env_str.startsWith(expectedClasspath));
   }
   
-  @Test public void testSetClasspathWithJobClassloader() throws IOException {
+  @Test (timeout = 120000)
+  public void testSetClasspathWithJobClassloader() throws IOException {
     Configuration conf = new Configuration();
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);
     Map<String, String> env = new HashMap<String, String>();
@@ -267,7 +283,7 @@
       + " classpath!", expectedAppClasspath, appCp);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testSetupDistributedCacheEmpty() throws IOException {
     Configuration conf = new Configuration();
     Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
@@ -277,7 +293,7 @@
   }
   
   @SuppressWarnings("deprecation")
-  @Test(expected = InvalidJobConfException.class)
+  @Test(timeout = 120000, expected = InvalidJobConfException.class)
   public void testSetupDistributedCacheConflicts() throws Exception {
     Configuration conf = new Configuration();
     conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
@@ -308,7 +324,7 @@
   }
   
   @SuppressWarnings("deprecation")
-  @Test(expected = InvalidJobConfException.class)
+  @Test(timeout = 120000, expected = InvalidJobConfException.class)
   public void testSetupDistributedCacheConflictsFiles() throws Exception {
     Configuration conf = new Configuration();
     conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
@@ -336,7 +352,7 @@
   }
   
   @SuppressWarnings("deprecation")
-  @Test
+  @Test (timeout = 30000)
   public void testSetupDistributedCache() throws Exception {
     Configuration conf = new Configuration();
     conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
index 9d3b7a4..348f379 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
@@ -144,7 +144,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSleepJob() throws IOException, InterruptedException,
       ClassNotFoundException { 
     LOG.info("\n\n\nStarting testSleepJob().");
@@ -215,7 +215,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRandomWriter() throws IOException, InterruptedException,
       ClassNotFoundException {
     
@@ -277,7 +277,7 @@
             && counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue() != 0);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailingMapper() throws IOException, InterruptedException,
       ClassNotFoundException {
 
@@ -359,7 +359,7 @@
     return job;
   }
 
-  //@Test
+  //@Test (timeout = 30000)
   public void testSleepJobWithSecurityOn() throws IOException,
       InterruptedException, ClassNotFoundException {
 
@@ -542,7 +542,7 @@
           trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testDistributedCache() throws Exception {
     // Test with a local (file:///) Job Jar
     Path localJobJarPath = makeJobJarWithLib(TEST_ROOT_DIR.toUri().toString());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
index 507b923..fc842b0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
@@ -112,7 +112,7 @@
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testJobHistoryData() throws IOException, InterruptedException,
       AvroRemoteException, ClassNotFoundException {
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
index a416ea1..823433c 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
@@ -83,7 +83,7 @@
    *  (b) hadoop.tasklog.totalLogFileSize
    * for the children of java tasks in streaming jobs.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testStreamingTaskLogWithHadoopCmd() {
     try {
       final int numSlaves = 1;
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
index 2c9547a..dba676a 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
@@ -53,7 +53,7 @@
   String cacheString = "This is just the cache string";
   StreamJob job;
 
-  @Test
+  @Test (timeout = 60000)
   public void testSymLink() throws Exception
   {
     boolean mayExit = false;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
index 85fba8b..db3fd29 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
@@ -105,7 +105,7 @@
         new Path(TEST_ROOT_DIR.getAbsolutePath()), true);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testProcessTree() throws Exception {
 
     if (!Shell.LINUX) {
@@ -333,7 +333,7 @@
    * @throws IOException if there was a problem setting up the
    *                      fake procfs directories or files.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testCpuAndMemoryForProcessTree() throws IOException {
 
     // test processes
@@ -407,7 +407,7 @@
    * @throws IOException if there was a problem setting up the
    *                      fake procfs directories or files.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testMemForOlderProcesses() throws IOException {
     // initial list of processes
     String[] pids = { "100", "200", "300", "400" };
@@ -514,7 +514,7 @@
    * @throws IOException if there was a problem setting up the
    *                      fake procfs directories or files.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDestroyProcessTree() throws IOException {
     // test process
     String pid = "100";
@@ -540,7 +540,7 @@
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testProcessTreeDump()
       throws IOException {
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
index 436aaf2..ef1ee39 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.util.Shell;
 
 import junit.framework.TestCase;
+import org.junit.Test;
 
 public class TestWindowsBasedProcessTree extends TestCase {
   private static final Log LOG = LogFactory
@@ -38,7 +39,8 @@
       return infoStr;
     }
   }
-  
+
+  @Test (timeout = 30000)
   public void testTree() {
     if( !Shell.WINDOWS) {
       LOG.info("Platform not Windows. Not testing");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java
index ec4234f..70dde32 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsResourceCalculatorPlugin.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.util;
 
 import junit.framework.TestCase;
+import org.junit.Test;
 
 public class TestWindowsResourceCalculatorPlugin extends TestCase {
   
@@ -30,7 +31,8 @@
       return infoStr;
     }    
   }
-  
+
+  @Test (timeout = 30000)
   public void testParseSystemInfoString() {
     WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
     // info str derived from windows shell command has \r\n termination
@@ -47,7 +49,8 @@
     assertTrue(tester.cumulativeCpuTimeMs == 6261812L);
     assertTrue(tester.cpuUsage == -1);
   }
-  
+
+  @Test (timeout = 20000)
   public void testRefreshAndCpuUsage() throws InterruptedException {
     WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
     // info str derived from windows shell command has \r\n termination
@@ -70,7 +73,8 @@
     assertTrue(tester.memAvailable == 5400417792L);
     assertTrue(tester.cpuUsage >= 0.1);
   }
-  
+
+  @Test (timeout = 20000)
   public void testErrorInGetSystemInfo() {
     WindowsResourceCalculatorPluginTester tester = new WindowsResourceCalculatorPluginTester();
     // info str derived from windows shell command has \r\n termination
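One caveat for the two Windows test classes above: both still extend junit.framework.TestCase, and JUnit 4 dispatches TestCase subclasses to its JUnit 3.8 runner, which typically ignores org.junit.Test annotations (the methods run only because their names begin with "test"). The timeout values added here may therefore not be enforced until the classes are migrated off TestCase. A minimal sketch of the JUnit 4 style, using a hypothetical class name:

    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    // No longer extends junit.framework.TestCase, so the JUnit 4 runner
    // honors the annotation, including its timeout attribute.
    public class ExampleWindowsTest {
      @Test (timeout = 30000)
      public void testParsesSystemInfo() {
        assertTrue("replace with real assertions", true);
      }
    }
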
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/util/TestProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/util/TestProcessIdFileReader.java
index eaa1964..0f9e64f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/util/TestProcessIdFileReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/util/TestProcessIdFileReader.java
@@ -33,7 +33,7 @@
 public class TestProcessIdFileReader {
 
   
-  @Test
+  @Test (timeout = 30000)
   public void testNullPath() {
     String pid = null;
     try {
@@ -45,7 +45,7 @@
     assert(pid == null);
   }
   
-  @Test 
+  @Test (timeout = 30000)
   public void testSimpleGet() throws IOException {
     String rootDir = new File(System.getProperty(
         "test.build.data", "/tmp")).getAbsolutePath();
@@ -74,7 +74,7 @@
   }
 
     
-  @Test
+  @Test (timeout = 30000)
   public void testComplexGet() throws IOException {
     String rootDir = new File(System.getProperty(
         "test.build.data", "/tmp")).getAbsolutePath();