Merge r1293501 through r1293896 from 0.23.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23-PB@1293899 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/hadoop-assemblies/pom.xml b/hadoop-assemblies/pom.xml
index cdf4547..9e02692 100644
--- a/hadoop-assemblies/pom.xml
+++ b/hadoop-assemblies/pom.xml
@@ -20,12 +20,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-assemblies</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>Apache Hadoop Assemblies</name>
   <description>Apache Hadoop Assemblies</description>
 
diff --git a/hadoop-client/pom.xml b/hadoop-client/pom.xml
index fe6e3d5..df46b3a 100644
--- a/hadoop-client/pom.xml
+++ b/hadoop-client/pom.xml
@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-client</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Client</description>
diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-project/hadoop-annotations/pom.xml
index 8342ea4..8d3f33e 100644
--- a/hadoop-common-project/hadoop-annotations/pom.xml
+++ b/hadoop-common-project/hadoop-annotations/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-annotations</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Annotations</description>
   <name>Apache Hadoop Annotations</name>
   <packaging>jar</packaging>
diff --git a/hadoop-common-project/hadoop-auth-examples/pom.xml b/hadoop-common-project/hadoop-auth-examples/pom.xml
index 246528d..6544c5a 100644
--- a/hadoop-common-project/hadoop-auth-examples/pom.xml
+++ b/hadoop-common-project/hadoop-auth-examples/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-auth-examples</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>war</packaging>
 
   <name>Apache Hadoop Auth Examples</name>
diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
index bdcf5e2..d6134d9 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-auth</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <name>Apache Hadoop Auth</name>
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 35ab2ac..49c68c1 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -71,11 +71,24 @@
     HADOOP-7931. o.a.h.ipc.WritableRpcEngine should have a way to force
                  initialization (atm)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   NEW FEATURES
 
   IMPROVEMENTS
+
     HADOOP-8048. Allow merging of Credentials (Daryn Sharp via tgraves)
  
     HADOOP-8032. mvn site:stage-deploy should be able to use the scp protocol
@@ -85,6 +98,7 @@
     (szetszwo)
 
   OPTIMIZATIONS
+
     HADOOP-8071. Avoid an extra packet in client code when nagling is
     disabled. (todd)
 
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index ec87638..ab4ea710 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Common</description>
   <name>Apache Hadoop Common</name>
   <packaging>jar</packaging>
diff --git a/hadoop-common-project/pom.xml b/hadoop-common-project/pom.xml
index c9b03cd..d91ba8e 100644
--- a/hadoop-common-project/pom.xml
+++ b/hadoop-common-project/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Common Project</description>
   <name>Apache Hadoop Common Project</name>
   <packaging>pom</packaging>
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 2220d44..58c38a2 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Distribution</description>
   <name>Apache Hadoop Distribution</name>
   <packaging>jar</packaging>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
index d2b2feb..cabaad5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -19,12 +19,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs-httpfs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>war</packaging>
 
   <name>Apache Hadoop HttpFS</name>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index ea4ffe4..61a243f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -122,6 +122,20 @@
     HDFS-2968. Protocol translator for BlockRecoveryCommand broken when
     multiple blocks need recovery. (todd)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+    HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 0dcff87..467a1ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop HDFS</description>
   <name>Apache Hadoop HDFS</name>
   <packaging>jar</packaging>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 20f5971..7d3b3b9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -140,6 +140,8 @@
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.Storage.StorageDirType;
+import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.common.Util;
@@ -4423,6 +4425,30 @@
   public String getBlockPoolId() {
     return blockPoolId;
   }
+  
+  @Override  // NameNodeMXBean
+  public String getNameDirStatuses() {
+    Map<String, Map<File, StorageDirType>> statusMap =
+      new HashMap<String, Map<File, StorageDirType>>();
+    
+    Map<File, StorageDirType> activeDirs = new HashMap<File, StorageDirType>();
+    for (Iterator<StorageDirectory> it
+        = getFSImage().getStorage().dirIterator(); it.hasNext();) {
+      StorageDirectory st = it.next();
+      activeDirs.put(st.getRoot(), st.getStorageDirType());
+    }
+    statusMap.put("active", activeDirs);
+    
+    List<Storage.StorageDirectory> removedStorageDirs
+        = getFSImage().getStorage().getRemovedStorageDirs();
+    Map<File, StorageDirType> failedDirs = new HashMap<File, StorageDirType>();
+    for (StorageDirectory st : removedStorageDirs) {
+      failedDirs.put(st.getRoot(), st.getStorageDirType());
+    }
+    statusMap.put("failed", failedDirs);
+    
+    return JSON.toString(statusMap);
+  }
 
   /** @return the block manager. */
   public BlockManager getBlockManager() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java
index 6e1d8e7..1fb8869 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java
@@ -166,4 +166,12 @@
    * @return the block pool id
    */
   public String getBlockPoolId();
+
+  /**
+   * Get status information about the directories storing image and edits logs
+   * of the NN.
+   * 
+   * @return the name dir status information, as a JSON string.
+   */
+  public String getNameDirStatuses();
 }
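The new NameDirStatuses attribute is registered alongside the other NameNodeMXBean attributes, so it can be read over JMX like any of them. Below is a minimal reader sketch; the class name is hypothetical, and it assumes the NameNode registers under the usual "Hadoop:service=NameNode,name=NameNodeInfo" object name and that an MBeanServerConnection to the NameNode JVM is already open:

import java.util.Map;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import org.mortbay.util.ajax.JSON;

public class NameDirStatusReader {
  // Prints the active and failed name dirs reported by the NameNode.
  static void printNameDirStatuses(MBeanServerConnection mbs) throws Exception {
    ObjectName nn = new ObjectName("Hadoop:service=NameNode,name=NameNodeInfo");
    String json = (String) mbs.getAttribute(nn, "NameDirStatuses");
    // The attribute is a JSON map of the form
    // {"active": {dir -> storage type}, "failed": {dir -> storage type}}.
    @SuppressWarnings("unchecked")
    Map<String, Map<String, String>> statuses =
        (Map<String, Map<String, String>>) JSON.parse(json);
    System.out.println("active: " + statuses.get("active"));
    System.out.println("failed: " + statuses.get("failed"));
  }
}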
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
index 1701664..6647042 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
@@ -17,23 +17,33 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.junit.Assert.*;
+
+import java.io.File;
 import java.lang.management.ManagementFactory;
+import java.net.URI;
+import java.util.Collection;
+import java.util.Map;
 
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.junit.Test;
+import org.mortbay.util.ajax.JSON;
+
 import junit.framework.Assert;
 
 /**
  * Class for testing {@link NameNodeMXBean} implementation
  */
 public class TestNameNodeMXBean {
+  @SuppressWarnings({ "unchecked", "deprecation" })
   @Test
   public void testNameNodeMXBeanInfo() throws Exception {
     Configuration conf = new Configuration();
@@ -88,8 +98,46 @@
       String deadnodeinfo = (String) (mbs.getAttribute(mxbeanName,
           "DeadNodes"));
       Assert.assertEquals(fsn.getDeadNodes(), deadnodeinfo);
+      // get attribute NameDirStatuses
+      String nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      Assert.assertEquals(fsn.getNameDirStatuses(), nameDirStatuses);
+      Map<String, Map<String, String>> statusMap =
+        (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      Collection<URI> nameDirUris = cluster.getNameDirs(0);
+      for (URI nameDirUri : nameDirUris) {
+        File nameDir = new File(nameDirUri);
+        System.out.println("Checking for the presence of " + nameDir +
+            " in active name dirs.");
+        assertTrue(statusMap.get("active").containsKey(nameDir.getAbsolutePath()));
+      }
+      assertEquals(2, statusMap.get("active").size());
+      assertEquals(0, statusMap.get("failed").size());
+      
+      // This will cause the first dir to fail.
+      File failedNameDir = new File(nameDirUris.toArray(new URI[0])[0]);
+      assertEquals(0, FileUtil.chmod(failedNameDir.getAbsolutePath(), "000"));
+      cluster.getNameNodeRpc().rollEditLog();
+      
+      nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      statusMap = (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      for (URI nameDirUri : nameDirUris) {
+        File nameDir = new File(nameDirUri);
+        String expectedStatus =
+            nameDir.equals(failedNameDir) ? "failed" : "active";
+        System.out.println("Checking for the presence of " + nameDir +
+            " in " + expectedStatus + " name dirs.");
+        assertTrue(statusMap.get(expectedStatus).containsKey(
+            nameDir.getAbsolutePath()));
+      }
+      assertEquals(1, statusMap.get("active").size());
+      assertEquals(1, statusMap.get("failed").size());
     } finally {
       if (cluster != null) {
+        for (URI dir : cluster.getNameDirs(0)) {
+          FileUtil.chmod(new File(dir).toString(), "700");
+        }
         cluster.shutdown();
       }
     }
diff --git a/hadoop-hdfs-project/pom.xml b/hadoop-hdfs-project/pom.xml
index 2988ff7..90bf18d 100644
--- a/hadoop-hdfs-project/pom.xml
+++ b/hadoop-hdfs-project/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop HDFS Project</description>
   <name>Apache Hadoop HDFS Project</name>
   <packaging>pom</packaging>
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index e22241b..13717ed 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -29,6 +29,18 @@
    MAPREDUCE-2942. TestNMAuditLogger.testNMAuditLoggerWithIP failing (Thomas Graves 
    via mahadev)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -52,9 +64,13 @@
     MAPREDUCE-3730. Modified RM to allow restarted NMs to be able to join the
     cluster without waiting for expiry. (Jason Lowe via vinodkv)
 
+    MAPREDUCE-2793. Corrected AppIDs, JobIDs and TaskAttemptIDs to use the
+    correct format on the web pages. (Bikas Saha via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES
+
     MAPREDUCE-3918. proc_historyserver no longer in command line arguments for
     HistoryServer (Jon Eagles via bobby)
 
@@ -90,6 +106,13 @@
     MAPREDUCE-3904 Job history produced with mapreduce.cluster.acls.enabled
     false can not be viewed with mapreduce.cluster.acls.enabled true 
     (Jonathon Eagles via tgraves)
+
+    MAPREDUCE-3910. Fixed a bug in CapacityScheduler LeafQueue which was causing
+    app-submission to fail. (John George via vinodkv)
+
+    MAPREDUCE-3686. Fixed two bugs in Counters that caused the web app to
+    display zero values for framework counters. (Bhallamudi Venkata Siva Kamesh
+    via vinodkv)
  
 Release 0.23.1 - 2012-02-17 
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
index 61a8ce4..4e20c05 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-app</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-app</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index 01c5c95..f62cba0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -99,6 +99,14 @@
     try {
       jobId = MRApps.toJobID(jid);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to do that automatically for
+      // unhandled exceptions.
+      throw new NotFoundException(e.getMessage());
+    } catch (IllegalArgumentException e) {
       throw new NotFoundException(e.getMessage());
     }
     if (jobId == null) {
@@ -121,10 +129,18 @@
     try {
       taskID = MRApps.toTaskID(tid);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to do that automatically for
+      // unhandled exceptions.
       throw new NotFoundException(e.getMessage());
     } catch (NumberFormatException ne) {
       throw new NotFoundException(ne.getMessage());
-    }
+    } catch (IllegalArgumentException e) {
+      throw new NotFoundException(e.getMessage());
+    } 
     if (taskID == null) {
       throw new NotFoundException("taskid " + tid + " not found or invalid");
     }
@@ -146,9 +162,17 @@
     try {
       attemptId = MRApps.toTaskAttemptID(attId);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to do that automatically for
+      // unhandled exceptions.
       throw new NotFoundException(e.getMessage());
     } catch (NumberFormatException ne) {
       throw new NotFoundException(ne.getMessage());
+    } catch (IllegalArgumentException e) {
+      throw new NotFoundException(e.getMessage());
     }
     if (attemptId == null) {
       throw new NotFoundException("task attempt id " + attId
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
index c8e20f6..cfecb32 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
@@ -106,6 +106,20 @@
     return newAppName();
   }
 
+  /**
+   * Creates numJobs jobs in a map, with each job's appId equal to its jobId.
+   */
+  public static Map<JobId, Job> newJobs(int numJobs, int numTasksPerJob,
+      int numAttemptsPerTask) {
+    Map<JobId, Job> map = Maps.newHashMap();
+    for (int j = 0; j < numJobs; ++j) {
+      ApplicationId appID = MockJobs.newAppID(j);
+      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
+      map.put(job.getID(), job);
+    }
+    return map;
+  }
+  
   public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
       int numTasksPerJob, int numAttemptsPerTask) {
     Map<JobId, Job> map = Maps.newHashMap();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
index e33a506..9be01d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
@@ -396,36 +396,36 @@
   public void testTaskAttemptIdBogus() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric("bogusid",
-        "java.lang.Exception: Error parsing attempt ID: bogusid");
+        "java.lang.Exception: TaskAttemptId string : bogusid is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdNonExist() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric(
-        "attempt_12345_0_0_r_1_0",
-        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+        "attempt_0_12345_m_000000_0",
+        "java.lang.Exception: Error getting info on task attempt id attempt_0_12345_m_000000_0");
   }
 
   @Test
   public void testTaskAttemptIdInvalid() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
-        "java.lang.Exception: Unknown task symbol: d");
+    testTaskAttemptIdErrorGeneric("attempt_0_12345_d_000000_0",
+        "java.lang.Exception: Bad TaskType identifier. TaskAttemptId string : attempt_0_12345_d_000000_0 is not properly formed.");
   }
 
   @Test
   public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
-        "java.lang.Exception: For input string: \"r\"");
+    testTaskAttemptIdErrorGeneric("attempt_12345_m_000000_0",
+        "java.lang.Exception: TaskAttemptId string : attempt_12345_m_000000_0 is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
-        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+    testTaskAttemptIdErrorGeneric("attempt_0_12345_m_000000",
+        "java.lang.Exception: TaskAttemptId string : attempt_0_12345_m_000000 is not properly formed");
   }
 
   private void testTaskAttemptIdErrorGeneric(String attid, String error)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
index a0846e4..1ede672 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
@@ -320,7 +320,7 @@
 
     try {
       r.path("ws").path("v1").path("mapreduce").path("jobs")
-          .path("job_1234_1_2").get(JSONObject.class);
+          .path("job_0_1234").get(JSONObject.class);
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
@@ -333,7 +333,7 @@
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: job, job_1234_1_2, is not found", message);
+          "java.lang.Exception: job, job_0_1234, is not found", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -351,7 +351,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -374,7 +374,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -397,7 +397,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
       String msg = response.getEntity(String.class);
       System.out.println(msg);
@@ -418,11 +418,12 @@
 
   private void verifyJobIdInvalid(String message, String type, String classname) {
     WebServicesTestUtils.checkStringMatch("exception message",
-        "For input string: \"foo\"", message);
+        "java.lang.Exception: JobId string : job_foo is not properly formed",
+        message);
     WebServicesTestUtils.checkStringMatch("exception type",
-        "NumberFormatException", type);
+        "NotFoundException", type);
     WebServicesTestUtils.checkStringMatch("exception classname",
-        "java.lang.NumberFormatException", classname);
+        "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
   }
 
   @Test
@@ -443,8 +444,11 @@
       String message = exception.getString("message");
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
-      WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing job ID: bogusfoo", message);
+      WebServicesTestUtils
+          .checkStringMatch(
+              "exception message",
+              "java.lang.Exception: JobId string : bogusfoo is not properly formed",
+              message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
index e3fdd93..0fdcba8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
@@ -424,7 +424,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+            "java.lang.Exception: TaskId string : "
+                + "bogustaskid is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -439,7 +440,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m_0";
+      String tid = "task_0_0000_m_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -455,7 +456,7 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+            "java.lang.Exception: task not found with id task_0_0000_m_000000",
             message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
@@ -471,7 +472,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_d_0";
+      String tid = "task_0_0000_d_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -487,7 +488,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Unknown task symbol: d", message);
+            "java.lang.Exception: Bad TaskType identifier. TaskId string : "
+                + "task_0_0000_d_000000 is not properly formed.", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -502,7 +504,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_m_0";
+      String tid = "task_0_m_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -518,7 +520,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: For input string: \"m\"", message);
+            "java.lang.Exception: TaskId string : "
+                + "task_0_m_000000 is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -533,7 +536,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m";
+      String tid = "task_0_0000_m";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -549,8 +552,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
-            message);
+            "java.lang.Exception: TaskId string : "
+                + "task_0_0000_m is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index 60ee5e1..a37558f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-common</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
index c1da2fe..a42f9eb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
@@ -506,11 +506,9 @@
       sb.append(address.getHostName());
     }
     sb.append(":").append(address.getPort());
-    sb.append("/jobhistory/job/"); // TODO This will change when the history server
-                            // understands apps.
-    // TOOD Use JobId toString once UI stops using _id_id
-    sb.append("job_").append(appId.getClusterTimestamp());
-    sb.append("_").append(appId.getId()).append("_").append(appId.getId());
+    sb.append("/jobhistory/job/");
+    JobID jobId = TypeConverter.fromYarn(appId);
+    sb.append(jobId.toString());
     return sb.toString();
   }
 }
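With this change the history link is built from the canonical JobID string instead of the hand-rolled "_id_id" suffix. A small sketch of the difference, using hypothetical timestamp and sequence-number values, and assuming BuilderUtils exposes newApplicationId(long, int) as it does elsewhere in this branch:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.BuilderUtils;

public class HistoryUrlSuffix {
  public static void main(String[] args) {
    // Hypothetical cluster timestamp and application sequence number.
    ApplicationId appId = BuilderUtils.newApplicationId(1330000000000L, 1);
    // The old code appended appId.getId() twice: "job_1330000000000_1_1".
    JobID jobId = TypeConverter.fromYarn(appId);
    System.out.println(jobId);  // job_1330000000000_0001
  }
}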
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 572ebde..3849795 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.util;
 
-import static org.apache.hadoop.yarn.util.StringHelper._join;
-import static org.apache.hadoop.yarn.util.StringHelper._split;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
@@ -30,7 +27,6 @@
 import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -39,7 +35,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
@@ -50,12 +50,10 @@
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
@@ -65,64 +63,28 @@
 @Private
 @Unstable
 public class MRApps extends Apps {
-  public static final String JOB = "job";
-  public static final String TASK = "task";
-  public static final String ATTEMPT = "attempt";
-
   public static String toString(JobId jid) {
-    return _join(JOB, jid.getAppId().getClusterTimestamp(), jid.getAppId().getId(), jid.getId());
+    return jid.toString();
   }
 
   public static JobId toJobID(String jid) {
-    Iterator<String> it = _split(jid).iterator();
-    return toJobID(JOB, jid, it);
-  }
-
-  // mostly useful for parsing task/attempt id like strings
-  public static JobId toJobID(String prefix, String s, Iterator<String> it) {
-    ApplicationId appId = toAppID(prefix, s, it);
-    shouldHaveNext(prefix, s, it);
-    JobId jobId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
-    jobId.setAppId(appId);
-    jobId.setId(Integer.parseInt(it.next()));
-    return jobId;
+    return TypeConverter.toYarn(JobID.forName(jid));
   }
 
   public static String toString(TaskId tid) {
-    return _join("task", tid.getJobId().getAppId().getClusterTimestamp(), tid.getJobId().getAppId().getId(),
-                 tid.getJobId().getId(), taskSymbol(tid.getTaskType()), tid.getId());
+    return tid.toString();
   }
 
   public static TaskId toTaskID(String tid) {
-    Iterator<String> it = _split(tid).iterator();
-    return toTaskID(TASK, tid, it);
-  }
-
-  public static TaskId toTaskID(String prefix, String s, Iterator<String> it) {
-    JobId jid = toJobID(prefix, s, it);
-    shouldHaveNext(prefix, s, it);
-    TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
-    tid.setJobId(jid);
-    tid.setTaskType(taskType(it.next()));
-    shouldHaveNext(prefix, s, it);
-    tid.setId(Integer.parseInt(it.next()));
-    return tid;
+    return TypeConverter.toYarn(TaskID.forName(tid));
   }
 
   public static String toString(TaskAttemptId taid) {
-    return _join("attempt", taid.getTaskId().getJobId().getAppId().getClusterTimestamp(),
-                 taid.getTaskId().getJobId().getAppId().getId(), taid.getTaskId().getJobId().getId(),
-                 taskSymbol(taid.getTaskId().getTaskType()), taid.getTaskId().getId(), taid.getId());
+    return taid.toString(); 
   }
 
   public static TaskAttemptId toTaskAttemptID(String taid) {
-    Iterator<String> it = _split(taid).iterator();
-    TaskId tid = toTaskID(ATTEMPT, taid, it);
-    shouldHaveNext(ATTEMPT, taid, it);
-    TaskAttemptId taId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
-    taId.setTaskId(tid);
-    taId.setId(Integer.parseInt(it.next()));
-    return taId;
+    return TypeConverter.toYarn(TaskAttemptID.forName(taid));
   }
 
   public static String taskSymbol(TaskType type) {
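The four helpers now delegate to the standard mapreduce ID classes, so the web layer accepts exactly the canonical formats (job_&lt;timestamp&gt;_&lt;seq&gt;, task_&lt;timestamp&gt;_&lt;seq&gt;_&lt;m|r&gt;_&lt;num&gt;, attempt_&lt;timestamp&gt;_&lt;seq&gt;_&lt;m|r&gt;_&lt;num&gt;_&lt;attempt&gt;), and malformed strings surface as IllegalArgumentException rather than YarnException. A minimal round-trip sketch under those assumptions (class name is illustrative):

import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class IdRoundTrip {
  public static void main(String[] args) {
    // Canonical attempt id strings parse and print back unchanged.
    TaskAttemptId id = MRApps.toTaskAttemptID("attempt_0_0001_m_000000_0");
    System.out.println(MRApps.toString(id));  // attempt_0_0001_m_000000_0

    // Strings with too few fields are rejected with a descriptive message.
    try {
      MRApps.toTaskAttemptID("attempt_0_0_m_0");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
      // TaskAttemptId string : attempt_0_0_m_0 is not properly formed
    }
  }
}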
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index 715b6c6..94ce417 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -43,18 +43,18 @@
   @Test public void testJobIDtoString() {
     JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
     jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
-    assertEquals("job_0_0_0", MRApps.toString(jid));
+    assertEquals("job_0_0000", MRApps.toString(jid));
   }
 
   @Test public void testToJobID() {
-    JobId jid = MRApps.toJobID("job_1_1_1");
+    JobId jid = MRApps.toJobID("job_1_1");
     assertEquals(1, jid.getAppId().getClusterTimestamp());
     assertEquals(1, jid.getAppId().getId());
-    assertEquals(1, jid.getId());
+    assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field
   }
 
-  @Test(expected=YarnException.class) public void testJobIDShort() {
-    MRApps.toJobID("job_0_0");
+  @Test(expected=IllegalArgumentException.class) public void testJobIDShort() {
+    MRApps.toJobID("job_0_0_0");
   }
 
   //TODO_get.set
@@ -68,29 +68,29 @@
     type = TaskType.REDUCE;
     System.err.println(type);
     System.err.println(tid.getTaskType());
-    assertEquals("task_0_0_0_m_0", MRApps.toString(tid));
+    assertEquals("task_0_0000_m_000000", MRApps.toString(tid));
     tid.setTaskType(TaskType.REDUCE);
-    assertEquals("task_0_0_0_r_0", MRApps.toString(tid));
+    assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
   }
 
   @Test public void testToTaskID() {
-    TaskId tid = MRApps.toTaskID("task_1_2_3_r_4");
+    TaskId tid = MRApps.toTaskID("task_1_2_r_3");
     assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp());
     assertEquals(2, tid.getJobId().getAppId().getId());
-    assertEquals(3, tid.getJobId().getId());
+    assertEquals(2, tid.getJobId().getId());
     assertEquals(TaskType.REDUCE, tid.getTaskType());
-    assertEquals(4, tid.getId());
+    assertEquals(3, tid.getId());
 
-    tid = MRApps.toTaskID("task_1_2_3_m_4");
+    tid = MRApps.toTaskID("task_1_2_m_3");
     assertEquals(TaskType.MAP, tid.getTaskType());
   }
 
-  @Test(expected=YarnException.class) public void testTaskIDShort() {
-    MRApps.toTaskID("task_0_0_0_m");
+  @Test(expected=IllegalArgumentException.class) public void testTaskIDShort() {
+    MRApps.toTaskID("task_0_0000_m");
   }
 
-  @Test(expected=YarnException.class) public void testTaskIDBadType() {
-    MRApps.toTaskID("task_0_0_0_x_0");
+  @Test(expected=IllegalArgumentException.class) public void testTaskIDBadType() {
+    MRApps.toTaskID("task_0_0000_x_000000");
   }
 
   //TODO_get.set
@@ -100,19 +100,19 @@
     taid.getTaskId().setTaskType(TaskType.MAP);
     taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
     taid.getTaskId().getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
-    assertEquals("attempt_0_0_0_m_0_0", MRApps.toString(taid));
+    assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
   }
 
   @Test public void testToTaskAttemptID() {
-    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_2_m_3_4");
+    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
     assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp());
     assertEquals(1, taid.getTaskId().getJobId().getAppId().getId());
-    assertEquals(2, taid.getTaskId().getJobId().getId());
-    assertEquals(3, taid.getTaskId().getId());
-    assertEquals(4, taid.getId());
+    assertEquals(1, taid.getTaskId().getJobId().getId());
+    assertEquals(2, taid.getTaskId().getId());
+    assertEquals(3, taid.getId());
   }
 
-  @Test(expected=YarnException.class) public void testTaskAttemptIDShort() {
+  @Test(expected=IllegalArgumentException.class) public void testTaskAttemptIDShort() {
     MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
   }
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
index c4e82af..fb48be5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-core</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-core</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskAttemptID.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskAttemptID.java
index 70fcf10..f2467f0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskAttemptID.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskAttemptID.java
@@ -159,6 +159,7 @@
                                       ) throws IllegalArgumentException {
     if(str == null)
       return null;
+    String exceptionMsg = null;
     try {
       String[] parts = str.split(Character.toString(SEPARATOR));
       if(parts.length == 6) {
@@ -171,14 +172,19 @@
              Integer.parseInt(parts[2]),
              t, Integer.parseInt(parts[4]), 
              Integer.parseInt(parts[5]));  
-          } else throw new Exception();
+          } else
+            exceptionMsg = "Bad TaskType identifier. TaskAttemptId string : "
+                + str + " is not properly formed.";
         }
       }
     } catch (Exception ex) {
       //fall below
     }
-    throw new IllegalArgumentException("TaskAttemptId string : " + str 
-        + " is not properly formed");
+    if (exceptionMsg == null) {
+      exceptionMsg = "TaskAttemptId string : " + str
+          + " is not properly formed";
+    }
+    throw new IllegalArgumentException(exceptionMsg);
   }
 
 }
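The rewritten error path keeps the most specific failure it saw: a bad task-type character reports itself, while anything else falls through to the generic message, instead of the old throw new Exception() that erased the distinction. A short sketch of both messages, mirroring the expectations in this patch's web-service tests (class name is illustrative):

import org.apache.hadoop.mapreduce.TaskAttemptID;

public class ForNameMessages {
  static void show(String s) {
    try {
      TaskAttemptID.forName(s);
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }

  public static void main(String[] args) {
    show("attempt_0_1234_d_000000_0");
    // Bad TaskType identifier. TaskAttemptId string :
    // attempt_0_1234_d_000000_0 is not properly formed.
    show("attempt_0_1234_m_000000");
    // TaskAttemptId string : attempt_0_1234_m_000000 is not properly formed
  }
}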
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
index c98ec90..3dc2bab 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
@@ -184,6 +184,7 @@
     throws IllegalArgumentException {
     if(str == null)
       return null;
+    String exceptionMsg = null;
     try {
       String[] parts = str.split("_");
       if(parts.length == 5) {
@@ -196,13 +197,17 @@
                                                      Integer.parseInt(parts[2]),
                                                      t, 
                                                      Integer.parseInt(parts[4]));
-          } else throw new Exception();
+          } else
+            exceptionMsg = "Bad TaskType identifier. TaskId string : " + str
+                + " is not properly formed.";
         }
       }
     }catch (Exception ex) {//fall below
     }
-    throw new IllegalArgumentException("TaskId string : " + str 
-        + " is not properly formed");
+    if (exceptionMsg == null) {
+      exceptionMsg = "TaskId string : " + str + " is not properly formed";
+    }
+    throw new IllegalArgumentException(exceptionMsg);
   }
   /**
    * Gets the character representing the {@link TaskType}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
index 73434ae..768f1c7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
@@ -326,12 +326,10 @@
    */
   public synchronized void incrAllCounters(AbstractCounters<C, G> other) {
     for(G right : other) {
-      G left = groups.get(right.getName());
+      String groupName = right.getName();
+      G left = (isFrameworkGroup(groupName) ? fgroups : groups).get(groupName);
       if (left == null) {
-        limits.checkGroups(groups.size() + 1);
-        left = groupFactory.newGroup(right.getName(), right.getDisplayName(),
-                                     limits);
-        groups.put(right.getName(), left);
+        left = addGroup(groupName, right.getDisplayName());
       }
       left.incrAllCounters(right);
     }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java
index a402f74..7f392f2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java
@@ -107,6 +107,8 @@
     if (gf != null) return gf.newGroup(name);
     if (name.equals(FS_GROUP_NAME)) {
       return newFileSystemGroup();
+    } else if (s2i.get(name) != null) {
+      return newFrameworkGroup(s2i.get(name));
     }
     return newGenericGroup(name, displayName, limits);
   }
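These two counter fixes work together: CounterGroupFactory.newGroup now recognizes framework group names via the s2i index instead of silently minting a generic group, and incrAllCounters merges such groups into fgroups, so framework counters survive aggregation. A small sketch of the behavior MAPREDUCE-3686 restores, using the public Counters API (class name is illustrative):

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterMergeDemo {
  public static void main(String[] args) {
    Counters task = new Counters();
    task.findCounter(TaskCounter.MAP_INPUT_RECORDS).increment(5);

    // Aggregate into a fresh Counters, as the web app does when summing
    // per-task counters. Before this fix the framework group landed in the
    // generic-group map and the framework view read back as zero.
    Counters total = new Counters();
    total.incrAllCounters(task);
    System.out.println(
        total.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());  // 5
  }
}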
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
index 4d2bfbb..97e08a2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-hs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-hs</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
index 7ba200f..94a9f7a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
@@ -408,36 +408,40 @@
   public void testTaskAttemptIdBogus() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric("bogusid",
-        "java.lang.Exception: Error parsing attempt ID: bogusid");
+        "java.lang.Exception: TaskAttemptId string : "
+            + "bogusid is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdNonExist() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric(
-        "attempt_12345_0_0_r_1_0",
-        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+        "attempt_0_1234_m_000000_0",
+        "java.lang.Exception: Error getting info on task attempt id attempt_0_1234_m_000000_0");
   }
 
   @Test
   public void testTaskAttemptIdInvalid() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
-        "java.lang.Exception: Unknown task symbol: d");
+    testTaskAttemptIdErrorGeneric("attempt_0_1234_d_000000_0",
+        "java.lang.Exception: Bad TaskType identifier. TaskAttemptId string : "
+            + "attempt_0_1234_d_000000_0 is not properly formed.");
   }
 
   @Test
   public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
-        "java.lang.Exception: For input string: \"r\"");
+    testTaskAttemptIdErrorGeneric("attempt_1234_m_000000_0",
+        "java.lang.Exception: TaskAttemptId string : "
+            + "attempt_1234_m_000000_0 is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
-        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+    testTaskAttemptIdErrorGeneric("attempt_0_1234_m_000000",
+        "java.lang.Exception: TaskAttemptId string : "
+            + "attempt_0_1234_m_000000 is not properly formed");
   }
 
   private void testTaskAttemptIdErrorGeneric(String attid, String error)
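
The rewritten expectations above track the exact messages thrown by the ID parser, and the sample IDs now follow the canonical attempt_<clusterTimestamp>_<jobSeq>_<m|r>_<task>_<attempt> shape. A small sketch against the standard org.apache.hadoop.mapreduce.TaskAttemptID API (the wrapper class name is illustrative):

    import org.apache.hadoop.mapreduce.TaskAttemptID;

    class AttemptIdFormatSketch {
      public static void main(String[] args) {
        // Well formed: all six underscore-separated fields are present
        TaskAttemptID ok = TaskAttemptID.forName("attempt_0_1234_m_000000_0");
        System.out.println(ok.getTaskID());

        try {
          // Missing the trailing attempt number, as in testTaskAttemptIdInvalid3
          TaskAttemptID.forName("attempt_0_1234_m_000000");
        } catch (IllegalArgumentException e) {
          // "TaskAttemptId string : attempt_0_1234_m_000000 is not properly formed"
          System.out.println(e.getMessage());
        }
      }
    }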
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
index fd81180..ec7df9b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
@@ -367,7 +367,7 @@
 
     try {
       r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
-          .path("job_1234_1_2").get(JSONObject.class);
+          .path("job_0_1234").get(JSONObject.class);
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
@@ -380,7 +380,7 @@
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: job, job_1234_1_2, is not found", message);
+          "java.lang.Exception: job, job_0_1234, is not found", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -399,7 +399,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -423,7 +423,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -447,7 +447,7 @@
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
       String msg = response.getEntity(String.class);
       System.out.println(msg);
@@ -468,11 +468,12 @@
 
   private void verifyJobIdInvalid(String message, String type, String classname) {
     WebServicesTestUtils.checkStringMatch("exception message",
-        "For input string: \"foo\"", message);
+        "java.lang.Exception: JobId string : job_foo is not properly formed",
+        message);
     WebServicesTestUtils.checkStringMatch("exception type",
-        "NumberFormatException", type);
+        "NotFoundException", type);
     WebServicesTestUtils.checkStringMatch("exception classname",
-        "java.lang.NumberFormatException", classname);
+        "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
   }
 
   @Test
@@ -494,7 +495,8 @@
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing job ID: bogusfoo", message);
+          "java.lang.Exception: JobId string : "
+              + "bogusfoo is not properly formed", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
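
These assertions capture a behavior change in the history server's web services: a malformed job ID now surfaces as org.apache.hadoop.yarn.webapp.NotFoundException (HTTP 404) carrying the parser's message, instead of letting a NumberFormatException escape on the 400 path. A hedged sketch of the server-side pattern this implies (hypothetical helper, not the actual HsWebServices code):

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.util.MRApps;
    import org.apache.hadoop.yarn.webapp.NotFoundException;

    class JobIdLookupSketch {
      // Any parse failure becomes a uniform 404 with a descriptive message.
      static JobId parseJobId(String jid) {
        try {
          return MRApps.toJobID(jid);
        } catch (Exception e) {
          // e.g. "JobId string : job_foo is not properly formed"
          throw new NotFoundException(e.getMessage());
        }
      }
    }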
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
index 74af1f6..7376798 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -72,30 +72,26 @@
   private static HsWebApp webApp;
 
   static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
     final String user = MockJobs.newUserName();
     final Map<JobId, Job> jobs;
     final long startTime = System.currentTimeMillis();
 
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+    TestAppContext(int numJobs, int numTasks, int numAttempts) {
+      jobs = MockJobs.newJobs(numJobs, numTasks, numAttempts);
     }
 
     TestAppContext() {
-      this(0, 3, 2, 1);
+      this(3, 2, 1);
     }
 
     @Override
     public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
+      return null;
     }
 
     @Override
     public ApplicationId getApplicationID() {
-      return appID;
+      return null;
     }
 
     @Override
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
index b0780aff..3ada5be 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
@@ -435,7 +435,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+            "java.lang.Exception: TaskId string : "
+                + "bogustaskid is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -450,7 +451,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m_0";
+      String tid = "task_0_0000_m_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -466,7 +467,7 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+            "java.lang.Exception: task not found with id task_0_0000_m_000000",
             message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
@@ -482,7 +483,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_d_0";
+      String tid = "task_0_0000_d_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -498,7 +499,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Unknown task symbol: d", message);
+            "java.lang.Exception: Bad TaskType identifier. TaskId string : "
+                + "task_0_0000_d_000000 is not properly formed.", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -513,7 +515,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_m_0";
+      String tid = "task_0000_m_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -529,7 +531,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: For input string: \"m\"", message);
+            "java.lang.Exception: TaskId string : "
+                + "task_0000_m_000000 is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -544,7 +547,7 @@
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m";
+      String tid = "task_0_0000_m";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -560,8 +563,8 @@
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
-            message);
+            "java.lang.Exception: TaskId string : "
+                + "task_0_0000_m is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
index 0a2c957..e6f4610 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-jobclient</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestCounters.java
index ccdf516d..7b85bd4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestCounters.java
@@ -70,6 +70,29 @@
       testMaxGroups(new Counters());
     }
   }
+
+  @Test
+  public void testCountersIncrement() {
+    Counters fCounters = new Counters();
+    Counter fCounter = fCounters.findCounter(FRAMEWORK_COUNTER);
+    fCounter.setValue(100);
+    Counter gCounter = fCounters.findCounter("test", "foo");
+    gCounter.setValue(200);
+
+    Counters counters = new Counters();
+    counters.incrAllCounters(fCounters);
+    Counter counter;
+    for (CounterGroup cg : fCounters) {
+      CounterGroup group = counters.getGroup(cg.getName());
+      if (group.getName().equals("test")) {
+        counter = counters.findCounter("test", "foo");
+        assertEquals(200, counter.getValue());
+      } else {
+        counter = counters.findCounter(FRAMEWORK_COUNTER);
+        assertEquals(100, counter.getValue());
+      }
+    }
+  }
 
   static final Enum<?> FRAMEWORK_COUNTER = TaskCounter.CPU_MILLISECONDS;
   static final long FRAMEWORK_COUNTER_VALUE = 8;
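
The new testCountersIncrement above pins down aggregation across both counter kinds: incrAllCounters must add framework groups (resolved by ordinal) and generic groups (resolved by name) alike. Minimal standalone usage of the same org.apache.hadoop.mapreduce.Counters API, for reference:

    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.TaskCounter;

    class CountersAggregateSketch {
      public static void main(String[] args) {
        Counters total = new Counters();
        Counters delta = new Counters();
        // One framework counter and one generic counter, as in the test
        delta.findCounter(TaskCounter.CPU_MILLISECONDS).increment(100);
        delta.findCounter("test", "foo").increment(200);
        total.incrAllCounters(delta); // both group kinds must aggregate
        System.out.println(total.findCounter("test", "foo").getValue()); // 200
      }
    }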
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/pom.xml
index 4230d7b..30ee492 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-shuffle</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
index 02f4cc1..a21cee7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client</name>
   <packaging>pom</packaging>
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
index c06815e..3a560ff 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-examples</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop MapReduce Examples</description>
   <name>Apache Hadoop MapReduce Examples</name>
   <packaging>jar</packaging>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/pom.xml
index 0885683..dd5eb45 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-api</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-api</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
index 3468f8f..cc58a34 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-applications</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-applications-distributedshell</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-applications-distributedshell</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/pom.xml
index be89bda..a7b1010 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-applications</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-applications</name>
   <packaging>pom</packaging>
 
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 88d03f0..90bf8da 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-common</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
index 263fa57..413e2f7 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-common</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index dea4758..bd10a90 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-nodemanager</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
index d7a9050..c3ab120 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-resourcemanager</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
index f21cfc2..91f1c8e 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
@@ -572,12 +572,7 @@
     // Careful! Locking order is important!
 
     // Check queue ACLs
-    UserGroupInformation userUgi;
-    try {
-      userUgi = UserGroupInformation.getCurrentUser();
-    } catch (IOException ioe) {
-      throw new AccessControlException(ioe);
-    }
+    UserGroupInformation userUgi = UserGroupInformation.createRemoteUser(userName);
     if (!hasAccess(QueueACL.SUBMIT_APPLICATIONS, userUgi)) {
       throw new AccessControlException("User " + userName + " cannot submit" +
           " applications to queue " + getQueuePath());
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java
index 52430e8..75e0d20 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java
@@ -119,10 +119,11 @@
   private static final String B = "b";
   private static final String C = "c";
   private static final String C1 = "c1";
+  private static final String D = "d";
   private void setupQueueConfiguration(CapacitySchedulerConfiguration conf) {
     
     // Define top-level queues
-    conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {A, B, C});
+    conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {A, B, C, D});
     conf.setCapacity(CapacitySchedulerConfiguration.ROOT, 100);
     conf.setMaximumCapacity(CapacitySchedulerConfiguration.ROOT, 100);
     conf.setAcl(CapacitySchedulerConfiguration.ROOT, QueueACL.SUBMIT_APPLICATIONS, " ");
@@ -133,7 +134,7 @@
     conf.setAcl(Q_A, QueueACL.SUBMIT_APPLICATIONS, "*");
     
     final String Q_B = CapacitySchedulerConfiguration.ROOT + "." + B;
-    conf.setCapacity(Q_B, 90);
+    conf.setCapacity(Q_B, 80);
     conf.setMaximumCapacity(Q_B, 99);
     conf.setAcl(Q_B, QueueACL.SUBMIT_APPLICATIONS, "*");
 
@@ -146,6 +147,11 @@
 
     final String Q_C1 = Q_C + "." + C1;
     conf.setCapacity(Q_C1, 100);
+
+    final String Q_D = CapacitySchedulerConfiguration.ROOT + "." + D;
+    conf.setCapacity(Q_D, 10);
+    conf.setMaximumCapacity(Q_D, 11);
+    conf.setAcl(Q_D, QueueACL.SUBMIT_APPLICATIONS, "user_d");
     
   }
 
@@ -202,8 +208,8 @@
 	  assertEquals(0.2, a.getAbsoluteMaximumCapacity(), epsilon);
 	  
 	  LeafQueue b = stubLeafQueue((LeafQueue)queues.get(B));
-	  assertEquals(0.9, b.getCapacity(), epsilon);
-	  assertEquals(0.9, b.getAbsoluteCapacity(), epsilon);
+	  assertEquals(0.80, b.getCapacity(), epsilon);
+	  assertEquals(0.80, b.getAbsoluteCapacity(), epsilon);
 	  assertEquals(0.99, b.getMaximumCapacity(), epsilon);
 	  assertEquals(0.99, b.getAbsoluteMaximumCapacity(), epsilon);
 
@@ -257,10 +263,34 @@
     
     // Only 1 container
     a.assignContainers(clusterResource, node_0);
-    assertEquals(7*GB, a.getMetrics().getAvailableMB());
+    assertEquals(6*GB, a.getMetrics().getAvailableMB());
   }
 
   @Test
+  public void testUserQueueAcl() throws Exception {
+
+    // Manipulate queue 'd'
+    LeafQueue d = stubLeafQueue((LeafQueue) queues.get(D));
+
+    // Users
+    final String user_d = "user_d";
+
+    // Submit applications
+    final ApplicationAttemptId appAttemptId_0 = TestUtils
+        .getMockApplicationAttemptId(0, 1);
+    SchedulerApp app_0 = new SchedulerApp(appAttemptId_0, user_d, d, null,
+        rmContext, null);
+    d.submitApplication(app_0, user_d, D);
+
+    // Attempt the same application again
+    final ApplicationAttemptId appAttemptId_1 = TestUtils
+        .getMockApplicationAttemptId(0, 2);
+    SchedulerApp app_1 = new SchedulerApp(appAttemptId_1, user_d, d, null,
+        rmContext, null);
+    d.submitApplication(app_1, user_d, D); // same user
+  }
+
+  @Test
   public void testAppAttemptMetrics() throws Exception {
 
     // Manipulate queue 'a'
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
index 673a93d..5f9b14d 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
@@ -16,11 +16,11 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-tests</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-tests</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
index b15b54e..081a8cd 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
@@ -16,7 +16,7 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml
index 0235045..38d32b0 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server</name>
   <packaging>pom</packaging>
 
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/pom.xml
index 284990f..9cfb5ff 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/pom.xml
@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-site</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-site</name>
 
   <properties>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/pom.xml
index 40f9476..085a38c 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hadoop-yarn</name>
 
diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
index eac1040..6a112c4 100644
--- a/hadoop-mapreduce-project/pom.xml
+++ b/hadoop-mapreduce-project/pom.xml
@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hadoop-mapreduce</name>
   <url>http://hadoop.apache.org/mapreduce/</url>
diff --git a/hadoop-minicluster/pom.xml b/hadoop-minicluster/pom.xml
index e2c36a8..417cd4a 100644
--- a/hadoop-minicluster/pom.xml
+++ b/hadoop-minicluster/pom.xml
@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-minicluster</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Mini-Cluster</description>
diff --git a/hadoop-project-dist/pom.xml b/hadoop-project-dist/pom.xml
index cd61eed..cb297f1 100644
--- a/hadoop-project-dist/pom.xml
+++ b/hadoop-project-dist/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-project-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Project Dist POM</description>
   <name>Apache Hadoop Project Dist POM</name>
   <packaging>pom</packaging>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index a59b636..782fb04 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -17,11 +17,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-main</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Project POM</description>
   <name>Apache Hadoop Project POM</name>
   <packaging>pom</packaging>
diff --git a/hadoop-tools/hadoop-archives/pom.xml b/hadoop-tools/hadoop-archives/pom.xml
index eee196b..7faafbe 100644
--- a/hadoop-tools/hadoop-archives/pom.xml
+++ b/hadoop-tools/hadoop-archives/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-archives</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Archives</description>
   <name>Apache Hadoop Archives</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/hadoop-distcp/pom.xml b/hadoop-tools/hadoop-distcp/pom.xml
index 6e36414..87f6cf4 100644
--- a/hadoop-tools/hadoop-distcp/pom.xml
+++ b/hadoop-tools/hadoop-distcp/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-distcp</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Distributed Copy</description>
   <name>Apache Hadoop Distributed Copy</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/hadoop-extras/pom.xml b/hadoop-tools/hadoop-extras/pom.xml
index 8592186..54bf347 100644
--- a/hadoop-tools/hadoop-extras/pom.xml
+++ b/hadoop-tools/hadoop-extras/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-extras</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Extras</description>
   <name>Apache Hadoop Extras</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/hadoop-rumen/pom.xml b/hadoop-tools/hadoop-rumen/pom.xml
index be8b83c..2c8f8a2 100644
--- a/hadoop-tools/hadoop-rumen/pom.xml
+++ b/hadoop-tools/hadoop-rumen/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-rumen</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Rumen</description>
   <name>Apache Hadoop Rumen</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/hadoop-streaming/pom.xml b/hadoop-tools/hadoop-streaming/pom.xml
index 8697fc7..2c575ea 100644
--- a/hadoop-tools/hadoop-streaming/pom.xml
+++ b/hadoop-tools/hadoop-streaming/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-streaming</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop MapReduce Streaming</description>
   <name>Apache Hadoop MapReduce Streaming</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/hadoop-tools-dist/pom.xml b/hadoop-tools/hadoop-tools-dist/pom.xml
index d9fd24b..3dedde0 100644
--- a/hadoop-tools/hadoop-tools-dist/pom.xml
+++ b/hadoop-tools/hadoop-tools-dist/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-tools-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Tools Dist</description>
   <name>Apache Hadoop Tools Dist</name>
   <packaging>jar</packaging>
diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml
index 76fba11..078b6ed 100644
--- a/hadoop-tools/pom.xml
+++ b/hadoop-tools/pom.xml
@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-tools</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Tools</description>
   <name>Apache Hadoop Tools</name>
   <packaging>pom</packaging>
diff --git a/pom.xml b/pom.xml
index 7f65858..fd2ef7d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -16,7 +16,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-main</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Main</description>
   <name>Apache Hadoop Main</name>
   <packaging>pom</packaging>