SENTRY-432: Rebase diff on master (move the HDFS plugin to the INodeAttributeProvider API)
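
This rebase ports the Sentry HDFS plugin from the old AuthorizationProvider
NameNode API to the INodeAttributeProvider API used on master. Instead of
delegating every getter to a DefaultAuthorizationProvider instance, the
provider now implements getAttributes() and wraps the INodeAttributes it is
handed, substituting the Sentry user, group, permission and ACL feature for
paths that Sentry manages. The snapshot hooks, mutator overrides and
path-walking helpers of the old API are dropped, the test registers the
provider under the inode attributes provider key, and the dead test
scaffolding (DummyAdapter, DummyAuthzSource, DummyHMSClient,
TestAuthzPathCacheOld, TestAuthzPermCache) is deleted. The permission update
paths in SentryStore and SentryPolicyStoreProcessor also upper-case the
privilege action before the ACTION_MAPPING lookup.

A minimal sketch of how a NameNode configuration might wire in the new
provider, mirroring the test change below (the config keys are the ones the
test uses; the helper class itself is illustrative, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.sentry.hdfs.SentryAuthorizationProvider;

    public class SentryProviderWiring {
      // Illustrative helper: builds a NameNode Configuration that registers
      // the Sentry provider via the inode attributes provider key.
      public static Configuration sentryEnabledConf() {
        Configuration conf = new HdfsConfiguration();
        conf.set(DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY,
            SentryAuthorizationProvider.class.getName());
        // SentryAuthorizationProvider.start() requires HDFS ACLs to be enabled.
        conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
        return conf;
      }
    }
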
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
index 2375d1b..ebd063f 100644
--- a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
@@ -17,26 +17,21 @@
  */
 package org.apache.sentry.hdfs;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
-import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.namenode.AclFeature;
-import org.apache.hadoop.hdfs.server.namenode.AuthorizationProvider;
-import org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider;
-import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;
+import org.apache.hadoop.hdfs.server.namenode.INodeAttributes;
+import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,7 +39,7 @@
 import com.google.common.collect.ImmutableList;
 
 public class SentryAuthorizationProvider 
-    implements AuthorizationProvider, Configurable {
+    extends INodeAttributeProvider implements Configurable {
   
   static class SentryAclFeature extends AclFeature {
     public SentryAclFeature(ImmutableList<AclEntry> entries) {
@@ -57,10 +52,9 @@
 
   private boolean started;
   private Configuration conf;
-  private AuthorizationProvider defaultAuthzProvider;
   private String user;
   private String group;
-  private FsPermission permission;
+  private short permission;
   private boolean originalAuthzAsAcl;
   private SentryAuthorizationInfo authzInfo;
 
@@ -94,8 +88,6 @@
         throw new RuntimeException("HDFS ACLs must be enabled");
       }
 
-      defaultAuthzProvider = new DefaultAuthorizationProvider();
-      defaultAuthzProvider.start();
       // Configuration is read from hdfs-sentry.xml and NN configuration, in
       // that order of precedence.
       Configuration conf = new Configuration(this.conf);
@@ -104,10 +96,9 @@
           SentryAuthorizationConstants.HDFS_USER_DEFAULT);
       group = conf.get(SentryAuthorizationConstants.HDFS_GROUP_KEY,
           SentryAuthorizationConstants.HDFS_GROUP_DEFAULT);
-      permission = FsPermission.createImmutable(
-          (short) conf.getLong(SentryAuthorizationConstants.HDFS_PERMISSION_KEY,
-              SentryAuthorizationConstants.HDFS_PERMISSION_DEFAULT)
-      );
+      permission = (short) conf.getLong(
+          SentryAuthorizationConstants.HDFS_PERMISSION_KEY,
+          SentryAuthorizationConstants.HDFS_PERMISSION_DEFAULT);
       originalAuthzAsAcl = conf.getBoolean(
           SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_KEY,
           SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT);
@@ -130,142 +121,104 @@
   public synchronized void stop() {
     LOG.debug("Stopping");
     authzInfo.stop();
-    defaultAuthzProvider.stop();
-    defaultAuthzProvider = null;
   }
 
-  @Override
-  public void setSnaphottableDirs(Map<INodeAuthorizationInfo, Integer>
-      snapshotableDirs) {
-    defaultAuthzProvider.setSnaphottableDirs(snapshotableDirs);
-  }
+  private static final AclFeature EMPTY_ACL_FEATURE =
+      new AclFeature(AclFeature.EMPTY_ENTRY_LIST);
 
-  @Override
-  public void addSnapshottable(INodeAuthorizationInfo dir) {
-    defaultAuthzProvider.addSnapshottable(dir);
-  }
+  private class INodeAttributesX implements INodeAttributes {
+    private boolean useDefault;
+    private INodeAttributes node;
+    private AclFeature aclFeature;
+    
+    public INodeAttributesX(boolean useDefault, INodeAttributes node,
+        AclFeature aclFeature) {
+      this.node = node;
+      this.useDefault = useDefault;
+      this.aclFeature = aclFeature;
+    }
+    
+    @Override
+    public boolean isDirectory() {
+      return node.isDirectory();
+    }
 
-  @Override
-  public void removeSnapshottable(INodeAuthorizationInfo dir) {
-    defaultAuthzProvider.removeSnapshottable(dir);
-  }
+    @Override
+    public byte[] getLocalNameBytes() {
+      return node.getLocalNameBytes();
+    }
 
-  @Override
-  public void createSnapshot(INodeAuthorizationInfo dir, int snapshotId)
-      throws IOException{
-    defaultAuthzProvider.createSnapshot(dir, snapshotId);
-  }
+    @Override
+    public String getUserName() {
+      return (useDefault) ? node.getUserName() : user;
+    }
 
-  @Override
-  public void removeSnapshot(INodeAuthorizationInfo dir, int snapshotId)
-      throws IOException {
-    defaultAuthzProvider.removeSnapshot(dir, snapshotId);
-  }
+    @Override
+    public String getGroupName() {
+      return (useDefault) ? node.getGroupName() : group;
+    }
 
-  @Override
-  public void checkPermission(String user, Set<String> groups,
-      INodeAuthorizationInfo[] inodes, int snapshotId,
-      boolean doCheckOwner, FsAction ancestorAccess, FsAction parentAccess,
-      FsAction access, FsAction subAccess, boolean ignoreEmptyDir)
-      throws AccessControlException, UnresolvedLinkException {
-    defaultAuthzProvider.checkPermission(user, groups, inodes, snapshotId,
-        doCheckOwner, ancestorAccess, parentAccess, access, subAccess,
-        ignoreEmptyDir);
-  }
+    @Override
+    public FsPermission getFsPermission() {
+      return (useDefault) ? node.getFsPermission()
+                          : new FsPermission(getFsPermissionShort());
+    }
 
-  private static final String[] EMPTY_STRING_ARRAY = new String[0];
+    @Override
+    public short getFsPermissionShort() {
+      return (useDefault) ? node.getFsPermissionShort()
+                          : (short) getPermissionLong();
+    }
+
+    @Override
+    public long getPermissionLong() {
+      return (useDefault) ? node.getPermissionLong() : permission;
+    }
+
+    @Override
+    public AclFeature getAclFeature() {
+      AclFeature feature;
+      if (useDefault) {
+        feature = node.getAclFeature();
+        if (feature == null) {
+          feature = EMPTY_ACL_FEATURE;
+        }
+      } else {
+        feature = aclFeature;
+      }
+      return feature;
+    }
+
+    @Override
+    public XAttrFeature getXAttrFeature() {
+      return node.getXAttrFeature();
+    }
+
+    @Override
+    public long getModificationTime() {
+      return node.getModificationTime();
+    }
+
+    @Override
+    public long getAccessTime() {
+      return node.getAccessTime();
+    }
+  }
   
-  private String[] getPathElements(INodeAuthorizationInfo node) {
-    return getPathElements(node, 0);
-  }
-
-  private String[] getPathElements(INodeAuthorizationInfo node, int idx) {
-    String[] paths;
-    INodeAuthorizationInfo parent = node.getParent();
-    if (parent == null) {
-      paths = (idx > 0) ? new String[idx] : EMPTY_STRING_ARRAY;
-    } else {
-      paths = getPathElements(parent, idx + 1);
-      paths[paths.length - 1 - idx] = node.getLocalName();
-    }
-    return paths;
-  }
-
   @Override
-  public void setUser(INodeAuthorizationInfo node, String user) {
-    defaultAuthzProvider.setUser(node, user);
-  }
-
-  @Override
-  public String getUser(INodeAuthorizationInfo node, int snapshotId) {
-    String user;
-    String[] pathElements = getPathElements(node);
-    if (!authzInfo.isManaged(pathElements)) {
-      user = defaultAuthzProvider.getUser(node, snapshotId);
-    } else {
-      if (!authzInfo.isStale()) {
+  public INodeAttributes getAttributes(String[] pathElements,
+      INodeAttributes node) {
+    if (authzInfo.isManaged(pathElements)) {
+      boolean stale = authzInfo.isStale();
+      AclFeature aclFeature = getAclFeature(pathElements, node, stale);
+      if (!stale) {
         if (authzInfo.doesBelongToAuthzObject(pathElements)) {
-          user = this.user;
-        } else {
-          user = defaultAuthzProvider.getUser(node, snapshotId);
+          node = new INodeAttributesX(false, node, aclFeature);
         }
       } else {
-        user = this.user;
+        node = new INodeAttributesX(true, node, aclFeature);
       }
     }
-    return user;
-  }
-
-  @Override
-  public void setGroup(INodeAuthorizationInfo node, String group) {
-    defaultAuthzProvider.setGroup(node, group);
-  }
-
-  @Override
-  public String getGroup(INodeAuthorizationInfo node, int snapshotId) {
-    String group;
-    String[] pathElements = getPathElements(node);
-    if (!authzInfo.isManaged(pathElements)) {
-      group = defaultAuthzProvider.getGroup(node, snapshotId);
-    } else {
-      if (!authzInfo.isStale()) {
-        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
-          group = this.group;
-        } else {
-          group = defaultAuthzProvider.getGroup(node, snapshotId);
-        }
-      } else {
-        group = this.group;
-      }
-    }
-    return group;
-  }
-
-  @Override
-  public void setPermission(INodeAuthorizationInfo node,
-      FsPermission permission) {
-    defaultAuthzProvider.setPermission(node, permission);
-  }
-
-  @Override
-  public FsPermission getFsPermission(
-      INodeAuthorizationInfo node, int snapshotId) {
-    FsPermission permission;
-    String[] pathElements = getPathElements(node);
-    if (!authzInfo.isManaged(pathElements)) {
-      permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
-    } else {
-      if (!authzInfo.isStale()) {
-        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
-          permission = this.permission;
-        } else {
-          permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
-        }
-      } else {
-        permission = this.permission;
-      }
-    }
-    return permission;
+    return node;
   }
 
   private List<AclEntry> createAclEntries(String user, String group,
@@ -291,50 +244,30 @@
     return list;
   }
 
-  @Override
-  public AclFeature getAclFeature(INodeAuthorizationInfo node, int snapshotId) {
+  public AclFeature getAclFeature(String[] pathElements, INodeAttributes node, 
+      boolean stale) {
     AclFeature f = null;
-    String[] pathElements = getPathElements(node);
     String p = Arrays.toString(pathElements);
-    boolean isManaged = false;
-    boolean isStale = false;
     boolean hasAuthzObj = false;
-    if (!authzInfo.isManaged(pathElements)) {
-      isManaged = false;
-      f = defaultAuthzProvider.getAclFeature(node, snapshotId);
-    } else {
-      isManaged = true;
-      List<AclEntry> list = new ArrayList<AclEntry>();
-      if (originalAuthzAsAcl) {
-        String user = defaultAuthzProvider.getUser(node, snapshotId);
-        String group = defaultAuthzProvider.getGroup(node, snapshotId);
-        INodeAuthorizationInfo pNode = node.getParent();
-        while  (group == null || pNode != null) {
-          group = defaultAuthzProvider.getGroup(pNode, snapshotId);
-          pNode = pNode.getParent();
-        }
-        FsPermission perm = defaultAuthzProvider.getFsPermission(node, snapshotId);
-        list.addAll(createAclEntries(user, group, perm));
-      }
-      if (!authzInfo.isStale()) { 
-        isStale = false;
-        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
-          hasAuthzObj = true;
-          list.addAll(authzInfo.getAclEntries(pathElements));
-          f = new SentryAclFeature(ImmutableList.copyOf(list));
-        } else {
-          hasAuthzObj = false;
-          f = defaultAuthzProvider.getAclFeature(node, snapshotId);
-        }
-      } else {
-        isStale = true;
+    List<AclEntry> list = new ArrayList<AclEntry>();
+    if (originalAuthzAsAcl) {
+      String user = node.getUserName();
+      String group = node.getGroupName();
+      FsPermission perm = node.getFsPermission();
+      list.addAll(createAclEntries(user, group, perm));
+    }
+    if (!stale) { 
+      if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+        hasAuthzObj = true;
+        list.addAll(authzInfo.getAclEntries(pathElements));
         f = new SentryAclFeature(ImmutableList.copyOf(list));
       }
+    } else {
+      f = new SentryAclFeature(ImmutableList.copyOf(list));
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("### getAclEntry [" + (p == null ? "null" : p) + "] : ["
-          + "isManaged=" + isManaged
-          + ",isStale=" + isStale
+      LOG.debug("### getAclEntry [" + p + "] : ["
+          + "isStale=" + stale
           + ",hasAuthzObj=" + hasAuthzObj
           + ",origAtuhzAsAcl=" + originalAuthzAsAcl + "]"
           + "[" + (f == null ? "null" : f.getEntries()) + "]");
@@ -342,29 +275,4 @@
     return f;
   }
 
-  @Override
-  public void removeAclFeature(INodeAuthorizationInfo node) {
-    AclFeature aclFeature = node.getAclFeature(CURRENT_STATE_ID);
-    if (aclFeature.getClass() != SentryAclFeature.class) {
-      defaultAuthzProvider.removeAclFeature(node);
-    }
-  }
-
-  @Override
-  public void addAclFeature(INodeAuthorizationInfo node, AclFeature f) {
-    String[] pathElements = getPathElements(node);
-    if (!authzInfo.isManaged(pathElements)) {
-      defaultAuthzProvider.addAclFeature(node, f);
-    }
-  }
-
-  @Override 
-  public boolean doesAllowChanges(INodeAuthorizationInfo node) {
-    String[] pathElements = getPathElements(node);
-    if (!authzInfo.isManaged(pathElements)) {
-      return defaultAuthzProvider.doesAllowChanges(node);
-    }
-    return !authzInfo.doesBelongToAuthzObject(getPathElements(node));
-  }
-
 }
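
The rewritten provider above reduces to a three-way decision in
getAttributes(): unmanaged paths pass through untouched; managed paths whose
authz state is stale keep the original owner, group and permission but get a
Sentry-synthesized ACL; and managed, non-stale paths that belong to an authz
object are reported with the Sentry user, group, permission and ACL. A
minimal sketch of that decision with the provider internals reduced to
booleans (managed, stale and belongsToAuthzObject stand in for
authzInfo.isManaged(), authzInfo.isStale() and
authzInfo.doesBelongToAuthzObject(); the helper itself is illustrative):

    public class AttributeDecisionSketch {
      static String describe(boolean managed, boolean stale, boolean belongsToAuthzObject) {
        if (!managed) {
          return "original INodeAttributes, untouched";
        }
        if (stale) {
          // INodeAttributesX(useDefault=true, ...): HDFS owner/group/permission,
          // ACL synthesized by getAclFeature(..., stale=true).
          return "original owner/group/permission + synthesized ACL";
        }
        if (belongsToAuthzObject) {
          // INodeAttributesX(useDefault=false, ...): everything comes from Sentry.
          return "Sentry user/group/permission + Sentry ACL";
        }
        return "original INodeAttributes, untouched";
      }

      public static void main(String[] args) {
        System.out.println(describe(true, false, true));   // managed, fresh, in authz object
        System.out.println(describe(true, true, false));   // managed but stale
        System.out.println(describe(false, false, false)); // unmanaged
      }
    }
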
diff --git a/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
index 67919fa..c9bd9a3 100644
--- a/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
+++ b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
@@ -56,7 +56,7 @@
       public Void run() throws Exception {
         System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, "target/test/data");
         Configuration conf = new HdfsConfiguration();
-        conf.set(DFSConfigKeys.DFS_NAMENODE_AUTHORIZATION_PROVIDER_KEY,
+        conf.set(DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY,
             MockSentryAuthorizationProvider.class.getName());
         conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
         EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAdapter.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAdapter.java
deleted file mode 100644
index 24c63a5..0000000
--- a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAdapter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-//import org.apache.sentry.provider.db.service.thrift.UpdateForwarder;
-
-public class DummyAdapter {
-//public class DummyAdapter<K extends UpdateForwarder.Update> {
-//
-//  private final UpdateForwarder<K> destCache;
-//  private final UpdateForwarder<K> srcCache;
-//
-//  public DummyAdapter(UpdateForwarder<K> destCache, UpdateForwarder<K> srcCache) {
-//    super();
-//    this.destCache = destCache;
-//    this.srcCache = srcCache;
-//  }
-//
-//  public void getDestToPullUpdatesFromSrc() {
-//    for (K update : srcCache.getAllUpdatesFrom(destCache.getLastCommitted() + 1)) {
-//      destCache.handleUpdateNotification(update);
-//    }
-//  }
-}
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAuthzSource.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAuthzSource.java
deleted file mode 100644
index 57299c8..0000000
--- a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyAuthzSource.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-//import org.apache.sentry.hdfs.old.AuthzPermCache.AuthzSource;
-//import org.apache.sentry.hdfs.old.AuthzPermCache.PrivilegeInfo;
-//import org.apache.sentry.hdfs.old.AuthzPermCache.RoleInfo;
-
-public class DummyAuthzSource {
-//public class DummyAuthzSource implements AuthzSource{
-//
-//  public Map<String, PrivilegeInfo> privs = new HashMap<String, PrivilegeInfo>();
-//  public Map<String, RoleInfo> roles = new HashMap<String, RoleInfo>();
-//
-//  @Override
-//  public PrivilegeInfo loadPrivilege(String authzObj) throws Exception {
-//    return privs.get(authzObj);
-//  }
-//
-//  @Override
-//  public RoleInfo loadGroupsForRole(String group) throws Exception {
-//    return roles.get(group);
-//  }
-//
-//  @Override
-//  public PermissionsUpdate createFullImage(long seqNum) {
-//    PermissionsUpdate retVal = new PermissionsUpdate(seqNum, true);
-//    for (Map.Entry<String, PrivilegeInfo> pE : privs.entrySet()) {
-//      PrivilegeChanges pUpdate = retVal.addPrivilegeUpdate(pE.getKey());
-//      PrivilegeInfo pInfo = pE.getValue();
-//      for (Map.Entry<String, FsAction> ent : pInfo.roleToPermission.entrySet()) {
-//        pUpdate.addPrivilege(ent.getKey(), ent.getValue().SYMBOL);
-//      }
-//    }
-//    for (Map.Entry<String, RoleInfo> rE : roles.entrySet()) {
-//      RoleChanges rUpdate = retVal.addRoleUpdate(rE.getKey());
-//      RoleInfo rInfo = rE.getValue();
-//      for (String role : rInfo.groups) {
-//        rUpdate.addGroup(role);
-//      }
-//    }
-//    return retVal;
-//  }
-
-}
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyHMSClient.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyHMSClient.java
deleted file mode 100644
index 3f66c87..0000000
--- a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/DummyHMSClient.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.sentry.hdfs.MetastoreClient;
-
-public class DummyHMSClient implements MetastoreClient {
-
-  private HashMap<Database, HashMap<Table, HashSet<Partition>>> hmsData =
-      new HashMap<Database, HashMap<Table, HashSet<Partition>>>();
-
-  @Override
-  public List<Database> getAllDatabases() {
-    return new ArrayList<Database>(hmsData.keySet());
-  }
-
-  @Override
-  public List<Table> getAllTablesOfDatabase(Database db) {
-    if (hmsData.containsKey(db)) {
-      return new ArrayList<Table>(hmsData.get(db).keySet());
-    }
-    return new ArrayList<Table>();
-  }
-
-  @Override
-  public List<Partition> listAllPartitions(Database db, Table tbl) {
-    if (hmsData.containsKey(db)) {
-      if (hmsData.get(db).containsKey(tbl)) {
-        return new ArrayList<Partition>(hmsData.get(db).get(tbl));
-      }
-    }
-    return new ArrayList<Partition>();
-  }
-
-  public Database addDb(String dbName, String location) {
-    Database db = new Database(dbName, null, location, null);
-    hmsData.put(db, new HashMap<Table, HashSet<Partition>>());
-    return db;
-  }
-
-  public Table addTable(Database db, String tblName, String location) {
-    Table tbl = 
-        new Table(tblName, db.getName(), null, 0, 0, 0, 
-            new StorageDescriptor(null, location, null, null, false, 0, null, null, null, null),
-            null, null, null, null, null);
-    hmsData.get(db).put(tbl, new HashSet<Partition>());
-    return tbl;
-  }
-  
-  public void addPartition(Database db, Table tbl, String partitionPath) {
-    Partition part = new Partition(null, db.getName(), tbl.getTableName(), 0, 0,
-        new StorageDescriptor(null, partitionPath, null, null, false, 0, null, null, null, null), null);
-    hmsData.get(db).get(tbl).add(part);
-  }
-}
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPathCacheOld.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPathCacheOld.java
deleted file mode 100644
index ca3ebfe..0000000
--- a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPathCacheOld.java
+++ /dev/null
@@ -1,523 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Table;
-//import org.apache.sentry.hdfs.old.AuthzPathCacheOld;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
-public class TestAuthzPathCacheOld {
-
-//  @Test
-//  public void testGetPathElements() {
-//    String[] as2 = AuthzPathCacheOld.getPathElements(new String("/a/b"));
-//    String[] as1 = AuthzPathCacheOld.getPathElements(new String("/a/b"));
-//    Assert.assertArrayEquals(as1, as2);
-//
-//    String[] as = AuthzPathCacheOld.getPathElements(new String("/a/b"));
-//    Assert.assertArrayEquals(new String[] {"a", "b"}, as);
-//
-//    as = AuthzPathCacheOld.getPathElements(new String("//a/b"));
-//    Assert.assertArrayEquals(new String[]{"a", "b"}, as);
-//
-//    as = AuthzPathCacheOld.getPathElements(new String("/a//b"));
-//    Assert.assertArrayEquals(new String[]{"a", "b"}, as);
-//
-//    as = AuthzPathCacheOld.getPathElements(new String("/a/b/"));
-//    Assert.assertArrayEquals(new String[]{"a", "b"}, as);
-//
-//    as = AuthzPathCacheOld.getPathElements(new String("//a//b//"));
-//    Assert.assertArrayEquals(new String[]{"a", "b"}, as);
-//  }
-//
-//  @Test
-//  public void testGetPathsElements() {
-//    String[][] as1 = AuthzPathCacheOld.gePathsElements(
-//        new String[]{new String("/a/b")});
-//    String[][] as2 = AuthzPathCacheOld.gePathsElements(
-//        new String[]{new String("/a/b")});
-//    Assert.assertEquals(as1.length, as2.length);
-//    Assert.assertArrayEquals(as1[0], as2[0]);
-//  }
-//
-//  @Test
-//  public void testEntryType() {
-//    Assert.assertTrue(AuthzPathCacheOld.EntryType.DIR.isRemoveIfDangling());
-//    Assert.assertFalse(AuthzPathCacheOld.EntryType.PREFIX.isRemoveIfDangling());
-//    Assert.assertTrue(
-//        AuthzPathCacheOld.EntryType.AUTHZ_OBJECT.isRemoveIfDangling());
-//  }
-//  
-//  @Test
-//  public void testRootEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    root.toString();
-//    Assert.assertNull(root.getParent());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.DIR, root.getType());
-//    // NOTE : This was causing some problems during serialization.. so dissabling 
-////    Assert.assertNull(root.getPathElement());
-//    Assert.assertNull(root.getAuthzObj());
-//    Assert.assertEquals(Path.SEPARATOR, root.getFullPath());
-//    Assert.assertTrue(root.getChildren().isEmpty());
-//    root.delete();
-//    try {
-//      root.find(null, true);
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//    try {
-//      root.find(new String[0], true);
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//    try {
-//      root.find(null, false);
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//    try {
-//      root.find(new String[0], false);
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//    Assert.assertEquals(root, root.find(new String[]{"a"}, true));
-//    Assert.assertNull(root.find(new String[]{"a"}, false));
-//    Assert.assertNull(root.findPrefixEntry(new String[]{"a"}));
-//
-//    root.delete();
-//  }
-//
-//  @Test
-//  public void testRootPrefixEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(true);
-//    root.toString();
-//
-//    Assert.assertEquals(root, root.find(new String[]{"a"}, true));
-//    Assert.assertEquals(null, root.find(new String[]{"a"}, false));
-//    Assert.assertEquals(root, root.findPrefixEntry(new String[]{"a"}));
-//    Assert.assertEquals(root, root.findPrefixEntry(new String[]{"a", "b"}));
-//
-//    try {
-//      root.createPrefix(new String[]{"a"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//  }
-//
-//  @Test
-//  public void testImmediatePrefixEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    AuthzPathCacheOld.Entry entry = root.createPrefix(new String[] {"a"});
-//    entry.toString();
-//    
-//    Assert.assertEquals(1, root.getChildren().size());
-//
-//    Assert.assertEquals(root, entry.getParent());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.PREFIX, entry.getType());
-//    Assert.assertEquals("a", entry.getPathElement());
-//    Assert.assertNull(entry.getAuthzObj());
-//    Assert.assertEquals(Path.SEPARATOR + "a", entry.getFullPath());
-//    Assert.assertTrue(entry.getChildren().isEmpty());
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a"}, true));
-//    Assert.assertEquals(entry, root.find(new String[]{"a"}, false));
-//    Assert.assertEquals(entry, root.findPrefixEntry(new String[]{"a"}));
-//    Assert.assertEquals(entry, root.findPrefixEntry(new String[]{"a", "b"}));
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b"}, true));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "c"}, true));
-//    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
-//
-//    Assert.assertNull(root.find(new String[]{"b"}, false));
-//    Assert.assertNull(root.findPrefixEntry(new String[]{"b"}));
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b", "c"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    entry.delete();
-//    Assert.assertTrue(root.getChildren().isEmpty());
-//  }
-//
-//  @Test
-//  public void testFurtherPrefixEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    AuthzPathCacheOld.Entry entry = root.createPrefix(new String[]{"a", "b"});
-//    entry.toString();
-//
-//    Assert.assertEquals(1, root.getChildren().size());
-//
-//    Assert.assertEquals(root, entry.getParent().getParent());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.PREFIX, entry.getType());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.DIR, 
-//        entry.getParent().getType());
-//    Assert.assertEquals("b", entry.getPathElement());
-//    Assert.assertEquals("a", entry.getParent().getPathElement());
-//    Assert.assertNull(entry.getAuthzObj());
-//    Assert.assertNull(entry.getParent().getAuthzObj());
-//    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b", 
-//        entry.getFullPath());
-//    Assert.assertEquals(Path.SEPARATOR + "a", entry.getParent().getFullPath());
-//    Assert.assertTrue(entry.getChildren().isEmpty());
-//    Assert.assertEquals(1, entry.getParent().getChildren().size());
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b"}, true));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b"}, false));
-//    Assert.assertEquals(entry, root.findPrefixEntry(new String[]{"a", "b"}));
-//    Assert.assertNull(root.findPrefixEntry(new String[]{"a"}));
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "c"}, true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "c"}, false));
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b", "c"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    entry.delete();
-//    Assert.assertTrue(root.getChildren().isEmpty());
-//  }
-//
-//  @Test
-//  public void testImmediateAuthzEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    AuthzPathCacheOld.Entry prefix = root.createPrefix(new String[]{"a", "b"});
-//
-//    AuthzPathCacheOld.Entry entry = root.createAuthzObjPath(
-//        new String[]{"a", "b", "p1"}, "A");
-//    Assert.assertEquals(prefix, entry.getParent());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.AUTHZ_OBJECT, entry.getType());
-//    Assert.assertEquals("p1", entry.getPathElement());
-//    Assert.assertEquals("A", entry.getAuthzObj());
-//    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" + 
-//            Path.SEPARATOR + "p1", entry.getFullPath());
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b", "p1", "c"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, true));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, false));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1", "c"}, 
-//        true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "p1", "c"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "p1"}));
-//
-//    root.find(new String[]{"a", "b", "p1"}, true).delete();
-//    Assert.assertNull(root.find(new String[]{"a", "b", "p1"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "p1"}));
-//
-//  }
-//
-//  @Test
-//  public void testFurtherAuthzEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    AuthzPathCacheOld.Entry prefix = root.createPrefix(new String[]{"a", "b"});
-//
-//    AuthzPathCacheOld.Entry entry = root.createAuthzObjPath(
-//        new String[]{"a", "b", "t", "p1"}, "A");
-//    Assert.assertEquals(prefix, entry.getParent().getParent());
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.AUTHZ_OBJECT, entry.getType());
-//    Assert.assertEquals("p1", entry.getPathElement());
-//    Assert.assertEquals("A", entry.getAuthzObj());
-//    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" +
-//        Path.SEPARATOR + "t" + Path.SEPARATOR + "p1", entry.getFullPath());
-//
-//    try {
-//      root.createPrefix(new String[]{"a", "b", "p1", "t", "c"});
-//      Assert.fail();
-//    } catch (IllegalArgumentException ex) {
-//      //NOP
-//    }
-//
-//    AuthzPathCacheOld.Entry ep2 = root.createAuthzObjPath(
-//        new String[]{"a", "b", "t", "p1", "p2"}, "A");
-//
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.AUTHZ_OBJECT, entry.getType());
-//    Assert.assertEquals("p1", entry.getPathElement());
-//    Assert.assertEquals("A", entry.getAuthzObj());
-//
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.AUTHZ_OBJECT, ep2.getType());
-//    Assert.assertEquals("p2", ep2.getPathElement());
-//    Assert.assertEquals("A", entry.getAuthzObj());
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"}, 
-//        true));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"}, 
-//        false));
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1", "c"},
-//        true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p1"}));
-//
-//    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
-//        true));
-//    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
-//        false));
-//    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2", "c"},
-//        true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "p2", "c"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p1", "p2"}));
-//
-//    root.find(new String[]{"a", "b", "t", "p1"}, false).delete();
-//
-//    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"},
-//        true));
-//    Assert.assertEquals(AuthzPathCacheOld.EntryType.DIR, entry.getType());
-//    Assert.assertNull(entry.getAuthzObj());
-//
-//    Assert.assertNotNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b", "t"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p1"}));
-//
-//    root.find(new String[]{"a", "b", "t", "p1", "p2"}, false).delete();
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p1"}));
-//
-//  }
-//
-//  @Test
-//  public void testMultipleAuthzEntry() {
-//    AuthzPathCacheOld.Entry root = AuthzPathCacheOld.Entry.createRoot(false);
-//    AuthzPathCacheOld.Entry prefix = root.createPrefix(new String[]{"a", "b"});
-//
-//    AuthzPathCacheOld.Entry e1 = root.createAuthzObjPath(
-//        new String[]{"a", "b", "t", "p1"}, "A");
-//    AuthzPathCacheOld.Entry e2 = root.createAuthzObjPath(
-//        new String[]{"a", "b", "t", "p2"}, "A");
-//
-//
-//    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, true));
-//    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, 
-//        false));
-//    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1", "c"},
-//        true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p1"}));
-//
-//    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, true));
-//    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, 
-//        false));
-//    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2", "c"},
-//        true));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2", "c"}, false));
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p2"}));
-//
-//    root.find(new String[]{"a", "b", "t", "p1"}, true).delete();
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b", "t"}, false));
-//
-//    root.find(new String[]{"a", "b", "t", "p2"}, true).delete();
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2"}, false));
-//    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
-//    Assert.assertNotNull(root.find(new String[]{"a", "b"}, false));
-//
-//    Assert.assertEquals(prefix, root.findPrefixEntry(
-//        new String[]{"a", "b", "t", "p3"}));
-//  }
-//
-//  @Test
-//  public void testUpdateHandling() throws Exception {
-//    DummyHMSClient mock = new DummyHMSClient();
-//    Database db1 = mock.addDb("db1", "/db1");
-//    Table tbl11 = mock.addTable(db1, "tbl11", "/db1/tbl11");
-//    mock.addPartition(db1, tbl11, "/db1/tbl11/part111");
-//    mock.addPartition(db1, tbl11, "/db1/tbl11/part112");
-//    AuthzPathCacheOld AuthzPathUpdater = new AuthzPathCacheOld(mock, new String[]{"/db1"}, 10000);
-//
-//    // Trigger Initial refresh (full dump)
-//    AuthzPathUpdater.handleUpdateNotification(new PathsUpdate(10, null));
-//    waitToCommit(AuthzPathUpdater);
-//    assertEquals("db1.tbl11", AuthzPathUpdater.findAuthzObject("/db1/tbl11/part111".split("^/")[1].split("/")));
-//    assertEquals("db1.tbl11", AuthzPathUpdater.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/")));
-//
-//    // Handle preUpdate from HMS plugin
-//    PathsUpdate update = new PathsUpdate(11, null);
-//    update.addPathUpdate("db1.tbl12").addPath("/db1/tbl12").addPath("/db1/tbl12/part121");
-//    update.addPathUpdate("db1.tbl11").delPath("/db1/tbl11/part112");
-//
-//    // Ensure JSON serialization is working :
-//    assertEquals(PathsUpdate.toJsonString(update), 
-//        PathsUpdate.toJsonString(
-//            PathsUpdate.fromJsonString(
-//                PathsUpdate.toJsonString(update))));
-//
-//    AuthzPathUpdater.handleUpdateNotification(update);
-//    waitToCommit(AuthzPathUpdater);
-//    assertNull(AuthzPathUpdater.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/"), false));
-//    assertEquals("db1.tbl12", AuthzPathUpdater.findAuthzObject("/db1/tbl12/part121".split("^/")[1].split("/")));
-//
-//    // Add more entries to HMS
-//    Table tbl13 = mock.addTable(db1, "tbl13", "/db1/tbl13");
-//    mock.addPartition(db1, tbl13, "/db1/tbl13/part131");
-//
-//    // Simulate missed preUpdate (Send empty preUpdate with seqNum 13)
-//    // On missed preUpdate, refresh again
-//    AuthzPathUpdater.handleUpdateNotification(new PathsUpdate(13, null));
-//    waitToCommit(AuthzPathUpdater);
-//    assertEquals("db1.tbl13", AuthzPathUpdater.findAuthzObject("/db1/tbl13/part131".split("^/")[1].split("/")));
-//  }
-//
-//  @Test
-//  public void testGetUpdatesFromSrcCache() throws InterruptedException {
-//    DummyHMSClient mock = new DummyHMSClient();
-//    Database db1 = mock.addDb("db1", "/db1");
-//    Table tbl11 = mock.addTable(db1, "tbl11", "/db1/tbl11");
-//    mock.addPartition(db1, tbl11, "/db1/tbl11/part111");
-//    mock.addPartition(db1, tbl11, "/db1/tbl11/part112");
-//
-//    // This would live in the Sentry Service
-//    AuthzPathCacheOld srcCache = new AuthzPathCacheOld(mock, new String[]{"/db1"}, 10000);
-//
-//    // Trigger Initial full Image fetch
-//    srcCache.handleUpdateNotification(new PathsUpdate(10, null));
-//    waitToCommit(srcCache);
-//
-//    // This entity would live in the NN plugin : a downstream cache with no updateLog
-//    AuthzPathCacheOld destCache = new AuthzPathCacheOld(null, new String[]{"/db1"}, 0);
-//
-//    // Adapter to pull updates from upstream cache to downstream Cache
-//    DummyAdapter<PathsUpdate> adapter = new DummyAdapter<PathsUpdate>(destCache, srcCache);
-//    adapter.getDestToPullUpdatesFromSrc();
-//    waitToCommit(destCache);
-//    // Check if NN plugin received the updates from Sentry Cache
-//    assertEquals("db1.tbl11", destCache.findAuthzObject("/db1/tbl11/part111".split("^/")[1].split("/")));
-//    assertEquals("db1.tbl11", destCache.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/")));
-//
-//    // Create Upsteram HMS preUpdate
-//    PathsUpdate update = new PathsUpdate(11, null);
-//    update.addPathUpdate("db1.tbl12").addPath("/db1/tbl12").addPath("/db1/tbl12/part121");
-//    update.addPathUpdate("db1.tbl11").delPath("/db1/tbl11/part112");
-//
-//    // Send Update to Upstream Cache
-//    srcCache.handleUpdateNotification(update);
-//    waitToCommit(srcCache);
-//    // Pull preUpdate to downstream Cache
-//    adapter.getDestToPullUpdatesFromSrc();
-//    waitToCommit(destCache);
-//
-//    assertNull(srcCache.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/"), false));
-//    assertNull(destCache.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/"), false));
-//    assertEquals("db1.tbl11", destCache.findAuthzObject("/db1/tbl11/part112".split("^/")[1].split("/")));
-//    assertEquals("db1.tbl12", destCache.findAuthzObject("/db1/tbl12/part121".split("^/")[1].split("/")));
-//  }
-//
-////  @Test(expected = IllegalArgumentException.class)
-////  public void testAuthzPathUpdaterRootPrefix() {
-////    AuthzPathCacheOld cache = new AuthzPathCacheOld(new String[]{"/", "/b/c"});
-////  }
-//  
-//  @Test
-//  public void testAuthzPathUpdater() {
-//    AuthzPathCacheOld cache = new AuthzPathCacheOld(null, new String[] { "/a", "/b/c"}, 0);
-//    Assert.assertTrue(cache.isUnderPrefix("/a".split("^/")[1].split("/")));
-//    Assert.assertTrue(cache.isUnderPrefix("/a/x".split("^/")[1].split("/")));
-//    Assert.assertTrue(cache.isUnderPrefix("/b/c/".split("^/")[1].split("/")));
-//    Assert.assertFalse(cache.isUnderPrefix("/x".split("^/")[1].split("/")));
-//
-//    Assert.assertNull((cache.findAuthzObject("/a/x".split("^/")[1].split("/"))));
-//    Assert.assertNull((cache.findAuthzObject("/x".split("^/")[1].split("/"))));
-//    
-//    cache.addAuthzObject("T", Arrays.asList("/a/T/p1", "/a/T/p2"));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p2".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x/x".split("^/")[1].split("/")));
-//    Assert.assertNull((cache.findAuthzObject("/a/T/p3".split("^/")[1].split("/"))));
-//
-//    cache.addPathsToAuthzObject("T", Arrays.asList("/a/T/p3"));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p2".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x/x".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p3".split("^/")[1].split("/")));
-//
-//    cache.deletePathsFromAuthzObject("T", Arrays.asList("/a/T/p2"));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1".split("^/")[1].split("/")));
-//    Assert.assertNull((cache.findAuthzObject("/a/T/p2".split("^/")[1].split("/"))));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p1/x/x".split("^/")[1].split("/")));
-//    Assert.assertEquals("T", cache.findAuthzObject("/a/T/p3".split("^/")[1].split("/")));
-//
-//    cache.deleteAuthzObject("T");
-//    Assert.assertNull((cache.findAuthzObject("/a/T/p1".split("^/")[1].split("/"))));
-//    Assert.assertNull((cache.findAuthzObject("/a/T/p2".split("^/")[1].split("/"))));
-//    Assert.assertNull((cache.findAuthzObject("/a/T/p3".split("^/")[1].split("/"))));
-//  }
-//
-//  private void waitToCommit(AuthzPathCacheOld hmsCache) throws InterruptedException {
-//    int counter = 0;
-//    while(!hmsCache.areAllUpdatesCommited()) {
-//      Thread.sleep(200);
-//      counter++;
-//      if (counter > 10000) {
-//        fail("Updates taking too long to commit !!");
-//      }
-//    }
-//  }
-  
-}
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPermCache.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPermCache.java
deleted file mode 100644
index f4e569f..0000000
--- a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestAuthzPermCache.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import org.apache.hadoop.fs.permission.FsAction;
-//import org.apache.sentry.hdfs.old.AuthzPermCache;
-//import org.apache.sentry.hdfs.old.AuthzPermCache.PrivilegeInfo;
-//import org.apache.sentry.hdfs.old.AuthzPermCache.RoleInfo;
-import org.junit.Test;
-
-public class TestAuthzPermCache {
-
-//  @Test
-//  public void testAuthzAddRemove() throws InterruptedException {
-//    DummyAuthzSource src = new DummyAuthzSource();
-//    AuthzPermCache authzCache = new AuthzPermCache(10000, src, 0);
-//    src.privs.put("db1.tbl11", new PrivilegeInfo("db1.tbl11").setPermission("r1", FsAction.READ_WRITE));
-//    src.privs.put("db1.tbl12", new PrivilegeInfo("db1.tbl12").setPermission("r1", FsAction.READ).setPermission("r2", FsAction.WRITE));
-//    src.privs.put("db1.tbl13", new PrivilegeInfo("db1.tbl13").setPermission("r2", FsAction.READ).setPermission("r3", FsAction.WRITE));
-//    src.roles.put("r1", new RoleInfo("r1").addGroup("g1"));
-//    src.roles.put("r2", new RoleInfo("r2").addGroup("g2").addGroup("g1"));
-//    src.roles.put("r3", new RoleInfo("r3").addGroup("g3").addGroup("g2").addGroup("g1"));
-//    authzCache.handleUpdateNotification(new PermissionsUpdate(10, false));
-//    waitToCommit(authzCache);
-//
-//    assertEquals(FsAction.READ_WRITE, authzCache.getPermissions("db1.tbl11").get("g1"));
-//    assertEquals(FsAction.READ_WRITE, authzCache.getPermissions("db1.tbl12").get("g1"));
-//    assertEquals(FsAction.WRITE, authzCache.getPermissions("db1.tbl12").get("g2"));
-//    assertEquals(FsAction.READ_WRITE, authzCache.getPermissions("db1.tbl13").get("g1"));
-//    assertEquals(FsAction.READ_WRITE, authzCache.getPermissions("db1.tbl13").get("g2"));
-//    assertEquals(FsAction.WRITE, authzCache.getPermissions("db1.tbl13").get("g3"));
-//  }
-//
-//  private void waitToCommit(AuthzPermCache authzCache) throws InterruptedException {
-//    int counter = 0;
-//    while(!authzCache.areAllUpdatesCommited()) {
-//      Thread.sleep(200);
-//      counter++;
-//      if (counter > 10000) {
-//        fail("Updates taking too long to commit !!");
-//      }
-//    }
-//  }
-}
diff --git a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/persistent/SentryStore.java b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/persistent/SentryStore.java
index 6e66823..6d0efd3 100644
--- a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/persistent/SentryStore.java
+++ b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/persistent/SentryStore.java
@@ -1410,12 +1410,12 @@
           String existingPriv = pUpdate.getAddPrivileges().get(mRole.getRoleName());
           if (existingPriv == null) {
             pUpdate.putToAddPrivileges(mRole.getRoleName(),
-                ACTION_MAPPING.get(mPriv.getAction()).SYMBOL);
+                ACTION_MAPPING.get(mPriv.getAction().toUpperCase()).SYMBOL);
           } else {
             pUpdate.putToAddPrivileges(
                 mRole.getRoleName(),
                 FsAction.getFsAction(existingPriv)
-                    .or(ACTION_MAPPING.get(mPriv.getAction())).SYMBOL);
+                    .or(ACTION_MAPPING.get(mPriv.getAction().toUpperCase())).SYMBOL);
           }
         }
       }
diff --git a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
index 685c906..8ede069 100644
--- a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
+++ b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
@@ -264,7 +264,7 @@
         PermissionsUpdate update = new PermissionsUpdate(permSeqNum.incrementAndGet(), false);
         update.addPrivilegeUpdate(authzObj).putToAddPrivileges(
             request.getRoleName(),
-            SentryStore.ACTION_MAPPING.get(request.getPrivilege().getAction())
+            SentryStore.ACTION_MAPPING.get(request.getPrivilege().getAction().toUpperCase())
                 .SYMBOL);
         permsUpdater.handleUpdateNotification(update);
         LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + "]..");
@@ -306,7 +306,7 @@
         PermissionsUpdate update = new PermissionsUpdate(permSeqNum.incrementAndGet(), false);
         update.addPrivilegeUpdate(authzObj).putToDelPrivileges(
             request.getRoleName(),
-            SentryStore.ACTION_MAPPING.get(request.getPrivilege().getAction())
+            SentryStore.ACTION_MAPPING.get(request.getPrivilege().getAction().toUpperCase())
                 .SYMBOL);
         permsUpdater.handleUpdateNotification(update);
         LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " + authzObj + "]..");