SENTRY-498: Sentry integration with Hive authorization framework V2 (Dapeng Sun, reviewed by Colin Ma)
diff --git a/pom.xml b/pom.xml
index bf3a94d..a369621 100644
--- a/pom.xml
+++ b/pom.xml
@@ -69,6 +69,7 @@
     <derby.version>10.10.2.0</derby.version>
     <commons-cli.version>1.2</commons-cli.version>
     <hive.version>1.1.0</hive.version>
+    <hive-v2.version>1.3.0-SNAPSHOT</hive-v2.version>
     <hadoop.version>2.6.0</hadoop.version>
     <fest.reflect.version>1.4.1</fest.reflect.version>
     <guava.version>11.0.2</guava.version>
@@ -382,6 +383,11 @@
       </dependency>
       <dependency>
         <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-hive-v2</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
         <artifactId>sentry-binding-solr</artifactId>
         <version>${project.version}</version>
       </dependency>
diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml
index 15a962f..4283edb 100644
--- a/sentry-binding/pom.xml
+++ b/sentry-binding/pom.xml
@@ -31,6 +31,7 @@
 
   <modules>
     <module>sentry-binding-hive</module>
+    <module>sentry-binding-hive-v2</module>
     <module>sentry-binding-solr</module>
     <module>sentry-binding-sqoop</module>
   </modules>
diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml
new file mode 100644
index 0000000..ef6048c
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/pom.xml
@@ -0,0 +1,158 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-binding</artifactId>
+    <version>1.7.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>sentry-binding-hive-v2</artifactId>
+  <name>Sentry Binding v2 for Hive</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-binding-hive</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-model-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-common</artifactId>
+    </dependency>
+    <!-- required for SentryGrantRevokeTask -->
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-beeline</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-metastore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-file</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-policy-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
new file mode 100644
index 0000000..67cf266
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.Subject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HiveAuthzBindingHookV2 extends AbstractSemanticAnalyzerHook {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzBindingHookV2.class);
+  private final HiveAuthzBinding hiveAuthzBinding;
+  private final HiveAuthzConf authzConf;
+
+  public HiveAuthzBindingHookV2() throws Exception {
+    SessionState session = SessionState.get();
+    if(session == null) {
+      throw new IllegalStateException("Session has not been started");
+    }
+
+    HiveConf hiveConf = session.getConf();
+    if(hiveConf == null) {
+      throw new IllegalStateException("Session HiveConf is null");
+    }
+    authzConf = HiveAuthzBindingHook.loadAuthzConf(hiveConf);
+    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
+  }
+
+  @Override
+  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+      throws SemanticException {
+    return ast;
+  }
+
+  /**
+   * Post analyze hook that invokes hive auth bindings
+   */
+  @Override
+  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+    HiveOperation stmtOperation = getCurrentHiveStmtOp();
+    Subject subject = new Subject(context.getUserName());
+    for (int i = 0; i < rootTasks.size(); i++) {
+      Task<? extends Serializable> task = rootTasks.get(i);
+      if (task instanceof DDLTask) {
+        SentryFilterDDLTask filterTask =
+            new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
+        filterTask.setWork((DDLWork)task.getWork());
+        rootTasks.set(i, filterTask);
+      }
+    }
+  }
+
+  private HiveOperation getCurrentHiveStmtOp() {
+    SessionState sessState = SessionState.get();
+    if (sessState == null) {
+      LOG.warn("SessionState is null");
+      return null;
+    }
+    return sessState.getHiveOperation();
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
new file mode 100644
index 0000000..3fbb626
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.session.HiveSessionHookContext;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+
+import com.google.common.base.Joiner;
+
+/**
+ * HiveServer2 session hook for Sentry authorization v2: updates the session
+ * configuration so that the Sentry hooks and settings take effect for Hive Authz v2.
+ */
+public class HiveAuthzBindingSessionHookV2 implements
+    org.apache.hive.service.cli.session.HiveSessionHook {
+  public static final String SCRATCH_DIR_PERMISSIONS = "700";
+  public static final String SEMANTIC_HOOK = HiveAuthzBindingHookV2.class.getName();
+  public static final String ACCESS_RESTRICT_LIST = Joiner.on(",").join(
+      ConfVars.SEMANTIC_ANALYZER_HOOK.varname, ConfVars.PREEXECHOOKS.varname,
+      ConfVars.SCRATCHDIR.varname, ConfVars.LOCALSCRATCHDIR.varname,
+      ConfVars.METASTOREURIS.varname, ConfVars.METASTORECONNECTURLKEY.varname,
+      ConfVars.HADOOPBIN.varname, ConfVars.HIVESESSIONID.varname, ConfVars.HIVEAUXJARS.varname,
+      ConfVars.HIVESTATSDBCONNECTIONSTRING.varname, ConfVars.SCRATCHDIRPERMISSION.varname,
+      ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname,
+      ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
+      ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname, HiveAuthzConf.HIVE_ACCESS_CONF_URL,
+      HiveAuthzConf.HIVE_SENTRY_CONF_URL, HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME,
+      HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET);
+
+  /**
+   * The session hook for Sentry authorization that sets the required session-level configuration:
+   * 1. Set up the Sentry hooks - semantic, exec and filter hooks.
+   * 2. Set additional config properties required for auth (capture transform entities,
+   *    scratch dir permission = 700).
+   * 3. Add sensitive config parameters to the restrict list so users cannot override them.
+   */
+  @Override
+  public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
+    // Add sentry hooks to the session configuration
+    HiveConf sessionConf = sessionHookContext.getSessionConf();
+
+    appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);
+    // enable sentry authorization V2
+    sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
+    sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
+    sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
+        "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");
+
+    // clear Hive's default owner grants; Sentry manages table privileges itself
+    sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");
+
+    // Enable compiler to capture transform URI referred in the query
+    sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);
+
+    // set security command list
+    HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
+    String commandWhitelist =
+        authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
+            HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
+    sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
+
+    // set additional configuration properties required for auth
+    sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
+
+    // setup restrict list
+    sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);
+
+    // set user name
+    sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
+    sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
+
+    // Set MR ACLs to session user
+    appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
+    appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
+  }
+
+  // Prepend the Sentry value to any existing comma-separated value of the given config var
+  private void appendConfVar(HiveConf sessionConf, String confVar, String sentryConfVal) {
+    String currentValue = sessionConf.get(confVar, "").trim();
+    if (currentValue.isEmpty()) {
+      currentValue = sentryConfVal;
+    } else {
+      currentValue = sentryConfVal + "," + currentValue;
+    }
+    sessionConf.set(confVar, currentValue);
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
new file mode 100644
index 0000000..4a5cbcf
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryAccessController;
+import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryValidator;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAccessController;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizationValidator;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizer;
+
+import com.google.common.annotations.VisibleForTesting;
+
+public class SentryAuthorizerFactory implements HiveAuthorizerFactory {
+  public static final String HIVE_SENTRY_ACCESS_CONTROLLER =
+      "hive.security.sentry.access.controller";
+  public static final String HIVE_SENTRY_AUTHORIZATION_CONTROLLER =
+      "hive.security.sentry.authorization.controller";
+  private HiveAuthzConf authzConf;
+
+  @Override
+  public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+      HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx)
+          throws HiveAuthzPluginException {
+    HiveAuthzSessionContext sessionContext;
+    try {
+      this.authzConf = HiveAuthzBindingHook.loadAuthzConf(conf);
+      sessionContext = applyTestSettings(ctx, conf);
+      assertHiveCliAuthDisabled(conf, sessionContext);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+    SentryHiveAccessController accessController =
+        getAccessController(conf, authzConf, authenticator, sessionContext);
+    SentryHiveAuthorizationValidator authzValidator =
+        getAuthzValidator(conf, authzConf, authenticator);
+
+    return new SentryHiveAuthorizer(accessController, authzValidator);
+  }
+
+  private HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) {
+    if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)
+        && ctx.getClientType() == CLIENT_TYPE.HIVECLI) {
+      // create new session ctx object with HS2 as client type
+      HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx);
+      ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2);
+      return ctxBuilder.build();
+    }
+    return ctx;
+  }
+
+  private void assertHiveCliAuthDisabled(HiveConf conf, HiveAuthzSessionContext ctx)
+      throws HiveAuthzPluginException {
+    if (ctx.getClientType() == CLIENT_TYPE.HIVECLI
+        && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+      throw new HiveAuthzPluginException(
+          "SQL standards based authorization should not be enabled from hive cli. "
+              + "Instead the use of storage based authorization in hive metastore is recommended. Set "
+              + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli");
+    }
+  }
+
+  /**
+   * just for testing
+   */
+  @VisibleForTesting
+  protected HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+      HiveConf conf, HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
+      HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+    SentryHiveAccessController accessController =
+        getAccessController(conf, authzConf, authenticator, ctx);
+    SentryHiveAuthorizationValidator authzValidator =
+        getAuthzValidator(conf, authzConf, authenticator);
+
+    return new SentryHiveAuthorizer(accessController, authzValidator);
+  }
+
+  /**
+   * Get instance of SentryAccessController from configuration
+   * Default return DefaultSentryAccessController
+   *
+   * @param conf
+   * @param authzConf
+   * @param hiveAuthzBinding
+   * @param authenticator
+   * @throws HiveAuthzPluginException
+   */
+  public static SentryHiveAccessController getAccessController(HiveConf conf,
+      HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
+      HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+    Class<? extends SentryHiveAccessController> clazz =
+        conf.getClass(HIVE_SENTRY_ACCESS_CONTROLLER, DefaultSentryAccessController.class,
+            SentryHiveAccessController.class);
+
+    if (clazz == null) {
+      // FIXME(review): clazz is looked up and null-checked but never used below — the code
+      throw new HiveAuthzPluginException("Configuration value " + HIVE_SENTRY_ACCESS_CONTROLLER
+          + " is not set to valid SentryAccessController subclass");
+    }
+
+    try {
+      return new DefaultSentryAccessController(conf, authzConf, authenticator, ctx);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+
+  }
+
+  /**
+   * Get instance of SentryAuthorizationValidator from configuration
+   * Default return DefaultSentryAuthorizationValidator
+   *
+   * @param conf
+   * @param authzConf
+   * @param authenticator
+   * @throws HiveAuthzPluginException
+   */
+  public static SentryHiveAuthorizationValidator getAuthzValidator(HiveConf conf,
+      HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator)
+      throws HiveAuthzPluginException {
+    Class<? extends SentryHiveAuthorizationValidator> clazz =
+        conf.getClass(HIVE_SENTRY_AUTHORIZATION_CONTROLLER, DefaultSentryValidator.class,
+            SentryHiveAuthorizationValidator.class);
+
+    if (clazz == null) {
+      // FIXME(review): clazz is looked up but ignored — DefaultSentryValidator is always built
+      throw new HiveAuthzPluginException("Configuration value "
+          + HIVE_SENTRY_AUTHORIZATION_CONTROLLER
+          + " is not set to valid SentryAuthorizationValidator subclass");
+    }
+
+    try {
+      return new DefaultSentryValidator(conf, authzConf, authenticator);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+
+  }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
new file mode 100644
index 0000000..2d4bf64
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+
+public class SentryHiveAuthorizationTaskFactoryImplV2 extends HiveAuthorizationTaskFactoryImpl {
+
+  public SentryHiveAuthorizationTaskFactoryImplV2(HiveConf conf, Hive db) {
+    super(conf, db);
+  }
+
+  @Override
+  protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
+    SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc();
+    ASTNode child = (ASTNode) ast.getChild(0);
+    ASTNode gchild = (ASTNode) child.getChild(0);
+    if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
+      subject.setTable(true);
+      String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
+      subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
+    } else if (child.getType() == HiveParser.TOK_URI_TYPE) {
+      subject.setUri(true);
+      subject.setObject(gchild.getText());
+    } else if (child.getType() == HiveParser.TOK_SERVER_TYPE) {
+      subject.setServer(true);
+      subject.setObject(gchild.getText());
+    } else {
+      subject.setTable(false);
+      subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
+    }
+    // if partition spec node is present, set partition spec
+    for (int i = 1; i < child.getChildCount(); i++) {
+      gchild = (ASTNode) child.getChild(i);
+      if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
+        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
+      } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
+        subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
+      }
+    }
+    return subject;
+  }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
new file mode 100644
index 0000000..6277385
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+public class SentryHivePrivilegeObject extends HivePrivilegeObject {
+
+  boolean isServer = false;
+
+  boolean isUri = false;
+
+  String objectName = "";
+
+  public SentryHivePrivilegeObject(HivePrivilegeObjectType type, String objectName) {
+    super(type, null, objectName);
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
new file mode 100644
index 0000000..9e72b78
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
@@ -0,0 +1,558 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.SentryHiveConstants;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.SentryUserException;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
+import org.apache.sentry.core.common.ActiveRoleSet;
+import org.apache.sentry.core.common.Authorizable;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.provider.db.SentryAccessDeniedException;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+
+public class DefaultSentryAccessController extends SentryHiveAccessController {
+
+  public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryAccessController.class);
+
+  public static final String REQUIRED_AUTHZ_SERVER_NAME = "Config "
+      + AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " is required";
+
+  private HiveAuthenticationProvider authenticator;
+  private String serverName;
+  private HiveConf conf;
+  private HiveAuthzConf authzConf;
+  private HiveAuthzSessionContext ctx;
+
+  private HiveHook hiveHook;
+  private HiveAuthzBinding hiveAuthzBinding;
+  protected SentryPolicyServiceClient sentryClient;
+
+
+  public DefaultSentryAccessController(HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+    initilize(conf, authzConf, authenticator, ctx);
+    this.hiveHook = HiveHook.HiveServer2;
+  }
+
+  public DefaultSentryAccessController(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+    initilize(conf, authzConf, authenticator, ctx);
+    this.hiveHook = hiveHook;
+  }
+
+  /**
+   * initialize authenticator and hiveAuthzBinding.
+   */
+  protected void initilize(HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+    Preconditions.checkNotNull(conf, "HiveConf cannot be null");
+    Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
+    Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
+    Preconditions.checkNotNull(ctx, "HiveAuthzSessionContext cannot be null");
+
+    this.conf = conf;
+    this.authzConf = authzConf;
+    this.authenticator = authenticator;
+    this.ctx = ctx;
+    this.serverName =
+        Preconditions.checkNotNull(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()),
+            REQUIRED_AUTHZ_SERVER_NAME);
+  }
+
+  @Override
+  public void createRole(String roleName, HivePrincipal adminGrantor)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
+      String msg =
+          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
+      throw new HiveAccessControlException(msg);
+    }
+    try {
+      sentryClient = getSentryClient();
+      sentryClient.createRole(authenticator.getUserName(), roleName);
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.CREATEROLE;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error occurred when Sentry client creating role: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+  }
+
+  @Override
+  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+    if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
+      String msg =
+          "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
+      throw new HiveAccessControlException(msg);
+    }
+    try {
+      sentryClient = getSentryClient();
+      sentryClient.dropRole(authenticator.getUserName(), roleName);
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.DROPROLE;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error occurred when Sentry client creating role: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+  }
+
+  @Override
+  public List<String> getAllRoles() throws HiveAccessControlException, HiveAuthzPluginException {
+    List<String> roles = new ArrayList<String>();
+    try {
+      sentryClient = getSentryClient();
+      roles = convert2RoleList(sentryClient.listRoles(authenticator.getUserName()));
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.SHOW_ROLES;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error when sentryClient listRoles: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+    return roles;
+  }
+
+  @Override
+  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
+        grantOption, true);
+  }
+
+  @Override
+  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    grantOrRevokePrivlegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
+        grantOption, false);
+  }
+
+  @Override
+  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantOption, grantorPrinc, true);
+  }
+
+  @Override
+  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantOption, grantorPrinc, false);
+  }
+
+
+  @Override
+  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    if (principal.getType() != HivePrincipalType.ROLE) {
+      String msg =
+          SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+      throw new HiveAuthzPluginException(msg);
+    }
+    List<HivePrivilegeInfo> infoList = new ArrayList<HivePrivilegeInfo>();
+    try {
+      sentryClient = getSentryClient();
+      List<List<DBModelAuthorizable>> authorizables =
+          SentryAuthorizerUtil.getAuthzHierarchy(new Server(serverName), privObj);
+      Set<TSentryPrivilege> tPrivilges = new HashSet<TSentryPrivilege>();
+      if (authorizables != null && !authorizables.isEmpty()) {
+        for (List<? extends Authorizable> authorizable : authorizables) {
+          tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
+              principal.getName(), authorizable));
+        }
+      } else {
+        tPrivilges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
+            principal.getName(), null));
+      }
+
+      if (tPrivilges != null && !tPrivilges.isEmpty()) {
+        for (TSentryPrivilege privilege : tPrivilges) {
+          infoList.add(SentryAuthorizerUtil.convert2HivePrivilegeInfo(privilege, principal));
+        }
+      }
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.SHOW_GRANT;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error when sentryClient listPrivilegesByRoleName: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+    return infoList;
+  }
+
+  @Override
+  public void setCurrentRole(String roleName) throws HiveAccessControlException,
+      HiveAuthzPluginException {
+    try {
+      sentryClient = getSentryClient();
+      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
+      hiveAuthzBinding.setActiveRoleSet(roleName,
+          sentryClient.listUserRoles(authenticator.getUserName()));
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.GRANT_ROLE;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (Exception e) {
+      String msg = "Error when sentryClient setCurrentRole: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+      if (hiveAuthzBinding != null) {
+        hiveAuthzBinding.close();
+      }
+    }
+  }
+
+  @Override
+  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+    List<String> roles = new ArrayList<String>();
+    try {
+      sentryClient = getSentryClient();
+      hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
+      ActiveRoleSet roleSet = hiveAuthzBinding.getActiveRoleSet();
+      if (roleSet.isAll()) {
+        roles = convert2RoleList(sentryClient.listUserRoles(authenticator.getUserName()));
+      } else {
+        roles.addAll(roleSet.getRoles());
+      }
+    } catch (Exception e) {
+      String msg = "Error when sentryClient listUserRoles: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+      if (hiveAuthzBinding != null) {
+        hiveAuthzBinding.close();
+      }
+    }
+    return roles;
+  }
+
+  @Override
+  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+      throws HiveAuthzPluginException {
+    // TODO we will support in future
+    throw new HiveAuthzPluginException("Not supported of SHOW_ROLE_PRINCIPALS in Sentry");
+  }
+
+  @Override
+  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+      throws HiveAccessControlException, HiveAuthzPluginException {
+    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
+    try {
+      sentryClient = getSentryClient();
+
+      if (principal.getType() != HivePrincipalType.GROUP) {
+        String msg =
+            SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+        throw new HiveAuthzPluginException(msg);
+      }
+      Set<TSentryRole> roles =
+          sentryClient.listRolesByGroupName(authenticator.getUserName(), principal.getName());
+      if (roles != null && !roles.isEmpty()) {
+        for (TSentryRole role : roles) {
+          hiveRoleGrants.add(SentryAuthorizerUtil.convert2HiveRoleGrant(role));
+        }
+      }
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = HiveOperation.SHOW_ROLE_GRANT;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error when sentryClient listRolesByGroupName: " + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+    return hiveRoleGrants;
+  }
+
+  @Override
+  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
+    // Apply rest of the configuration only to HiveServer2
+    if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
+        || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+      throw new HiveAuthzPluginException("Sentry just support for hiveserver2");
+    }
+  }
+
+  /**
+   * Grant(isGrant is true) or revoke(isGrant is false) db privileges to/from role via sentryClient,
+   * which is a instance of SentryPolicyServiceClientV2
+   *
+   * @param hivePrincipals
+   * @param hivePrivileges
+   * @param hivePrivObject
+   * @param grantorPrincipal
+   * @param grantOption
+   * @param isGrant
+   */
+  private void grantOrRevokePrivlegeOnRole(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption, boolean isGrant)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      sentryClient = getSentryClient();
+
+      for (HivePrincipal principal : hivePrincipals) {
+        // Sentry only support grant privilege to ROLE
+        if (principal.getType() != HivePrincipalType.ROLE) {
+          String msg =
+              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+          throw new HiveAuthzPluginException(msg);
+        }
+        for (HivePrivilege privilege : hivePrivileges) {
+          String grantorName = authenticator.getUserName();
+          String roleName = principal.getName();
+          String action = SentryAuthorizerUtil.convert2SentryAction(privilege);
+          List<String> columnNames = privilege.getColumns();
+          Boolean grantOp = null;
+          if (isGrant) {
+            grantOp = grantOption;
+          }
+
+          switch (hivePrivObject.getType()) {
+            case GLOBAL:
+              if (isGrant) {
+                sentryClient.grantServerPrivilege(grantorName, roleName,
+                    hivePrivObject.getObjectName(), action, grantOp);
+              } else {
+                sentryClient.revokeServerPrivilege(grantorName, roleName,
+                    hivePrivObject.getObjectName(), action, grantOp);
+              }
+              break;
+            case DATABASE:
+              if (isGrant) {
+                sentryClient.grantDatabasePrivilege(grantorName, roleName, serverName,
+                    hivePrivObject.getDbname(), action, grantOp);
+              } else {
+                sentryClient.revokeDatabasePrivilege(grantorName, roleName, serverName,
+                    hivePrivObject.getDbname(), action, grantOp);
+              }
+              break;
+            case TABLE_OR_VIEW:
+              // For column level security
+              if (columnNames != null && !columnNames.isEmpty()) {
+                if (action.equalsIgnoreCase(AccessConstants.INSERT)
+                    || action.equalsIgnoreCase(AccessConstants.ALL)) {
+                  String msg =
+                      SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + privilege.getName()
+                          + " on Column";
+                  throw new HiveAuthzPluginException(msg);
+                }
+                if (isGrant) {
+                  sentryClient.grantColumnsPrivileges(grantorName, roleName, serverName,
+                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
+                      action, grantOp);
+                } else {
+                  sentryClient.revokeColumnsPrivilege(grantorName, roleName, serverName,
+                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
+                      action, grantOp);
+                }
+              } else {
+                if (isGrant) {
+                  sentryClient.grantTablePrivilege(grantorName, roleName, serverName,
+                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
+                } else {
+                  sentryClient.revokeTablePrivilege(grantorName, roleName, serverName,
+                      hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
+                }
+              }
+              break;
+            case LOCAL_URI:
+            case DFS_URI:
+              String uRIString = hivePrivObject.getObjectName().replace("'", "").replace("\"", "");
+              if (isGrant) {
+                sentryClient.grantURIPrivilege(grantorName, roleName, serverName,
+                    uRIString, grantOp);
+              } else {
+                sentryClient.revokeURIPrivilege(grantorName, roleName, serverName,
+                    uRIString, grantOp);
+              }
+              break;
+            case FUNCTION:
+            case PARTITION:
+            case COLUMN:
+            case COMMAND_PARAMS:
+              // not support these type
+              throw new HiveAuthzPluginException(hivePrivObject.getType().name()
+                  + " are not supported in sentry");
+            default:
+              break;
+          }
+        }
+      }
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp =
+          isGrant ? HiveOperation.GRANT_PRIVILEGE : HiveOperation.REVOKE_PRIVILEGE;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error when sentryClient grant/revoke privilege:" + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+  }
+
+  /**
+   * Grant(isGrant is true) or revoke(isGrant is false) role to/from group via sentryClient, which
+   * is a instance of SentryPolicyServiceClientV2
+   *
+   * @param hivePrincipals
+   * @param roles
+   * @param grantOption
+   * @param grantorPrinc
+   * @param isGrant
+   */
+  private void grantOrRevokeRoleOnGroup(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc, boolean isGrant)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      sentryClient = getSentryClient();
+      // get principals
+      Set<String> groups = Sets.newHashSet();
+      for (HivePrincipal principal : hivePrincipals) {
+        if (principal.getType() != HivePrincipalType.GROUP) {
+          String msg =
+              SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+          throw new HiveAuthzPluginException(msg);
+        }
+        groups.add(principal.getName());
+      }
+
+      // grant/revoke role to/from principals
+      for (String roleName : roles) {
+        if (isGrant) {
+          sentryClient.grantRoleToGroups(grantorPrinc.getName(), roleName, groups);
+        } else {
+          sentryClient.revokeRoleFromGroups(grantorPrinc.getName(), roleName, groups);
+        }
+      }
+
+    } catch (SentryAccessDeniedException e) {
+      HiveOperation hiveOp = isGrant ? HiveOperation.GRANT_ROLE : HiveOperation.REVOKE_ROLE;
+      executeOnFailureHooks(hiveOp, e);
+    } catch (SentryUserException e) {
+      String msg = "Error when sentryClient grant/revoke role:" + e.getMessage();
+      executeOnErrorHooks(msg, e);
+    } finally {
+      if (sentryClient != null) {
+        sentryClient.close();
+      }
+    }
+  }
+
+  private void executeOnFailureHooks(HiveOperation hiveOp, SentryAccessDeniedException e)
+      throws HiveAccessControlException {
+    SentryOnFailureHookContext hookCtx =
+        new SentryOnFailureHookContextImpl(SessionState.get().getCmd(), null, null, hiveOp, null,
+            null, null, null, authenticator.getUserName(), null, new AuthorizationException(e),
+            authzConf);
+    SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
+    throw new HiveAccessControlException(e.getMessage(), e);
+  }
+
+  private void executeOnErrorHooks(String msg, Exception e) throws HiveAuthzPluginException {
+    LOG.error(msg, e);
+    throw new HiveAuthzPluginException(msg, e);
+  }
+
+  private List<String> convert2RoleList(Set<TSentryRole> roleSet) {
+    List<String> roles = new ArrayList<String>();
+    if (roleSet != null && !roleSet.isEmpty()) {
+      for (TSentryRole tRole : roleSet) {
+        roles.add(tRole.getRoleName());
+      }
+    }
+    return roles;
+  }
+
+  private SentryPolicyServiceClient getSentryClient() throws HiveAuthzPluginException {
+    try {
+      Preconditions.checkNotNull(authzConf, "HiveAuthConf cannot be null");
+      return SentryServiceClientFactory.create(authzConf);
+    } catch (Exception e) {
+      String msg = "Error occurred when creating Sentry client: " + e.getMessage();
+      throw new HiveAuthzPluginException(msg, e);
+    }
+  }
+
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java
new file mode 100644
index 0000000..2bc8aad
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java
@@ -0,0 +1,481 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
+import org.apache.sentry.binding.hive.v2.util.SimpleSemanticAnalyzer;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+
+/**
+ * This class used to do authorization. Check if current user has privileges to do the operation.
+ */
+public class DefaultSentryValidator extends SentryHiveAuthorizationValidator {
+
+  public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryValidator.class);
+
+  protected HiveConf conf;
+  protected HiveAuthzConf authzConf;
+  protected HiveAuthenticationProvider authenticator;
+
+  /**
+   * Creates a validator bound to the HiveServer2 hook.
+   *
+   * @param conf Hive configuration
+   * @param authzConf Sentry authorization configuration
+   * @param authenticator provider of the authenticated user name
+   * @throws Exception if any argument is null
+   */
+  public DefaultSentryValidator(HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator) throws Exception {
+    initilize(conf, authzConf, authenticator);
+    this.hiveHook = HiveHook.HiveServer2;
+  }
+
+  /**
+   * Creates a validator bound to an explicit Hive hook (e.g. HiveServer2 or metastore).
+   *
+   * @param hiveHook which Hive component this validator serves
+   * @param conf Hive configuration
+   * @param authzConf Sentry authorization configuration
+   * @param authenticator provider of the authenticated user name
+   * @throws Exception if any configuration argument is null
+   */
+  public DefaultSentryValidator(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator) throws Exception {
+    initilize(conf, authzConf, authenticator);
+    this.hiveHook = hiveHook;
+  }
+
+  /**
+   * Validates and stores the configuration and the authenticator.
+   * NOTE(review): method name is misspelled ("initilize"); kept as-is because it is protected
+   * and may be overridden/called by subclasses — renaming would break them.
+   *
+   * @param conf Hive configuration; must not be null
+   * @param authzConf Sentry authorization configuration; must not be null
+   * @param authenticator Hive authentication provider; must not be null
+   * @throws Exception if any argument is null
+   */
+  protected void initilize(HiveConf conf, HiveAuthzConf authzConf,
+      HiveAuthenticationProvider authenticator) throws Exception {
+    Preconditions.checkNotNull(conf, "HiveConf cannot be null");
+    Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
+    Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
+    this.conf = conf;
+    this.authzConf = authzConf;
+    this.authenticator = authenticator;
+  }
+
+  // Which Hive component (HiveServer2 / metastore) this validator runs in; set by constructors.
+  private HiveHook hiveHook;
+
+  // Operations whose authorization hierarchy must be extended to DB scope (input or output).
+  private static final Set<HiveOperation> EX_DB_ALL = Sets.newHashSet(HiveOperation.DROPDATABASE,
+      HiveOperation.CREATETABLE, HiveOperation.IMPORT, HiveOperation.DESCDATABASE,
+      HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB, HiveOperation.UNLOCKDB);
+  // Subset of EX_DB_ALL added to the *input* hierarchy; the remainder go to the output side.
+  private static final Set<HiveOperation> EX_DB_INPUT = Sets.newHashSet(HiveOperation.DROPDATABASE,
+      HiveOperation.DESCDATABASE, HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB,
+      HiveOperation.UNLOCKDB);
+
+  // Operations whose authorization hierarchy must be extended to Table scope.
+  private static final Set<HiveOperation> EX_TB_ALL = Sets.newHashSet(HiveOperation.DROPTABLE,
+      HiveOperation.DROPVIEW, HiveOperation.DESCTABLE, HiveOperation.SHOW_TBLPROPERTIES,
+      HiveOperation.SHOWINDEXES, HiveOperation.ALTERTABLE_PROPERTIES,
+      HiveOperation.ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_CLUSTER_SORT,
+      HiveOperation.ALTERTABLE_FILEFORMAT, HiveOperation.ALTERTABLE_TOUCH,
+      HiveOperation.ALTERTABLE_PROTECTMODE, HiveOperation.ALTERTABLE_RENAMECOL,
+      HiveOperation.ALTERTABLE_ADDCOLS, HiveOperation.ALTERTABLE_REPLACECOLS,
+      HiveOperation.ALTERTABLE_RENAMEPART, HiveOperation.ALTERTABLE_ARCHIVE,
+      HiveOperation.ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_SERIALIZER,
+      HiveOperation.ALTERTABLE_MERGEFILES, HiveOperation.ALTERTABLE_SKEWED,
+      HiveOperation.ALTERTABLE_DROPPARTS, HiveOperation.ALTERTABLE_ADDPARTS,
+      HiveOperation.ALTERTABLE_RENAME, HiveOperation.ALTERTABLE_LOCATION,
+      HiveOperation.ALTERVIEW_PROPERTIES, HiveOperation.ALTERPARTITION_FILEFORMAT,
+      HiveOperation.ALTERPARTITION_PROTECTMODE, HiveOperation.ALTERPARTITION_SERDEPROPERTIES,
+      HiveOperation.ALTERPARTITION_SERIALIZER, HiveOperation.ALTERPARTITION_MERGEFILES,
+      HiveOperation.ALTERPARTITION_LOCATION, HiveOperation.ALTERTBLPART_SKEWED_LOCATION,
+      HiveOperation.MSCK, HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE,
+      HiveOperation.UNLOCKTABLE, HiveOperation.SHOWCOLUMNS, HiveOperation.SHOW_TABLESTATUS, HiveOperation.LOAD);
+  // Subset of EX_TB_ALL added to the *input* hierarchy at Table scope.
+  private static final Set<HiveOperation> EX_TB_INPUT = Sets.newHashSet(HiveOperation.DROPTABLE,
+      HiveOperation.DROPVIEW, HiveOperation.SHOW_TBLPROPERTIES, HiveOperation.SHOWINDEXES,
+      HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE, HiveOperation.UNLOCKTABLE,
+      HiveOperation.SHOW_TABLESTATUS);
+  // Metadata operations that additionally require column-level (Column.SOME) input access.
+  private static final Set<HiveOperation> META_TB_INPUT = Sets.newHashSet(HiveOperation.DESCTABLE,
+      HiveOperation.SHOWCOLUMNS);
+
+  /**
+   * Checks whether the current user may perform {@code hiveOpType} on the given input and output
+   * objects, by converting them to Sentry authorizables and delegating to
+   * {@link HiveAuthzBinding#authorize}.
+   *
+   * @param hiveOpType the Hive operation being authorized
+   * @param inputHObjs objects read by the operation; may be null
+   * @param outputHObjs objects written by the operation; may be null
+   * @param context query context (command string, client ip)
+   * @throws HiveAuthzPluginException on internal errors during authorization
+   * @throws HiveAccessControlException when the user lacks the required privileges
+   */
+  @Override
+  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    if (LOG.isDebugEnabled()) {
+      String msg =
+          "Checking privileges for operation " + hiveOpType + " by user "
+              + authenticator.getUserName() + " on " + " input objects " + inputHObjs
+              + " and output objects " + outputHObjs + ". Context Info: " + context;
+      LOG.debug(msg);
+    }
+
+    HiveOperation hiveOp = SentryAuthorizerUtil.convert2HiveOperation(hiveOpType.name());
+    HiveAuthzPrivileges stmtAuthPrivileges = null;
+    // A plain DESCRIBE (no EXTENDED/FORMATTED) only needs column metadata privileges.
+    // NOTE(review): contains() is case-sensitive — assumes the command keywords are
+    // upper-cased upstream; confirm.
+    if (HiveOperation.DESCTABLE.equals(hiveOp) &&
+        !(context.getCommandString().contains("EXTENDED") || context.getCommandString().contains("FORMATTED")) ) {
+      stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
+    } else {
+      stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(hiveOp);
+    }
+
+    HiveAuthzBinding hiveAuthzBinding = null;
+    try {
+      hiveAuthzBinding = getAuthzBinding();
+      if (stmtAuthPrivileges == null) {
+        // We don't handle authorizing this statement
+        return;
+      }
+
+      List<List<DBModelAuthorizable>> inputHierarchyList =
+          SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(),
+              inputHObjs);
+      List<List<DBModelAuthorizable>> outputHierarchyList =
+          SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(),
+              outputHObjs);
+
+      // Workaround for metadata queries
+      addExtendHierarchy(hiveOp, stmtAuthPrivileges, inputHierarchyList, outputHierarchyList,
+          context.getCommandString(), hiveAuthzBinding);
+
+      hiveAuthzBinding.authorize(hiveOp, stmtAuthPrivileges,
+          new Subject(authenticator.getUserName()), inputHierarchyList, outputHierarchyList);
+    } catch (AuthorizationException e) {
+      // Access denied: recover the db/table context from the output objects so the
+      // on-failure hooks receive meaningful information.
+      Database db = null;
+      Table tab = null;
+      AccessURI udfURI = null;
+      AccessURI partitionURI = null;
+      if (outputHObjs != null) {
+        for (HivePrivilegeObject obj : outputHObjs) {
+          switch (obj.getType()) {
+            case DATABASE:
+              db = new Database(obj.getObjectName());
+              break;
+            case TABLE_OR_VIEW:
+              db = new Database(obj.getDbname());
+              tab = new Table(obj.getObjectName());
+              break;
+            case PARTITION:
+              db = new Database(obj.getDbname());
+              tab = new Table(obj.getObjectName());
+              break;
+            case LOCAL_URI:
+            case DFS_URI:
+              break;
+            default:
+              break;
+          }
+        }
+      }
+      SentryOnFailureHookContext hookCtx =
+          new SentryOnFailureHookContextImpl(context.getCommandString(), null, null, hiveOp, db,
+              tab, udfURI, partitionURI, authenticator.getUserName(), context.getIpAddress(), e,
+              authzConf);
+      SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
+      // Build the semicolon-separated list of missing privileges with a StringBuilder
+      // instead of repeated String concatenation.
+      StringBuilder permsRequired = new StringBuilder();
+      for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
+        permsRequired.append(perm).append(";");
+      }
+      SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS,
+          permsRequired.toString());
+      String msg =
+          HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
+              + "\n Required privileges for this query: " + permsRequired;
+      throw new HiveAccessControlException(msg, e);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e.getClass()+ ": " + e.getMessage(), e);
+    } finally {
+      if (hiveAuthzBinding != null) {
+        hiveAuthzBinding.close();
+      }
+    }
+
+    // Test hook: when mock compilation is enabled, fail even after successful authorization.
+    if ("true".equalsIgnoreCase(SessionState.get().getConf()
+        .get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
+      throw new HiveAccessControlException(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR
+          + " Mock query compilation aborted. Set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION
+          + " to 'false' for normal query processing");
+    }
+  }
+
+  /**
+   * Creates a new {@link HiveAuthzBinding} for the configured hook. Exposed (and overridable in
+   * tests) so a mock binding can be injected. The caller is responsible for closing the binding.
+   *
+   * @return a freshly created binding
+   * @throws Exception if the binding cannot be created
+   */
+  @VisibleForTesting
+  public HiveAuthzBinding getAuthzBinding() throws Exception {
+    return new HiveAuthzBinding(hiveHook, conf, authzConf);
+  }
+
+  /**
+   * Workaround for metadata queries: Hive v2 does not always report the objects a statement
+   * touches, so for the affected operations re-parse the command text and extend the Sentry
+   * input/output hierarchies with the server/db/table/URI authorizables they actually require.
+   *
+   * @param hiveOp the Hive operation being authorized
+   * @param stmtAuthPrivileges privilege requirements for the statement (drives the scope switch)
+   * @param inputHierarchyList input hierarchies; extended in place
+   * @param outputHierarchyList output hierarchies; extended in place
+   * @param command the raw command string, re-parsed for db/table/class names
+   * @param hiveAuthzBinding binding supplying the authorization server
+   * @throws HiveAuthzPluginException if the command cannot be parsed or a UDF jar resolved
+   * @throws HiveAccessControlException on access-control failures during analysis
+   */
+  private void addExtendHierarchy(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
+      List<List<DBModelAuthorizable>> inputHierarchyList,
+      List<List<DBModelAuthorizable>> outputHierarchyList, String command,
+      HiveAuthzBinding hiveAuthzBinding) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    String currDatabase = null;
+    switch (stmtAuthPrivileges.getOperationScope()) {
+      case SERVER:
+        // validate server level privileges if applicable. Eg create UDF,register jar etc ..
+        List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
+        serverHierarchy.add(hiveAuthzBinding.getAuthServer());
+        inputHierarchyList.add(serverHierarchy);
+        break;
+      case DATABASE:
+        // workaround for metadata queries.
+        if (EX_DB_ALL.contains(hiveOp)) {
+          SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+          currDatabase = analyzer.getCurrentDb();
+
+          List<DBModelAuthorizable> externalAuthorizableHierarchy =
+              new ArrayList<DBModelAuthorizable>();
+          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+          externalAuthorizableHierarchy.add(new Database(currDatabase));
+
+          if (EX_DB_INPUT.contains(hiveOp)) {
+            inputHierarchyList.add(externalAuthorizableHierarchy);
+          } else {
+            outputHierarchyList.add(externalAuthorizableHierarchy);
+          }
+        }
+        break;
+      case TABLE:
+      case COLUMN:
+        // workaround for drop table/view.
+        if (EX_TB_ALL.contains(hiveOp)) {
+          SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+          currDatabase = analyzer.getCurrentDb();
+          String currTable = analyzer.getCurrentTb();
+
+          List<DBModelAuthorizable> externalAuthorizableHierarchy =
+              new ArrayList<DBModelAuthorizable>();
+          externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+          externalAuthorizableHierarchy.add(new Database(currDatabase));
+          externalAuthorizableHierarchy.add(new Table(currTable));
+
+          if (EX_TB_INPUT.contains(hiveOp)) {
+            inputHierarchyList.add(externalAuthorizableHierarchy);
+          } else if (META_TB_INPUT.contains(hiveOp)) {
+            // metadata queries only need access to *some* column of the table
+            externalAuthorizableHierarchy.add(Column.SOME);
+            inputHierarchyList.add(externalAuthorizableHierarchy);
+          } else {
+            outputHierarchyList.add(externalAuthorizableHierarchy);
+          }
+        }
+        break;
+      case FUNCTION:
+        if (hiveOp.equals(HiveOperation.CREATEFUNCTION)) {
+          SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+          currDatabase = analyzer.getCurrentDb();
+          String udfClassName = analyzer.getCurrentTb();
+          try {
+            // Resolve the jar that provides the UDF class; URI privileges are checked on it.
+            CodeSource udfSrc = Class.forName(udfClassName).getProtectionDomain().getCodeSource();
+            if (udfSrc == null) {
+              throw new HiveAuthzPluginException("Could not resolve the jar for UDF class "
+                  + udfClassName);
+            }
+            String udfJar = udfSrc.getLocation().getPath();
+            if (udfJar == null || udfJar.isEmpty()) {
+              // fixed: missing space between the class name and the rest of the message
+              throw new HiveAuthzPluginException("Could not find the jar for UDF class "
+                  + udfClassName + " to validate privileges");
+            }
+            AccessURI udfURI = SentryAuthorizerUtil.parseURI(udfSrc.getLocation().toString(), true);
+            List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
+            udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+            udfUriHierarchy.add(udfURI);
+            inputHierarchyList.add(udfUriHierarchy);
+          } catch (Exception e) {
+            throw new HiveAuthzPluginException("Error retrieving udf class", e);
+          }
+        }
+        break;
+      case CONNECT:
+        /*
+         * The 'CONNECT' is an implicit privilege scope currently used for - USE <db> It's allowed
+         * when the user has any privilege on the current database. For application backward
+         * compatibility, we allow (optional) implicit connect permission on 'default' db.
+         */
+        List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
+        connectHierarchy.add(hiveAuthzBinding.getAuthServer());
+        if (hiveOp.equals(HiveOperation.SWITCHDATABASE)) {
+          // NOTE(review): assumes the command is exactly "USE <db>" separated by single spaces;
+          // for other CONNECT-scope operations currDatabase stays null — confirm Database(null)
+          // is acceptable to the authorization model.
+          currDatabase = command.split(" ")[1];
+        }
+        // by default allow connect access to default db
+        Table currTbl = Table.ALL;
+        Database currDB = new Database(currDatabase);
+        Column currCol = Column.ALL;
+        if ((DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDatabase) && "false"
+            .equalsIgnoreCase(authzConf.get(
+                HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false")))) {
+          currDB = Database.ALL;
+          currTbl = Table.SOME;
+        }
+
+        connectHierarchy.add(currDB);
+        connectHierarchy.add(currTbl);
+        connectHierarchy.add(currCol);
+
+        inputHierarchyList.add(connectHierarchy);
+        break;
+      default:
+        // other scopes need no extension
+        break;
+    }
+  }
+
+  /**
+   * Filters the objects returned by SHOW DATABASES / SHOW TABLES down to those the current user
+   * has privileges on. Other object types are returned unchanged.
+   *
+   * NOTE(review): any failure here is only logged at debug level and the UNFILTERED list is
+   * returned — that silently skips filtering when the binding cannot be created; confirm this
+   * fail-open behavior is intended.
+   *
+   * @param listObjs candidate objects; may be null
+   * @param context query context (unused here)
+   * @return the filtered list, or the input unchanged on error / unsupported type
+   */
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) {
+    if (listObjs != null && listObjs.size() >= 1) {
+      // All objects in one listing share a type; inspect the first to pick the filter.
+      HivePrivilegeObjectType pType = listObjs.get(0).getType();
+      HiveAuthzBinding hiveAuthzBinding = null;
+      try {
+        switch (pType) {
+          case DATABASE:
+            hiveAuthzBinding = getAuthzBinding();
+            listObjs = filterShowDatabases(listObjs, authenticator.getUserName(), hiveAuthzBinding);
+            break;
+          case TABLE_OR_VIEW:
+            hiveAuthzBinding = getAuthzBinding();
+            listObjs = filterShowTables(listObjs, authenticator.getUserName(), hiveAuthzBinding);
+            break;
+        }
+      } catch (Exception e) {
+        LOG.debug(e.getMessage(),e);
+      } finally {
+        if (hiveAuthzBinding != null) {
+          hiveAuthzBinding.close();
+        }
+      }
+    }
+    return listObjs;
+  }
+
+  /**
+   * Returns the subset of tables the user may see in SHOW TABLES: a table is kept when the user
+   * holds SELECT or INSERT on any of its columns.
+   *
+   * @param listObjs candidate table objects
+   * @param userName the requesting user
+   * @param hiveAuthzBinding open binding used for the privilege checks (not closed here)
+   * @return the tables the user is allowed to see
+   */
+  private List<HivePrivilegeObject> filterShowTables(List<HivePrivilegeObject> listObjs,
+      String userName, HiveAuthzBinding hiveAuthzBinding) {
+    List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>();
+    Subject subject = new Subject(userName);
+    // Visibility requires SELECT or INSERT on some column of the table.
+    HiveAuthzPrivileges tableMetaDataPrivilege =
+        new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+            .addInputObjectPriviledge(AuthorizableType.Column,
+                EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT))
+            .setOperationScope(HiveOperationScope.TABLE)
+            .setOperationType(
+                org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.INFO)
+            .build();
+
+    for (HivePrivilegeObject obj : listObjs) {
+      Table table = new Table(obj.getObjectName());
+      Database database = new Database(obj.getDbname());
+
+      // Hierarchy checked: server -> database -> table -> any column.
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy =
+          new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(table);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        hiveAuthzBinding.authorize(HiveOperation.SHOWTABLES, tableMetaDataPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(obj);
+      } catch (AuthorizationException e) {
+        // Expected: the user lacks privileges on this table, so it is simply not listed.
+      }
+    }
+    return filteredResult;
+  }
+
+  /**
+   * Returns the subset of databases the user may see in SHOW DATABASES: a database is kept when
+   * the user holds any privilege (SELECT/INSERT/ALTER/CREATE/DROP/INDEX/LOCK) on any column
+   * within it, or when it is the unrestricted 'default' database.
+   *
+   * @param listObjs candidate database objects
+   * @param userName the requesting user
+   * @param hiveAuthzBinding open binding used for the privilege checks (not closed here)
+   * @return the databases the user is allowed to see
+   */
+  private List<HivePrivilegeObject> filterShowDatabases(List<HivePrivilegeObject> listObjs,
+      String userName, HiveAuthzBinding hiveAuthzBinding) {
+    List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>();
+    Subject subject = new Subject(userName);
+    // Visibility requires any of these actions on some column in the database.
+    HiveAuthzPrivileges anyPrivilege =
+        new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+            .addInputObjectPriviledge(
+                AuthorizableType.Column,
+                EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT, DBModelAction.ALTER,
+                    DBModelAction.CREATE, DBModelAction.DROP, DBModelAction.INDEX,
+                    DBModelAction.LOCK))
+            .setOperationScope(HiveOperationScope.CONNECT)
+            .setOperationType(
+                org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.QUERY)
+            .build();
+
+    for (HivePrivilegeObject obj : listObjs) {
+      // 'default' is always visible unless explicitly restricted by configuration.
+      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(obj.getObjectName())
+          && "false".equalsIgnoreCase(hiveAuthzBinding.getAuthzConf().get(
+              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
+        filteredResult.add(obj);
+        continue;
+      }
+
+      Database database = new Database(obj.getObjectName());
+
+      // Hierarchy checked: server -> database -> any table -> any column.
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy =
+          new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(Table.ALL);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        hiveAuthzBinding.authorize(HiveOperation.SHOWDATABASES, anyPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(obj);
+      } catch (AuthorizationException e) {
+        // Expected: the user lacks privileges on this database, so it is simply not listed.
+      }
+    }
+    return filteredResult;
+  }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java
new file mode 100644
index 0000000..26fdac8
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+
+/**
+ * Abstract class to do access control commands, e.g. grant/revoke privileges, grant/revoke role,
+ * create/drop role.
+ */
+public abstract class SentryHiveAccessController implements HiveAccessController {
+
+  /**
+   * Hive statement: Grant privilege GRANT priv_type [, priv_type ] ... ON table_or_view_name TO
+   * principal_specification [, principal_specification] ... [WITH GRANT OPTION];
+   * principal_specification : USER user | ROLE role
+   *
+   * priv_type : INSERT | SELECT | UPDATE | DELETE | ALL
+   *
+   * @param hivePrincipals
+   * @param hivePrivileges
+   * @param hivePrivObject
+   * @param grantorPrincipal
+   * @param grantOption
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void grantPrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Revoke privilege REVOKE priv_type [, priv_type ] ... ON table_or_view_name FROM
+   * principal_specification [, principal_specification] ... ;
+   *
+   * principal_specification : USER user | ROLE role
+   *
+   * priv_type : INSERT | SELECT | UPDATE | DELETE | ALL
+   *
+   * @param hivePrincipals
+   * @param hivePrivileges
+   * @param hivePrivObject
+   * @param grantorPrincipal
+   * @param grantOption
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void revokePrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Create role CREATE ROLE role_name;
+   *
+   * @param roleName
+   * @param adminGrantor
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void createRole(String roleName, HivePrincipal adminGrantor)
+      throws HiveAuthzPluginException, HiveAccessControlException;
+
+  /**
+   * Hive statement: Drop role DROP ROLE role_name;
+   *
+   * @param roleName
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void dropRole(String roleName) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Grant role GRANT role_name [, role_name] ... TO principal_specification [,
+   * principal_specification] ... [ WITH ADMIN OPTION ];
+   *
+   * principal_specification : USER user | ROLE role
+   *
+   * @param hivePrincipals
+   * @param roles
+   * @param grantOption
+   * @param grantorPrinc
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+
+  /**
+   * Hive statement: Revoke role REVOKE [ADMIN OPTION FOR] role_name [, role_name] ... FROM
+   * principal_specification [, principal_specification] ... ;
+   *
+   * principal_specification : USER user | ROLE role
+   *
+   * @param hivePrincipals
+   * @param roles
+   * @param grantOption
+   * @param grantorPrinc
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Show roles SHOW ROLES;
+   *
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract List<String> getAllRoles() throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Show grant SHOW GRANT [principal_name] ON (ALL | [TABLE] table_or_view_name);
+   *
+   * @param principal
+   * @param privObj
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal,
+      HivePrivilegeObject privObj) throws HiveAuthzPluginException, HiveAccessControlException;
+
+  /**
+   * Hive statement: Set role SET ROLE (role_name|ALL);
+   *
+   * @param roleName
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void setCurrentRole(String roleName) throws HiveAuthzPluginException,
+      HiveAccessControlException;
+
+  /**
+   * Hive statement: Show current roles SHOW CURRENT ROLES;
+   *
+   * @throws HiveAuthzPluginException
+   */
+  @Override
+  public abstract List<String> getCurrentRoleNames() throws HiveAuthzPluginException;
+
+  /**
+   * Hive statement: Show role principals SHOW PRINCIPALS role_name;
+   *
+   * @param roleName
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+      throws HiveAuthzPluginException, HiveAccessControlException;
+
+  /**
+   * Hive statement: Show role grant SHOW ROLE GRANT (USER|ROLE) principal_name;
+   *
+   * @param principal
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+      throws HiveAuthzPluginException, HiveAccessControlException;
+
+  /**
+   * Apply configuration files for authorization V2
+   *
+   * @param hiveConf
+   * @throws HiveAuthzPluginException
+   */
+  @Override
+  public abstract void applyAuthorizationConfigPolicy(HiveConf hiveConf)
+      throws HiveAuthzPluginException;
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java
new file mode 100644
index 0000000..7bf7b87
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+/**
+ * Base class for Sentry authorization validation. Implementations check
+ * whether the current user has the privileges required for an operation, and
+ * filter query results down to the objects the user may access.
+ */
+public abstract class SentryHiveAuthorizationValidator implements HiveAuthorizationValidator {
+
+  /**
+   * Checks whether the current user has the privileges to perform the given
+   * operation type hiveOpType on the given input and output objects.
+   *
+   * @param hiveOpType operation being authorized
+   * @param inputHObjs objects read by the operation
+   * @param outputHObjs objects written by the operation
+   * @param context authorization context for the request
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  @Override
+  public abstract void checkPrivileges(HiveOperationType hiveOpType,
+      List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs,
+      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;
+
+
+  /**
+   * Filters the given objects according to the current user's permissions,
+   * removing every object on which the user holds no privilege at all.
+   *
+   * @param listObjs candidate objects to filter
+   * @param context authorization context for the request
+   * @return the subset of listObjs the current user may see
+   */
+  @Override
+  public abstract List<HivePrivilegeObject> filterListCmdObjects(
+      List<HivePrivilegeObject> listObjs, HiveAuthzContext context);
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java
new file mode 100644
index 0000000..9d227b8
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.sentry.binding.hive.v2.SentryHivePrivilegeObject;
+
+/**
+ * Sentry implementation of {@link HiveAuthorizer} for the Hive authorization
+ * framework V2. Access-control operations (grant/revoke and role management)
+ * are delegated to a {@link SentryHiveAccessController}; privilege checking
+ * and result filtering are delegated to a
+ * {@link SentryHiveAuthorizationValidator}. You can customize the behavior by
+ * passing different implementations of the two collaborators to the
+ * constructor.
+ */
+public class SentryHiveAuthorizer implements HiveAuthorizer {
+
+  // Injected collaborators: assigned exactly once in the constructor and
+  // never reassigned, hence final.
+  private final SentryHiveAccessController accessController;
+  private final SentryHiveAuthorizationValidator authValidator;
+
+  public SentryHiveAuthorizer(SentryHiveAccessController accessController,
+      SentryHiveAuthorizationValidator authValidator) {
+    this.accessController = accessController;
+    this.authValidator = authValidator;
+  }
+
+  @Override
+  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    accessController.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+        grantorPrincipal, grantOption);
+  }
+
+  @Override
+  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    accessController.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+        grantorPrincipal, grantOption);
+  }
+
+  @Override
+  public void createRole(String roleName, HivePrincipal adminGrantor)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    accessController.createRole(roleName, adminGrantor);
+  }
+
+  @Override
+  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+    accessController.dropRole(roleName);
+  }
+
+  @Override
+  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    accessController.grantRole(hivePrincipals, roles, grantOption, grantorPrinc);
+  }
+
+  @Override
+  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+      boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+      HiveAccessControlException {
+    accessController.revokeRole(hivePrincipals, roles, grantOption, grantorPrinc);
+  }
+
+  @Override
+  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
+  }
+
+  @Override
+  public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
+    return accessController.getAllRoles();
+  }
+
+  @Override
+  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    return accessController.showPrivileges(principal, privObj);
+  }
+
+  @Override
+  public VERSION getVersion() {
+    // V1 is the plugin interface version this authorizer implements.
+    return VERSION.V1;
+  }
+
+  @Override
+  public void setCurrentRole(String roleName) throws HiveAccessControlException,
+      HiveAuthzPluginException {
+    accessController.setCurrentRole(roleName);
+  }
+
+  @Override
+  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+    return accessController.getCurrentRoleNames();
+  }
+
+  @Override
+  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    return accessController.getPrincipalGrantInfoForRole(roleName);
+  }
+
+  @Override
+  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    return accessController.getRoleGrantInfoForPrincipal(principal);
+  }
+
+  @Override
+  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
+    accessController.applyAuthorizationConfigPolicy(hiveConf);
+  }
+
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
+    return authValidator.filterListCmdObjects(listObjs, context);
+  }
+
+  @Override
+  public List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
+    return AuthorizationUtils.getHivePrincipals(principals);
+  }
+
+  @Override
+  public List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
+    return AuthorizationUtils.getHivePrivileges(privileges);
+  }
+
+  /**
+   * Converts a privilege object descriptor into a HivePrivilegeObject.
+   * Sentry-specific descriptors (server and URI grants) get a
+   * {@link SentryHivePrivilegeObject}; everything else falls back to the
+   * standard Hive conversion.
+   */
+  @Override
+  public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
+      throws HiveException {
+    SentryHivePrivilegeObjectDesc sPrivSubjectDesc = null;
+    if (privSubjectDesc instanceof SentryHivePrivilegeObjectDesc) {
+      sPrivSubjectDesc = (SentryHivePrivilegeObjectDesc) privSubjectDesc;
+    }
+    if (sPrivSubjectDesc != null && sPrivSubjectDesc.isSentryPrivObjectDesc()) {
+      HivePrivilegeObjectType objectType = getPrivObjectType(sPrivSubjectDesc);
+      return new SentryHivePrivilegeObject(objectType, privSubjectDesc.getObject());
+    } else {
+      return AuthorizationUtils.getHivePrivilegeObject(privSubjectDesc);
+    }
+  }
+
+  /**
+   * Maps a Sentry privilege object descriptor to the corresponding Hive
+   * privilege object type: server scope maps to GLOBAL, URI scope to
+   * LOCAL_URI, otherwise table/view or database. Returns null when the
+   * descriptor carries no object.
+   */
+  protected static HivePrivilegeObjectType getPrivObjectType(
+      SentryHivePrivilegeObjectDesc privSubjectDesc) {
+    if (privSubjectDesc.getObject() == null) {
+      return null;
+    }
+    if (privSubjectDesc.getServer()) {
+      return HivePrivilegeObjectType.GLOBAL;
+    } else if (privSubjectDesc.getUri()) {
+      return HivePrivilegeObjectType.LOCAL_URI;
+    } else {
+      return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW
+          : HivePrivilegeObjectType.DATABASE;
+    }
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java
new file mode 100644
index 0000000..ff648ff
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java
@@ -0,0 +1,413 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/***
+ * This class is the wrapper of ObjectStore which is the interface between the
+ * application logic and the database store. Do the authorization or filter the
+ * result when processing the metastore request.
+ * eg:
+ * Callers will only receive the objects back which they have privileges to
+ * access.
+ * If there is a request for the object list(like getAllTables()), the result
+ * will be filtered to exclude object the requestor doesn't have privilege to
+ * access.
+ */
+public class AuthorizingObjectStoreV2 extends ObjectStore {
+  private static ImmutableSet<String> serviceUsers;
+  private static HiveConf hiveConf;
+  private static HiveAuthzConf authzConf;
+  private static HiveAuthzBinding hiveAuthzBinding;
+  private static String NO_ACCESS_MESSAGE_TABLE = "Table does not exist or insufficient privileges to access: ";
+  private static String NO_ACCESS_MESSAGE_DATABASE = "Database does not exist or insufficient privileges to access: ";
+
+  @Override
+  public List<String> getDatabases(String pattern) throws MetaException {
+    return filterDatabases(super.getDatabases(pattern));
+  }
+
+  @Override
+  public List<String> getAllDatabases() throws MetaException {
+    return filterDatabases(super.getAllDatabases());
+  }
+
+  @Override
+  public Database getDatabase(String name) throws NoSuchObjectException {
+    Database db = super.getDatabase(name);
+    try {
+      if (filterDatabases(Lists.newArrayList(name)).isEmpty()) {
+        throw new NoSuchObjectException(getNoAccessMessageForDB(name));
+      }
+    } catch (MetaException e) {
+      throw new NoSuchObjectException("Failed to authorized access to " + name
+          + " : " + e.getMessage());
+    }
+    return db;
+  }
+
+  @Override
+  public Table getTable(String dbName, String tableName) throws MetaException {
+    Table table = super.getTable(dbName, tableName);
+    if (table == null
+        || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      return null;
+    }
+    return table;
+  }
+
+  @Override
+  public Partition getPartition(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartition(dbName, tableName, part_vals);
+  }
+
+  @Override
+  public List<Partition> getPartitions(String dbName, String tableName,
+      int maxParts) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartitions(dbName, tableName, maxParts);
+  }
+
+  @Override
+  public List<String> getTables(String dbName, String pattern)
+      throws MetaException {
+    return filterTables(dbName, super.getTables(dbName, pattern));
+  }
+
+  @Override
+  public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
+      throws MetaException, UnknownDBException {
+    return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames));
+  }
+
+  @Override
+  public List<String> getAllTables(String dbName) throws MetaException {
+    return filterTables(dbName, super.getAllTables(dbName));
+  }
+
+  @Override
+  public List<String> listTableNamesByFilter(String dbName, String filter,
+      short maxTables) throws MetaException {
+    return filterTables(dbName,
+        super.listTableNamesByFilter(dbName, filter, maxTables));
+  }
+
+  @Override
+  public List<String> listPartitionNames(String dbName, String tableName,
+      short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNames(dbName, tableName, max_parts);
+  }
+
+  @Override
+  public List<String> listPartitionNamesByFilter(String dbName,
+      String tableName, String filter, short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNamesByFilter(dbName, tableName, filter,
+        max_parts);
+  }
+
+  @Override
+  public Index getIndex(String dbName, String origTableName, String indexName)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndex(dbName, origTableName, indexName);
+  }
+
+  @Override
+  public List<Index> getIndexes(String dbName, String origTableName, int max)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndexes(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<String> listIndexNames(String dbName, String origTableName,
+      short max) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.listIndexNames(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByFilter(String dbName,
+      String tblName, String filter, short maxParts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByFilter(dbName, tblName, filter, maxParts);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByNames(String dbName, String tblName,
+      List<String> partNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByNames(dbName, tblName, partNames);
+  }
+
+  @Override
+  public Partition getPartitionWithAuth(String dbName, String tblName,
+      List<String> partVals, String user_name, List<String> group_names)
+      throws MetaException, NoSuchObjectException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionWithAuth(dbName, tblName, partVals, user_name,
+        group_names);
+  }
+
+  @Override
+  public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
+      short maxParts, String userName, List<String> groupNames)
+      throws MetaException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName,
+        groupNames);
+  }
+
+  @Override
+  public List<String> listPartitionNamesPs(String dbName, String tblName,
+      List<String> part_vals, short max_parts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts);
+  }
+
+  @Override
+  public List<Partition> listPartitionsPsWithAuth(String dbName,
+      String tblName, List<String> part_vals, short max_parts, String userName,
+      List<String> groupNames) throws MetaException, InvalidObjectException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionsPsWithAuth(dbName, tblName, part_vals,
+        max_parts, userName, groupNames);
+  }
+
+  @Override
+  public ColumnStatistics getTableColumnStatistics(String dbName,
+      String tableName, List<String> colNames) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getTableColumnStatistics(dbName, tableName, colNames);
+  }
+
+  @Override
+  public List<ColumnStatistics> getPartitionColumnStatistics(
+      String dbName, String tblName, List<String> partNames,
+      List<String> colNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionColumnStatistics(dbName, tblName, partNames,
+        colNames);
+  }
+
+  /**
+   * Invoke Hive database filtering that removes the entries which use has no
+   * privileges to access
+   * @param dbList
+   * @return
+   * @throws MetaException
+   */
+  private List<String> filterDatabases(List<String> dbList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
+            dbList, HiveOperation.SHOWDATABASES, getUserName());
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting DB list " + e.getMessage());
+      }
+    } else {
+      return dbList;
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering that removes the entries which use has no
+   * privileges to access
+   * @param dbList
+   * @return
+   * @throws MetaException
+   */
+  protected List<String> filterTables(String dbName, List<String> tabList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
+            tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting Table list " + e.getMessage());
+      }
+    } else {
+      return tabList;
+    }
+  }
+
+  /**
+   * load Hive auth provider
+   *
+   * @return
+   * @throws MetaException
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore,
+            getHiveConf(), getAuthzConf());
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private ImmutableSet<String> getServiceUsers() throws MetaException {
+    if (serviceUsers == null) {
+      serviceUsers = ImmutableSet.copyOf(toTrimed(Sets.newHashSet(getAuthzConf().getStrings(
+          AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" }))));
+    }
+    return serviceUsers;
+  }
+
+  private HiveConf getHiveConf() {
+    if (hiveConf == null) {
+      hiveConf = new HiveConf(getConf(), this.getClass());
+    }
+    return hiveConf;
+  }
+
+  private HiveAuthzConf getAuthzConf() throws MetaException {
+    if (authzConf == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+    }
+    return authzConf;
+  }
+
+  /**
+   * Extract the user from underlying auth subsystem
+   * @return
+   * @throws MetaException
+   */
+  private String getUserName() throws MetaException {
+    try {
+      return Utils.getUGI().getShortUserName();
+    } catch (LoginException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    }
+  }
+
+  /**
+   * Check if the give user needs to be validated.
+   * @param userName
+   * @return
+   */
+  private boolean needsAuthorization(String userName) throws MetaException {
+    return !getServiceUsers().contains(userName.trim());
+  }
+
+  private static Set<String> toTrimed(Set<String> s) {
+    Set<String> result = Sets.newHashSet();
+    for (String v : s) {
+      result.add(v.trim());
+    }
+    return result;
+  }
+
+  protected String getNoAccessMessageForTable(String dbName, String tableName) {
+    return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
+  }
+
+  private String getNoAccessMessageForDB(String dbName) {
+    return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">";
+  }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
new file mode 100644
index 0000000..d937491
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.binding.metastore.MetastoreAuthzBinding;
+
+/**
+ * Sentry binding for the Hive Metastore. The binding is integrated into the
+ * Metastore via pre-event listeners which are fired prior to executing the
+ * metadata action. At this point we are only authorizing metadata writes,
+ * since the listeners are not fired by read events. Each action builds an
+ * input and output hierarchy as per the objects used in the given operation.
+ * This is then passed down to the hive binding which handles the
+ * authorization. This ensures that we follow the same privilege model and
+ * policies.
+ */
+public class MetastoreAuthzBindingV2 extends MetastoreAuthzBinding {
+
+  public MetastoreAuthzBindingV2(Configuration config) throws Exception {
+    super(config);
+  }
+
+  // Authorizes ALTER TABLE ... DROP PARTITION against the partition's table.
+  // NOTE(review): both the input and the output hierarchy are built with
+  // addTableToOutput — this mirrors the v1 MetastoreAuthzBinding, but confirm
+  // the first hierarchy was not meant to use addTableToInput.
+  protected void authorizeDropPartition(PreDropPartitionEvent context)
+      throws InvalidOperationException, MetaException {
+    authorizeMetastoreAccess(
+        HiveOperation.ALTERTABLE_DROPPARTS,
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getTable().getDbName(),
+            context.getTable().getTableName()).build(),
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getTable().getDbName(),
+            context.getTable().getTableName()).build());
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
new file mode 100644
index 0000000..a72e745
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.sentry.binding.metastore.SentryMetastorePostEventListener;
+import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;
+
+public class SentryMetastorePostEventListenerV2 extends SentryMetastorePostEventListener {
+
+  public SentryMetastorePostEventListenerV2(Configuration config) {
+    super(config);
+  }
+
+  @Override
+  public void onAddPartition(AddPartitionEvent partitionEvent)
+      throws MetaException {
+    if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
+      Iterator<Partition> it = partitionEvent.getPartitionIterator();
+      while (it.hasNext()) {
+        Partition part = it.next();
+        if ((part.getSd() != null) && (part.getSd().getLocation() != null)) {
+          String authzObj = part.getDbName() + "." + part.getTableName();
+          String path = part.getSd().getLocation();
+          for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+            plugin.addPath(authzObj, path);
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public void onDropPartition(DropPartitionEvent partitionEvent)
+      throws MetaException {
+    if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
+      String authzObj = partitionEvent.getTable().getDbName() + "."
+          + partitionEvent.getTable().getTableName();
+      Iterator<Partition> it = partitionEvent.getPartitionIterator();
+      while (it.hasNext()) {
+        Partition part = it.next();
+        if ((part.getSd() != null) && (part.getSd().getLocation() != null)) {
+          String path = part.getSd().getLocation();
+          for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+            plugin.removePath(authzObj, path);
+          }
+        }
+      }
+    }
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
new file mode 100644
index 0000000..35bd68c
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
@@ -0,0 +1,362 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.util;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.SentryOnFailureHook;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.utils.PathUtils;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.db.service.thrift.TSentryGrantOption;
+import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.apache.sentry.service.thrift.ServiceConstants.PrivilegeScope;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Splitter;
+
+public class SentryAuthorizerUtil {
+  public static final Logger LOG = LoggerFactory.getLogger(SentryAuthorizerUtil.class);
+  // Sentry does not record the grantor of a privilege, so this placeholder is
+  // reported instead. NOTE(review): public field name kept as-is (typo and
+  // all) for compatibility with existing callers.
+  public static String UNKONWN_GRANTOR = "--";
+
+  /**
+   * Converts a path string to an AccessURI, resolving it against the
+   * metastore warehouse directory of the current session.
+   *
+   * @param uri the path/URI string to parse
+   * @param isLocal whether the URI refers to the local file system
+   * @throws URISyntaxException if the string cannot be parsed as a URI
+   */
+  public static AccessURI parseURI(String uri, boolean isLocal) throws URISyntaxException {
+    HiveConf conf = SessionState.get().getConf();
+    String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+    return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
+  }
+
+  /**
+   * Converts a HivePrivilegeObject to a list of DBModelAuthorizable
+   * hierarchies. When the object carries column names, one hierarchy is
+   * produced per column; otherwise a single hierarchy is produced.
+   * FUNCTION, PARTITION, COLUMN and COMMAND_PARAMS types are not supported
+   * and yield an empty list.
+   *
+   * @param server the Sentry server authorizable
+   * @param privilege the Hive privilege object to convert
+   * @throws AuthorizationException if a URI object name cannot be parsed
+   */
+  public static List<List<DBModelAuthorizable>> getAuthzHierarchy(Server server,
+      HivePrivilegeObject privilege) {
+    List<DBModelAuthorizable> baseHierarchy = new ArrayList<DBModelAuthorizable>();
+    List<List<DBModelAuthorizable>> objectHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+    boolean isLocal = false;
+    if (privilege.getType() != null) {
+      switch (privilege.getType()) {
+        case GLOBAL:
+          baseHierarchy.add(new Server(privilege.getObjectName()));
+          objectHierarchy.add(baseHierarchy);
+          break;
+        case DATABASE:
+          baseHierarchy.add(server);
+          baseHierarchy.add(new Database(privilege.getDbname()));
+          objectHierarchy.add(baseHierarchy);
+          break;
+        case TABLE_OR_VIEW:
+          baseHierarchy.add(server);
+          baseHierarchy.add(new Database(privilege.getDbname()));
+          baseHierarchy.add(new Table(privilege.getObjectName()));
+          if (privilege.getColumns() != null) {
+            // One hierarchy per column, each sharing the server/db/table base.
+            for (String columnName : privilege.getColumns()) {
+              List<DBModelAuthorizable> columnHierarchy =
+                  new ArrayList<DBModelAuthorizable>(baseHierarchy);
+              columnHierarchy.add(new Column(columnName));
+              objectHierarchy.add(columnHierarchy);
+            }
+          } else {
+            objectHierarchy.add(baseHierarchy);
+          }
+          break;
+        case LOCAL_URI:
+          isLocal = true;
+          // intentional fall-through: local URIs share the DFS_URI handling
+        case DFS_URI:
+          if (privilege.getObjectName() == null) {
+            break;
+          }
+          try {
+            baseHierarchy.add(server);
+            baseHierarchy.add(parseURI(privilege.getObjectName(), isLocal));
+            objectHierarchy.add(baseHierarchy);
+          } catch (Exception e) {
+            throw new AuthorizationException("Failed to get File URI", e);
+          }
+          break;
+        case FUNCTION:
+        case PARTITION:
+        case COLUMN:
+        case COMMAND_PARAMS:
+          // these types are not supported
+          break;
+        default:
+          break;
+      }
+    }
+    return objectHierarchy;
+  }
+
+  /**
+   * Converts a list of HivePrivilegeObjects to a flat list of Sentry
+   * authorizable hierarchies. A null or empty input yields an empty list.
+   *
+   * @param server the Sentry server authorizable
+   * @param privilges the Hive privilege objects to convert
+   */
+  public static List<List<DBModelAuthorizable>> convert2SentryPrivilegeList(Server server,
+      List<HivePrivilegeObject> privilges) {
+    List<List<DBModelAuthorizable>> hierarchyList = new ArrayList<List<DBModelAuthorizable>>();
+    if (privilges != null && !privilges.isEmpty()) {
+      for (HivePrivilegeObject p : privilges) {
+        hierarchyList.addAll(getAuthzHierarchy(server, p));
+      }
+    }
+    return hierarchyList;
+  }
+
+  /**
+   * Converts a HiveOperationType name to the corresponding HiveOperation.
+   *
+   * @param typeName the enum constant name to look up
+   * @return the matching HiveOperation, or null if no constant matches
+   */
+  public static HiveOperation convert2HiveOperation(String typeName) {
+    try {
+      return HiveOperation.valueOf(typeName);
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  /**
+   * Converts a HivePrivilege to a Sentry action string; Hive's ALL maps to
+   * Sentry's ALL constant, every other privilege name is passed through.
+   *
+   * @param hivePrivilege the Hive privilege to convert
+   */
+  public static String convert2SentryAction(HivePrivilege hivePrivilege) {
+    if (PrivilegeType.ALL.name().equals(hivePrivilege.getName())) {
+      return AccessConstants.ALL;
+    } else {
+      return hivePrivilege.getName();
+    }
+  }
+
+  /**
+   * Converts a Sentry action string to a HivePrivilege (with no columns).
+   *
+   * @param action the Sentry action name
+   */
+  public static HivePrivilege convert2HivePrivilege(String action) {
+    return new HivePrivilege(action, null);
+  }
+
+  /**
+   * Converts a set of TSentryRoles to a list of role names. A null or empty
+   * input yields an empty list.
+   *
+   * @param roleSet the roles to convert
+   */
+  public static List<String> convert2RoleList(Set<TSentryRole> roleSet) {
+    List<String> roles = new ArrayList<String>();
+    if (roleSet != null && !roleSet.isEmpty()) {
+      for (TSentryRole tRole : roleSet) {
+        roles.add(tRole.getRoleName());
+      }
+    }
+    return roles;
+  }
+
+  /**
+   * Converts a TSentryPrivilege to a HivePrivilegeInfo for the given
+   * principal. Sentry does not track grantors, so a placeholder grantor is
+   * reported.
+   *
+   * @param tPrivilege the Sentry privilege to convert
+   * @param principal the principal the privilege belongs to
+   */
+  public static HivePrivilegeInfo convert2HivePrivilegeInfo(TSentryPrivilege tPrivilege,
+      HivePrincipal principal) {
+    HivePrivilege hivePrivilege = convert2HivePrivilege(tPrivilege.getAction());
+    HivePrivilegeObject hivePrivilegeObject = convert2HivePrivilegeObject(tPrivilege);
+    // Sentry doesn't expose the grantor of a privilege.
+    HivePrincipal grantor = new HivePrincipal(UNKONWN_GRANTOR, HivePrincipalType.ROLE);
+    // null-safe: only an explicit TRUE counts as grant option
+    boolean grantOption = TSentryGrantOption.TRUE.equals(tPrivilege.getGrantOption());
+    return new HivePrivilegeInfo(principal, hivePrivilege, hivePrivilegeObject, grantor,
+        grantOption, (int) tPrivilege.getCreateTime());
+  }
+
+  /**
+   * Converts a TSentryPrivilege to a HivePrivilegeObject according to its
+   * privilege scope (SERVER, DATABASE, TABLE, COLUMN or URI).
+   *
+   * @param tSentryPrivilege the Sentry privilege to convert
+   * @return the converted object, or null for an unknown scope
+   */
+  public static HivePrivilegeObject convert2HivePrivilegeObject(TSentryPrivilege tSentryPrivilege) {
+    HivePrivilegeObject privilege = null;
+    switch (PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope())) {
+      case SERVER:
+        privilege = new HivePrivilegeObject(HivePrivilegeObjectType.GLOBAL, "*", null);
+        break;
+      case DATABASE:
+        privilege =
+            new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, tSentryPrivilege.getDbName(),
+                null);
+        break;
+      case TABLE:
+        privilege =
+            new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW,
+                tSentryPrivilege.getDbName(), tSentryPrivilege.getTableName());
+        break;
+      case COLUMN:
+        privilege =
+            new HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, tSentryPrivilege.getDbName(),
+                tSentryPrivilege.getTableName(), null, tSentryPrivilege.getColumnName());
+        break;
+      case URI:
+        String uriString = tSentryPrivilege.getURI();
+        try {
+          uriString = uriString.replace("'", "").replace("\"", "");
+          HivePrivilegeObjectType type =
+              isLocalUri(uriString) ? HivePrivilegeObjectType.LOCAL_URI
+                  : HivePrivilegeObjectType.DFS_URI;
+          privilege = new HivePrivilegeObject(type, uriString, null);
+        } catch (URISyntaxException e1) {
+          // preserve the cause for diagnosis
+          throw new RuntimeException(uriString + " is not a URI", e1);
+        }
+        // fixed: without this break, a successfully converted URI privilege
+        // fell through to default and logged a bogus "Unknown PrivilegeScope"
+        break;
+      default:
+        LOG.warn("Unknown PrivilegeScope: "
+            + PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope()));
+        break;
+    }
+    return privilege;
+  }
+
+  /**
+   * Returns true if the given URI string has the "file" scheme. A URI with
+   * no scheme (e.g. a bare path) is treated as non-local rather than
+   * triggering an NPE.
+   *
+   * @param uriString the URI string to inspect
+   * @throws URISyntaxException if the string cannot be parsed as a URI
+   */
+  public static boolean isLocalUri(String uriString) throws URISyntaxException {
+    URI uri = new URI(uriString);
+    return "file".equalsIgnoreCase(uri.getScheme());
+  }
+
+  /**
+   * Converts a TSentryRole to a HiveRoleGrant. The role name doubles as the
+   * principal name and the grant option is always reported as false.
+   *
+   * @param role the Sentry role to convert
+   */
+  public static HiveRoleGrant convert2HiveRoleGrant(TSentryRole role) {
+    HiveRoleGrant hiveRoleGrant = new HiveRoleGrant();
+    hiveRoleGrant.setRoleName(role.getRoleName());
+    hiveRoleGrant.setPrincipalName(role.getRoleName());
+    hiveRoleGrant.setPrincipalType(PrincipalType.ROLE.name());
+    hiveRoleGrant.setGrantOption(false);
+    hiveRoleGrant.setGrantor(role.getGrantorPrincipal());
+    hiveRoleGrant.setGrantorType(PrincipalType.USER.name());
+    return hiveRoleGrant;
+  }
+
+  /**
+   * Executes the configured on-failure hooks (used by e2e tests). Hook
+   * failures are logged, never propagated.
+   *
+   * @param hookCtx context describing the failed authorization
+   * @param conf configuration holding the comma-separated hook class list
+   */
+  public static void executeOnFailureHooks(SentryOnFailureHookContext hookCtx, Configuration conf) {
+    String csHooks =
+        conf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
+
+    try {
+      for (Hook aofh : SentryAuthorizerUtil.getHooks(csHooks)) {
+        ((SentryOnFailureHook) aofh).run(hookCtx);
+      }
+    } catch (Exception ex) {
+      LOG.error("Error executing hook:", ex);
+    }
+  }
+
+  /**
+   * Returns the hooks named in a comma-separated class list.
+   *
+   * See getHooks(String csHooks, Class&lt;T&gt; clazz).
+   *
+   * @param csHooks comma-separated list of hook class names
+   * @throws Exception if a hook class cannot be loaded or instantiated
+   */
+  public static List<Hook> getHooks(String csHooks) throws Exception {
+    return getHooks(csHooks, Hook.class);
+  }
+
+  /**
+   * Returns the hooks named in a comma-separated class list, instantiated in
+   * the order they are listed.
+   *
+   * @param csHooks comma-separated list of hook class names
+   * @param clazz the super type of the hooks
+   * @return a list of the hooks cast to clazz, in listed order
+   * @throws Exception if a hook class cannot be loaded or instantiated
+   */
+  public static <T extends Hook> List<T> getHooks(String csHooks, Class<T> clazz) throws Exception {
+
+    List<T> hooks = new ArrayList<T>();
+    if (csHooks.isEmpty()) {
+      return hooks;
+    }
+    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
+      try {
+        @SuppressWarnings("unchecked")
+        T hook = (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
+        hooks.add(hook);
+      } catch (ClassNotFoundException e) {
+        LOG.error(hookClass + " Class not found:" + e.getMessage());
+        throw e;
+      }
+    }
+
+    return hooks;
+  }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
new file mode 100644
index 0000000..b50bbf4
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
@@ -0,0 +1,369 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.util;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.core.model.db.Table;
+
+/**
+ * Currently hive complier doesn't create read/write entities for some operations, e.g. create
+ * table, drop table. This class is a simple semantic analyzer using regex, it is a workaround
+ * approach to extract db_name and tb_name from those operations.
+ */
+public class SimpleSemanticAnalyzer {
+  // database extracted from the command (defaults to the session's current db)
+  private String currentDb;
+  // table (or function class, for CREATE FUNCTION) extracted from the command
+  private String currentTb;
+
+  /**
+   * CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name ...
+   */
+  private static final String CREATE_TABLE_REGEX = "^(CREATE)\\s+" + "(TEMPORARY\\s+)?"
+      + "(EXTERNAL\\s+)?" + "TABLE\\s+" + "(IF\\s+NOT\\s+EXISTS\\s+)?" + "([A-Za-z0-9._]+)";
+
+  /**
+   * DROP (DATABASE|SCHEMA) [IF EXISTS] database_name [RESTRICT|CASCADE];
+   */
+  private static final String DROP_DB_REGEX = "^DROP\\s+" + "(DATABASE|SCHEMA)\\s+"
+      + "(IF\\s+EXISTS\\s+)?" + "([A-Za-z0-9_]+)";
+
+  /**
+   * DROP TABLE [IF EXISTS] table_name;
+   */
+  private static final String DROP_TABLE_REGEX = "^DROP\\s+" + "TABLE\\s+" + "(IF\\s+EXISTS\\s+)?"
+      + "([A-Za-z0-9._]+)";
+
+  /**
+   * DROP VIEW [IF EXISTS] view_name;
+   */
+  // fixed: the capture group was "([A-Za-z0-9_].+)" (one identifier char
+  // followed by ANY characters), which swallowed trailing text such as ";".
+  // Now consistent with DROP_TABLE_REGEX.
+  private static final String DROP_VIEW_REGEX = "^DROP\\s+" + "VIEW\\s+" + "(IF\\s+EXISTS\\s+)?"
+      + "([A-Za-z0-9._]+)";
+
+  /**
+   * DESCRIBE DATABASE|SCHEMA [EXTENDED] db_name;
+   */
+  private static final String DESCRIBE_DB_REGEX = "^DESCRIBE\\s+" + "(DATABASE|SCHEMA)\\s+"
+      + "(EXTENDED\\s+)?" + "([A-Za-z0-9_]+)";
+
+  /**
+   * DESCRIBE [EXTENDED|FORMATTED] [db_name.]table_name[.col_name ( [.field_name] | [.'$elem$'] |
+   * [.'$key$'] | [.'$value$'] )* ];
+   */
+  private static final String DESCRIBE_TABLE_REGEX = "^DESCRIBE\\s+"
+      + "((EXTENDED|FORMATTED)\\s+)?" + "([A-Za-z0-9._]+)";
+
+  /**
+   * SHOW [FORMATTED] (INDEX|INDEXES) ON table_with_index [(FROM|IN) db_name];
+   */
+  private static final String SHOW_INDEX_REGEX = "^SHOW\\s+" + "(FORMATTED\\s+)?"
+      + "(INDEX|INDEXES)\\s+" + "ON\\s+" + "([A-Za-z0-9._]+)\\s*"
+      + "((FROM|IN)\\s+([A-Za-z0-9_]+))?";
+
+  /**
+   * SHOW TBLPROPERTIES tblname;
+   */
+  private static final String SHOW_TBLPROPERTIES_REGEX = "^SHOW\\s+" + "TBLPROPERTIES\\s+"
+      + "([A-Za-z0-9._]+)";
+
+  /**
+   * ALTER TABLE table_name ...
+   */
+  private static final String ALTER_TABLE_REGEX = "^ALTER\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * ALTER VIEW view_name ...
+   */
+  private static final String ALTER_VIEW_REGEX = "^ALTER\\s+" + "VIEW\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * MSCK REPAIR TABLE table_name;
+   */
+  private static final String MSCK_REGEX = "^MSCK\\s+" + "REPAIR\\s" + "TABLE\\s"
+      + "([A-Za-z0-9._]+)";
+
+  /**
+   * ALTER INDEX index_name ON table_name [PARTITION partition_spec] REBUILD;
+   */
+  private static final String ALTER_INDEX_REGEX = "^ALTER\\s+" + "INDEX\\s+"
+      + "([A-Za-z0-9_]+)\\s+" + "ON\\s" + "([A-Za-z0-9._]+)";
+
+  /**
+   * CREATE FUNCTION [db_name.]function_name AS class_name [USING JAR|FILE|ARCHIVE 'file_uri' [,
+   * JAR|FILE|ARCHIVE 'file_uri'] ];
+   */
+  private static final String CREATE_FUNCTION_REGEX = "^CREATE\\s+" + "(TEMPORARY\\s+)?"
+      + "FUNCTION\\s+" + "([A-Za-z0-9._]+)\\s+" + "AS\\s" + "([A-Za-z0-9._']+)";
+
+  /**
+   * SHOW COLUMNS FROM table_name
+   */
+  private static final String SHOWCOLUMNS = "^SHOW\\s+" + "COLUMNS\\s+" + "(FROM|IN)\\s+"
+      + "([A-Za-z0-9._]+)";
+
+  /**
+   * SHOW TABLE EXTENDED (IN|FROM) db_name LIKE ...
+   */
+  // generalized: Hive's grammar accepts FROM as well as IN here
+  private static final String SHOW_TABLESTATUS = "^SHOW\\s+" + "TABLE\\s+" + "EXTENDED\\s+"
+      + "(IN|FROM)\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * LOAD DATA [LOCAL] INPATH 'path' INTO TABLE table_name
+   */
+  private static final String LOAD = "^LOAD\\s+" + "DATA\\s+" + "(LOCAL\\s+)?" + "INPATH\\s+"
+      + "([A-Za-z0-9._':///-]+)" +"\\s" + "INTO\\s" + "TABLE\\s" + "([A-Za-z0-9._]+)";
+
+  /**
+   * LOCK DATABASE dbname;
+   */
+  private static final String LOCKDB = "^LOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * UNLOCK DATABASE dbname;
+   */
+  private static final String UNLOCKDB = "^UNLOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * LOCK TABLE tblname;
+   */
+  private static final String LOCKTABLE = "^LOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+  /**
+   * UNLOCK TABLE tblname;
+   */
+  private static final String UNLOCKTABLE = "^UNLOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+  // maps each HiveOperation to the regex used to pull db/table names from it
+  private static Map<HiveOperation, String> OP_REGEX_MAP = new HashMap<HiveOperation, String>();
+  static {
+    // database metadata
+    OP_REGEX_MAP.put(HiveOperation.DROPDATABASE, DROP_DB_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.DESCDATABASE, DESCRIBE_DB_REGEX);
+
+    // table metadata
+    OP_REGEX_MAP.put(HiveOperation.CREATETABLE, CREATE_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.DROPTABLE, DROP_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.DROPVIEW, DROP_VIEW_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.DESCTABLE, DESCRIBE_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.SHOW_TBLPROPERTIES, SHOW_TBLPROPERTIES_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROPERTIES, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERDEPROPERTIES, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_FILEFORMAT, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_TOUCH, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROTECTMODE, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMECOL, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDCOLS, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_REPLACECOLS, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMEPART, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ARCHIVE, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_UNARCHIVE, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERIALIZER, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_MERGEFILES, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SKEWED, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_DROPPARTS, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDPARTS, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAME, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_LOCATION, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_FILEFORMAT, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_PROTECTMODE, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERIALIZER, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_MERGEFILES, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_LOCATION, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERTBLPART_SKEWED_LOCATION, ALTER_TABLE_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERVIEW_PROPERTIES, ALTER_VIEW_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.MSCK, MSCK_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_REBUILD, ALTER_INDEX_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_PROPS, ALTER_INDEX_REGEX);
+    OP_REGEX_MAP.put(HiveOperation.LOCKDB, LOCKDB);
+    OP_REGEX_MAP.put(HiveOperation.UNLOCKDB, UNLOCKDB);
+    OP_REGEX_MAP.put(HiveOperation.LOCKTABLE, LOCKTABLE);
+    OP_REGEX_MAP.put(HiveOperation.UNLOCKTABLE, UNLOCKTABLE);
+    OP_REGEX_MAP.put(HiveOperation.SHOWCOLUMNS, SHOWCOLUMNS);
+    OP_REGEX_MAP.put(HiveOperation.SHOW_TABLESTATUS, SHOW_TABLESTATUS);
+  }
+
+  /**
+   * Parses the given command for the given operation, extracting the database
+   * and table names into {@link #getCurrentDb()} / {@link #getCurrentTb()}.
+   *
+   * @param hiveOp the operation the command performs
+   * @param cmd the raw command text
+   * @throws HiveAuthzPluginException if the command does not match the
+   *         expected grammar
+   */
+  public SimpleSemanticAnalyzer(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
+    currentDb = SessionState.get().getCurrentDatabase();
+    parse(hiveOp, cmd);
+  }
+
+  // Dispatches the command to the parser matching its operation kind.
+  private void parse(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
+    switch (hiveOp) {
+      case DROPDATABASE:
+      case DESCDATABASE:
+      case LOCKDB:
+      case UNLOCKDB:
+        parseDbMeta(cmd, OP_REGEX_MAP.get(hiveOp));
+        break;
+      case DESCTABLE:
+      case CREATETABLE:
+      case DROPTABLE:
+      case DROPVIEW:
+      case SHOW_TBLPROPERTIES:
+        // alter table
+      case ALTERTABLE_PROPERTIES:
+      case ALTERTABLE_SERDEPROPERTIES:
+      case ALTERTABLE_CLUSTER_SORT:
+      case ALTERTABLE_FILEFORMAT:
+      case ALTERTABLE_TOUCH:
+      case ALTERTABLE_PROTECTMODE:
+      case ALTERTABLE_RENAMECOL:
+      case ALTERTABLE_ADDCOLS:
+      case ALTERTABLE_REPLACECOLS:
+      case ALTERTABLE_RENAMEPART:
+      case ALTERTABLE_ARCHIVE:
+      case ALTERTABLE_UNARCHIVE:
+      case ALTERTABLE_SERIALIZER:
+      case ALTERTABLE_MERGEFILES:
+      case ALTERTABLE_SKEWED:
+      case ALTERTABLE_DROPPARTS:
+      case ALTERTABLE_ADDPARTS:
+      case ALTERTABLE_RENAME:
+      case ALTERTABLE_LOCATION:
+        // alter view
+      case ALTERVIEW_PROPERTIES:
+        // alter partition
+      case ALTERPARTITION_FILEFORMAT:
+      case ALTERPARTITION_PROTECTMODE:
+      case ALTERPARTITION_SERDEPROPERTIES:
+      case ALTERPARTITION_SERIALIZER:
+      case ALTERPARTITION_MERGEFILES:
+      case ALTERPARTITION_LOCATION:
+      case ALTERTBLPART_SKEWED_LOCATION:
+        // MSCK
+      case MSCK:
+        // alter index
+      case ALTERINDEX_REBUILD:
+      case ALTERINDEX_PROPS:
+      case LOCKTABLE:
+      case UNLOCKTABLE:
+      case SHOWCOLUMNS:
+        parseTableMeta(cmd, OP_REGEX_MAP.get(hiveOp));
+        break;
+      case SHOWINDEXES:
+        parseShowIndex(cmd, SHOW_INDEX_REGEX);
+        break;
+      case CREATEFUNCTION:
+        parseFunction(cmd, CREATE_FUNCTION_REGEX);
+        break;
+      case SHOW_TABLESTATUS:
+        parseTableExtend(cmd, SHOW_TABLESTATUS);
+        break;
+      case LOAD:
+        parseLoadTable(cmd, LOAD);
+        break;
+      default:
+        break;
+    }
+  }
+
+  // Extracts the target table of a LOAD DATA ... INTO TABLE command.
+  private void parseLoadTable(String cmd, String load) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(load, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      // the table name is the last capture group
+      String tbName = matcher.group(matcher.groupCount());
+      extractDbAndTb(tbName.trim());
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+    }
+  }
+
+  // Extracts the database of a SHOW TABLE EXTENDED command; the table is
+  // unknown at this point, so the wildcard Table.SOME is used.
+  private void parseTableExtend(String cmd, String showTablestatus) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(showTablestatus, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      String dbName = matcher.group(matcher.groupCount());
+      currentDb = dbName;
+      currentTb = Table.SOME.getName();
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+    }
+  }
+
+  // Splits a possibly db-qualified table name into currentDb/currentTb,
+  // falling back to the session's current database when unqualified.
+  private void extractDbAndTb(String tableName) {
+    if (tableName.contains(".")) {
+      String[] tb = tableName.split("\\.");
+      currentDb = tb[0];
+      currentTb = tb[1];
+    } else {
+      currentDb = SessionState.get().getCurrentDatabase();
+      currentTb = tableName;
+    }
+  }
+
+  // Extracts the database name of a database-level command.
+  private void parseDbMeta(String cmd, String regex) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      currentDb = matcher.group(matcher.groupCount());
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd
+          + " is not match database meta grammar");
+    }
+  }
+
+  // Extracts the (possibly db-qualified) table name of a table-level command.
+  private void parseTableMeta(String cmd, String regex) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      String tbName = matcher.group(matcher.groupCount());
+      extractDbAndTb(tbName.trim());
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+    }
+  }
+
+  // Extracts table (group 3) and optional database (last group) of a
+  // SHOW INDEX command.
+  private void parseShowIndex(String cmd, String regex) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      String dbName = matcher.group(matcher.groupCount());
+      String tbName = matcher.group(3);
+      if (dbName != null) {
+        currentDb = dbName;
+        currentTb = tbName;
+      } else {
+        extractDbAndTb(tbName);
+      }
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd + " is not match show index grammar");
+    }
+  }
+
+  // Extracts the implementing class of a CREATE FUNCTION command, stripping
+  // surrounding quotes. The class name is stored in currentTb.
+  private void parseFunction(String cmd, String regex) throws HiveAuthzPluginException {
+    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+    Matcher matcher = pattern.matcher(cmd);
+    if (matcher.find()) {
+      String udfClass = matcher.group(matcher.groupCount());
+      if (udfClass.contains("'")) {
+        currentTb = udfClass.split("'")[1];
+      } else {
+        currentTb = udfClass;
+      }
+    } else {
+      throw new HiveAuthzPluginException("this command " + cmd
+          + " is not match create function grammar");
+    }
+  }
+
+  /** Returns the database name extracted from the command. */
+  public String getCurrentDb() {
+    return currentDb;
+  }
+
+  /** Returns the table name (or function class) extracted from the command. */
+  public String getCurrentTb() {
+    return currentTb;
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java
new file mode 100644
index 0000000..9335c37
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+public class DummyHiveAuthenticationProvider implements HiveAuthenticationProvider { // Test-only authenticator whose user name is injected via setUserName().
+
+  private String userName;
+  private Configuration conf;
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+
+  @Override
+  public String getUserName() {
+    return userName; // null until a test calls setUserName()
+  }
+
+  @Override
+  public List<String> getGroupNames() {
+    return null; // NOTE(review): returns null rather than an empty list -- test callers must tolerate this
+  }
+
+  @Override
+  public void destroy() throws HiveException {
+    // no resources to release in this dummy implementation
+  }
+
+  @Override
+  public void setSessionState(SessionState ss) {
+    // session state is ignored by this dummy implementation
+  }
+
+  public void setUserName(String user) { // Lets tests choose which user appears authenticated.
+    this.userName = user;
+  }
+
+}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
index 18cdde2..8929357 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
@@ -47,4 +47,8 @@
     this.isServer = isServer;
   }
 
+  public boolean isSentryPrivObjectDesc() { // True when this desc targets a Sentry-specific object kind (server or URI).
+    return isServer || isUri;
+  }
+
 }
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
index d9bb42d..2e0f299 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
@@ -379,8 +379,9 @@
   // verify senty session hook is set
   private boolean isSentryEnabledOnHiveServer(Statement stmt)
       throws SQLException {
-    return HiveAuthzBindingSessionHook.class.getName().equalsIgnoreCase(
-        readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname));
+    String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase(); // NOTE(review): NPEs if the hook config is unset -- confirm readConfig never returns null
+    return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase()) // substring match accepts both the v1 and v2 session hook classes
+        && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase());
   }
 
   // read a config value using 'set' statement
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
index 5a0c950..9938373 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
@@ -303,7 +303,7 @@
    * @return
    * @throws MetaException
    */
-  private List<String> filterTables(String dbName, List<String> tabList)
+  protected List<String> filterTables(String dbName, List<String> tabList)
       throws MetaException {
     if (needsAuthorization(getUserName())) {
       try {
@@ -403,7 +403,7 @@
     return result;
   }
 
-  private String getNoAccessMessageForTable(String dbName, String tableName) {
+  protected String getNoAccessMessageForTable(String dbName, String tableName) { // widened to protected so hive-v2 subclasses can reuse the message format
     return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
   }
 
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
index 5375f6a..f6b9c7a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
@@ -79,7 +79,7 @@
   /**
    * Build the set of object hierarchies ie fully qualified db model objects
    */
-  private static class HierarcyBuilder {
+  protected static class HierarcyBuilder {
     private List<List<DBModelAuthorizable>> authHierarchy;
 
     public HierarcyBuilder() {
@@ -337,7 +337,7 @@
     }
   }
 
-  private void authorizeDropPartition(PreDropPartitionEvent context)
+  protected void authorizeDropPartition(PreDropPartitionEvent context)
       throws InvalidOperationException, MetaException {
     authorizeMetastoreAccess(
         HiveOperation.ALTERTABLE_DROPPARTS,
@@ -392,7 +392,7 @@
    * @param outputHierarchy
    * @throws InvalidOperationException
    */
-  private void authorizeMetastoreAccess(HiveOperation hiveOp,
+  protected void authorizeMetastoreAccess(HiveOperation hiveOp,
       List<List<DBModelAuthorizable>> inputHierarchy,
       List<List<DBModelAuthorizable>> outputHierarchy)
       throws InvalidOperationException {
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
index e8f21e5..9f33f3d 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
@@ -111,7 +111,7 @@
   @Override
   public List<String> filterIndexNames(String dbName, String tblName,
       List<String> indexList) {
-    return null;
+    return indexList; // pass indexes through unfiltered; the previous null return broke callers that iterate the result
   }
 
   @Override
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
index 4924669..ecdfe1f 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
@@ -56,7 +56,7 @@
   private final HiveAuthzConf authzConf;
   private final Server server;
 
-  private List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>();
+  protected List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>();
 
   public SentryMetastorePostEventListener(Configuration config) {
     super(config);
diff --git a/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java b/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java
index 89aabfc..305fd1f 100644
--- a/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java
+++ b/sentry-core/sentry-core-model-db/src/main/java/org/apache/sentry/core/model/db/Column.java
@@ -23,6 +23,8 @@
    */
   public static final Column ALL = new Column(AccessConstants.ALL);
 
+  public static final Column SOME = new Column(AccessConstants.SOME);
+
   private final String name;
 
   public Column(String name) {