DRILL-8531: Update Various Libraries due to CVEs (#3016)

diff --git a/common/src/main/java/org/apache/drill/common/KerberosUtil.java b/common/src/main/java/org/apache/drill/common/KerberosUtil.java
index 44403e9..cb7d511 100644
--- a/common/src/main/java/org/apache/drill/common/KerberosUtil.java
+++ b/common/src/main/java/org/apache/drill/common/KerberosUtil.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.common;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 
@@ -24,7 +27,7 @@
 import static com.google.common.base.Preconditions.checkState;
 
 public final class KerberosUtil {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(KerberosUtil.class);
+  private static final Logger logger = LoggerFactory.getLogger(KerberosUtil.class);
 
   // Per this link http://docs.oracle.com/javase/jndi/tutorial/ldap/security/gssapi.html
   // "... GSS-API SASL mechanism was retrofitted to mean only Kerberos v5 ..."
diff --git a/common/src/main/java/org/apache/drill/common/exceptions/UserExceptionUtils.java b/common/src/main/java/org/apache/drill/common/exceptions/UserExceptionUtils.java
index 1d9fbff..15b6ec5 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/UserExceptionUtils.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/UserExceptionUtils.java
@@ -33,7 +33,8 @@
     return String.format("[Hint: %s]", text);
   }
   public static String getUserHint(final Throwable ex) {
-    if (ex.getMessage().startsWith("Error getting user info for current user")) {
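+    // Throwable.getMessage() may return null, so guard before calling startsWith()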
+    final String message = ex.getMessage();
+    if (message != null && message.startsWith("Error getting user info for current user")) {
       //User does not exist hint
       return decorateHint(USER_DOES_NOT_EXIST);
     } else {
diff --git a/contrib/storage-hive/hive-exec-shade/pom.xml b/contrib/storage-hive/hive-exec-shade/pom.xml
index aa6032b..3ab44d1 100644
--- a/contrib/storage-hive/hive-exec-shade/pom.xml
+++ b/contrib/storage-hive/hive-exec-shade/pom.xml
@@ -32,7 +32,7 @@
   <name>Drill : Contrib : Storage : Hive : Exec Shaded</name>
 
   <properties>
-    <hive.parquet.version>1.15.1</hive.parquet.version>
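+    <!-- Keep in sync with parquet.version in the root pom -->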
+    <hive.parquet.version>1.15.2</hive.parquet.version>
   </properties>
 
   <dependencyManagement>
diff --git a/contrib/storage-phoenix/pom.xml b/contrib/storage-phoenix/pom.xml
index aad0fac..f483dc7 100644
--- a/contrib/storage-phoenix/pom.xml
+++ b/contrib/storage-phoenix/pom.xml
@@ -1,37 +1,40 @@
 <?xml version="1.0"?>
 <!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
 
     http://www.apache.org/licenses/LICENSE-2.0
 
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
-
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
+
   <parent>
     <groupId>org.apache.drill.contrib</groupId>
     <artifactId>drill-contrib-parent</artifactId>
     <version>1.23.0-SNAPSHOT</version>
   </parent>
+
   <artifactId>drill-storage-phoenix</artifactId>
   <name>Drill : Contrib : Storage : Phoenix</name>
 
   <properties>
-    <phoenix.version>5.1.3</phoenix.version>
-    <!-- Limit the HBase minicluster version to 2.4.x to avoid a dependency conflict. -->
-    <hbase.minicluster.version>2.4.17</hbase.minicluster.version>
+    <phoenix.version>5.2.1</phoenix.version>
+    <hbase.version>2.6.3</hbase.version>
+    <hbase.shaded.version>4.1.5</hbase.shaded.version>
     <skipTests>false</skipTests>
   </properties>
 
@@ -48,21 +51,26 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+
     <dependency>
       <groupId>org.apache.drill</groupId>
       <artifactId>drill-common</artifactId>
-      <classifier>tests</classifier>
       <version>${project.version}</version>
+      <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
+
+    <!-- Phoenix -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-core</artifactId>
       <version>${phoenix.version}</version>
-      <scope>test</scope>
-      <classifier>tests</classifier>
       <exclusions>
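+        <!-- Exclude Phoenix's transitive protobuf-java in favour of Drill's managed version -->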
         <exclusion>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>org.slf4j</groupId>
           <artifactId>*</artifactId>
         </exclusion>
@@ -70,209 +78,94 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
-        <exclusion>
-          <artifactId>commons-logging</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm-all</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>commons-configuration</groupId>
-          <artifactId>commons-configuration</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.commons</groupId>
-          <artifactId>commons-csv</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-endpoint</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.salesforce.i18n</groupId>
-          <artifactId>i18n-util</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>jline</groupId>
-          <artifactId>jline</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.codahale.metrics</groupId>
-          <artifactId>metrics-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.codahale.metrics</groupId>
-          <artifactId>metrics-graphite</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.jboss.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-hbase-compat-2.4.1</artifactId>
-      <version>${phoenix.version}</version>
-    </dependency>
+
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-core</artifactId>
       <version>${phoenix.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-logging</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm-all</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>commons-configuration</groupId>
-          <artifactId>commons-configuration</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-testing-util</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.htrace</groupId>
-          <artifactId>htrace-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.commons</groupId>
-          <artifactId>commons-csv</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-endpoint</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>jline</groupId>
-          <artifactId>jline</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.salesforce.i18n</groupId>
-          <artifactId>i18n-util</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.codahale.metrics</groupId>
-          <artifactId>metrics-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.codahale.metrics</groupId>
-          <artifactId>metrics-graphite</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.jboss.netty</groupId>
-          <artifactId>netty</artifactId>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+
+    <!-- Phoenix/HBase compat -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-hbase-compat-2.4.0</artifactId>
+      <artifactId>phoenix-hbase-compat-2.6.0</artifactId>
       <version>${phoenix.version}</version>
       <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-core</artifactId>
-      <version>${kerby.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-it</artifactId>
-      <version>${hbase.minicluster.version}</version>
-      <classifier>tests</classifier>
-      <scope>test</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-endpoint</artifactId>
+          <groupId>com.google.protobuf</groupId>
+          <artifactId>protobuf-java</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+
+    <!-- HBase core (2.6.3) -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-      <version>${hbase.minicluster.version}</version>
-      <classifier>tests</classifier>
+      <artifactId>hbase-testing-util</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
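+      <!-- The tests run HBase on the local filesystem, so Hadoop's minicluster stack is not needed -->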
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minikdc</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-server-tests</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
-      <version>${hbase.minicluster.version}</version>
+      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-server</artifactId>
-      <version>${hbase.minicluster.version}</version>
-      <scope>test</scope>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-protocol-shaded</artifactId>
-      <version>${hbase.minicluster.version}</version>
-      <scope>test</scope>
+      <artifactId>hbase-zookeeper</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+
+    <!-- ZooKeeper -->
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-hadoop-compat</artifactId>
-      <version>${hbase.minicluster.version}</version>
-      <scope>test</scope>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper-jute</artifactId>
+      <version>${zookeeper.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-asyncfs</artifactId>
-      <classifier>tests</classifier>
-      <version>${hbase.minicluster.version}</version>
-      <scope>test</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
+    <!-- HDFS client classes used by the secured tests -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs-client</artifactId>
@@ -284,13 +177,14 @@
           </exclusion>
       </exclusions>
       <scope>test</scope>
-
     </dependency>
+    <!-- Needed by secured tests: provides org.apache.hadoop.minikdc.MiniKdc -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minikdc</artifactId>
       <version>${hadoop.version}</version>
       <scope>test</scope>
+      <!-- Keep logging slim to avoid conflicts -->
       <exclusions>
         <exclusion>
           <groupId>commons-logging</groupId>
@@ -300,102 +194,123 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
+        <!-- Exclude old Kerby versions to avoid conflicts -->
         <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>*</artifactId>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-simplekdc</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-crypto</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-util</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+
+    <!-- Explicitly add compatible Kerby dependencies for Hadoop 3.4.1 -->
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-testing-util</artifactId>
-      <version>${hbase.minicluster.version}</version>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-client</artifactId>
+      <version>${kerby.version}</version>
       <scope>test</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>servlet-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.zaxxer</groupId>
-          <artifactId>HikariCP-java7</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.commons</groupId>
-          <artifactId>commons-csv</artifactId>
-        </exclusion>
-      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-common</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-core</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-crypto</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-util</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-config</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-asn1</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-pkix</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-util</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+
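+    <!-- BouncyCastle provider, used by Kerby/MiniKdc crypto in the secured tests -->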
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk18on</artifactId>
+      <version>1.78.1</version>
+      <scope>test</scope>
     </dependency>
   </dependencies>
+
   <build>
     <plugins>
       <plugin>
-        <artifactId>maven-resources-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-java-sources</id>
-            <phase>process-sources</phase>
-            <goals>
-              <goal>copy-resources</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${basedir}/target/classes/org/apache/drill/exec/store/phoenix</outputDirectory>
-              <resources>
-                <resource>
-                  <directory>src/main/java/org/apache/drill/exec/store/phoenix</directory>
-                  <filtering>true</filtering>
-                </resource>
-              </resources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <skipTests>${skipTests}</skipTests>
-          <forkCount combine.self="override">1</forkCount>
+          <forkCount>1</forkCount>
           <reuseForks>false</reuseForks>
-          <includes>
-            <include>**/PhoenixTestSuite.class</include>
-            <include>**/SecuredPhoenixTestSuite.class</include>
-          </includes>
-          <excludes>
-            <exclude>**/*Test.java</exclude>
-          </excludes>
-          <argLine>-Xms2048m -Xmx2048m</argLine>
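+          <!-- The add-opens flags satisfy Hadoop/HBase reflective access on JDK 9+; the krb5 flags ease debugging the secured tests -->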
+          <argLine>
+            -Xms2048m -Xmx2048m
+            --add-opens=java.base/java.lang=ALL-UNNAMED
+            --add-opens=java.base/java.util=ALL-UNNAMED
+            -Djava.net.preferIPv4Stack=true
+            -Dsun.security.krb5.debug=true
+            -Dsun.security.krb5.allowUdp=false
+          </argLine>
         </configuration>
       </plugin>
     </plugins>
   </build>
-  <profiles>
-    <profile>
-      <!-- Disable unit tests for JDK 14+ until Phoenix 5.2.0+ is released.
-      See PHOENIX-6723 for details.-->
-      <id>jdk14+</id>
-      <activation>
-        <jdk>[14,)</jdk>
-      </activation>
-      <properties>
-        <skipTests>true</skipTests>
-      </properties>
-    </profile>
-  </profiles>
 </project>
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
index daf4e48..de0b851 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/PhoenixStoragePlugin.java
@@ -43,12 +43,12 @@
 import org.apache.drill.exec.store.phoenix.rules.PhoenixConvention;
 
 import com.fasterxml.jackson.core.type.TypeReference;
-import org.apache.drill.exec.util.ImpersonationUtil;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import com.google.common.collect.ImmutableSet;
+import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.tephra.shaded.com.google.common.collect.ImmutableSet;
 
 public class PhoenixStoragePlugin extends AbstractStoragePlugin {
 
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
index 53b7adc..e04f39a 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBaseTest.java
@@ -17,7 +17,18 @@
  */
 package org.apache.drill.exec.store.phoenix;
 
-import static org.junit.Assert.assertFalse;
+import com.google.common.collect.Maps;
+import com.univocity.parsers.csv.CsvParser;
+import com.univocity.parsers.csv.CsvParserSettings;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
+import org.apache.hadoop.fs.Path;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
@@ -37,22 +48,11 @@
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.drill.exec.store.StoragePluginRegistry;
-import com.google.common.collect.Maps;
-import org.apache.drill.test.ClusterFixture;
-import org.apache.drill.test.ClusterFixtureBuilder;
-import org.apache.drill.test.ClusterTest;
-import org.apache.hadoop.fs.Path;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.slf4j.LoggerFactory;
-
-import com.univocity.parsers.csv.CsvParser;
-import com.univocity.parsers.csv.CsvParserSettings;
+import static org.junit.Assert.assertFalse;
 
 public class PhoenixBaseTest extends ClusterTest {
 
-  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixBaseTest.class);
+  private static final Logger logger = LoggerFactory.getLogger(PhoenixBaseTest.class);
 
   public final static String U_U_I_D = UUID.randomUUID().toString();
   private final static AtomicInteger initCount = new AtomicInteger(0);
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
index bb61e01..012c5c6 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixBasicsIT.java
@@ -7,80 +7,92 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
 package org.apache.drill.exec.store.phoenix;
 
-import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
-import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_CAT;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
-import java.util.Optional;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.phoenix.util.PhoenixRuntime;
-import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-/**
- * This is a copy of {@code org.apache.phoenix.end2end.QueryServerBasicsIT} until
- * <a href="https://issues.apache.org/jira/browse/PHOENIX-6613">PHOENIX-6613</a> is fixed
- */
 public class PhoenixBasicsIT {
   private static final HBaseTestingUtility util = new HBaseTestingUtility();
-
-  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixBasicsIT.class);
+  private static final Logger logger = LoggerFactory.getLogger(PhoenixBasicsIT.class);
 
   protected static String CONN_STRING;
-  static LocalHBaseCluster hbaseCluster;
 
   public static synchronized void doSetup() throws Exception {
     Configuration conf = util.getConfiguration();
-    // Start ZK by hand
-    util.startMiniZKCluster();
+
+    // Keep it embedded & filesystem-only (no HDFS)
+    conf.set("hbase.cluster.distributed", "false");
+    conf.setBoolean("hbase.unsafe.stream.capability.enforce", false);
+    conf.setInt("hbase.master.wait.on.regionservers.mintostart", 1);
+
+    // Randomize service ports, disable HTTP/Jetty info servers to avoid Netty/servlet deps
+    conf.setInt("hbase.master.port", 0);
+    conf.setInt("hbase.master.info.port", -1);
+    conf.setInt("hbase.regionserver.port", 0);
+    conf.setInt("hbase.regionserver.info.port", -1);
+    conf.unset("hbase.http.filter.initializers"); // make sure no web filters get bootstrapped
+
+    // Force loopback to dodge IPv6/hostname hiccups
+    conf.set("hbase.zookeeper.quorum", "127.0.0.1");
+    conf.set("hbase.master.hostname", "127.0.0.1");
+    conf.set("hbase.regionserver.hostname", "127.0.0.1");
+
+    // Root dir on local FS (file:///), so HTU won't start MiniDFS
     Path rootdir = util.getDataTestDirOnTestFS(PhoenixBasicsIT.class.getSimpleName());
-    // There is no setRootdir method that is available in all supported HBase versions.
-    conf.set(HBASE_DIR, rootdir.toString());
-    hbaseCluster = new LocalHBaseCluster(conf, 1);
-    hbaseCluster.startup();
+    conf.set(HBASE_DIR, rootdir.toUri().toString()); // keep URI form
 
-    CONN_STRING = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + getZookeeperPort();
-    logger.info("JDBC connection string is " + CONN_STRING);
-  }
+    // Start ZK + 1 Master + 1 RegionServer WITHOUT HDFS
+    util.startMiniZKCluster();
+    util.startMiniHBaseCluster(1, 1);
 
-  public static int getZookeeperPort() {
-    return util.getConfiguration().getInt(HConstants.ZOOKEEPER_CLIENT_PORT, 2181);
+    int zkPort = util.getZkCluster().getClientPort();
+    CONN_STRING = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + zkPort;
+    logger.info("JDBC connection string is {}", CONN_STRING);
   }
 
   public static void testCatalogs() throws Exception {
-    try (final Connection connection = DriverManager.getConnection(CONN_STRING)) {
+    try (Connection connection = DriverManager.getConnection(CONN_STRING)) {
       assertFalse(connection.isClosed());
-      try (final ResultSet resultSet = connection.getMetaData().getCatalogs()) {
-        final ResultSetMetaData metaData = resultSet.getMetaData();
-        assertFalse("unexpected populated resultSet", resultSet.next());
-        assertEquals(1, metaData.getColumnCount());
-        assertEquals(TABLE_CAT, metaData.getColumnName(1));
+      try (ResultSet rs = connection.getMetaData().getCatalogs()) {
+        ResultSetMetaData md = rs.getMetaData();
+        String col = md.getColumnLabel(1);  // label is safer than name
+        if (!"TABLE_CAT".equals(col) && !"TENANT_ID".equals(col)) {
+          // fall back to name just in case some drivers differ
+          col = md.getColumnName(1);
+        }
+        assertTrue("Unexpected first column: " + col,
+            "TABLE_CAT".equals(col) || "TENANT_ID".equals(col));
       }
     }
   }
 
   public static synchronized void afterClass() throws IOException {
-    Optional.of(hbaseCluster).ifPresent(LocalHBaseCluster::shutdown);
-    util.shutdownMiniCluster();
+    util.shutdownMiniHBaseCluster();  // stops RS & Master
+    util.shutdownMiniZKCluster();     // stops ZK
   }
 }
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
index 0e43090..3be298e 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixTestSuite.java
@@ -17,9 +17,6 @@
  */
 package org.apache.drill.exec.store.phoenix;
 
-import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.test.BaseTest;
 import org.junit.AfterClass;
@@ -28,8 +25,12 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Suite;
 import org.junit.runners.Suite.SuiteClasses;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicInteger;
 
 @RunWith(Suite.class)
 @SuiteClasses ({
@@ -40,7 +41,7 @@
 @Category({ SlowTest.class })
 public class PhoenixTestSuite extends BaseTest {
 
-  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PhoenixTestSuite.class);
+  private static final Logger logger = LoggerFactory.getLogger(PhoenixTestSuite.class);
 
   private static volatile boolean runningSuite = false;
   private static final AtomicInteger initCount = new AtomicInteger(0);
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
index 88b3d60..acc662c 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java
@@ -17,9 +17,28 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.access.AccessController;
+import org.apache.hadoop.hbase.security.token.TokenProvider;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.phoenix.query.ConfigurationFactory;
+import org.apache.phoenix.util.InstanceResolver;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -29,25 +48,12 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.UUID;
 
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.phoenix.query.ConfigurationFactory;
-import org.apache.phoenix.util.InstanceResolver;
-import org.apache.phoenix.util.PhoenixRuntime;
+import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * This is a copy of class from `org.apache.phoenix:phoenix-queryserver-it`,
@@ -97,6 +103,7 @@
   private int numCreatedUsers;
 
   private final String phoenixUrl;
+  private static final Logger logger = LoggerFactory.getLogger(PhoenixEnvironment.class);
 
   private static Configuration conf() {
     Configuration configuration = HBaseConfiguration.create();
@@ -195,8 +202,7 @@
   /**
  * Set up and start Kerberized HBase
    */
-  public PhoenixEnvironment(final Configuration confIn, int numberOfUsers, boolean tls)
-    throws Exception {
+  public PhoenixEnvironment(final Configuration confIn, int numberOfUsers, boolean tls) throws Exception {
 
     Configuration conf = util.getConfiguration();
     conf.addResource(confIn);
@@ -204,30 +210,98 @@
     ensureIsEmptyDirectory(tempDir);
     ensureIsEmptyDirectory(keytabDir);
     keytab = new File(keytabDir, "test.keytab");
+
     // Start a MiniKDC
-    kdc = util.setupMiniKdc(keytab);
-    // Create a service principal and spnego principal in one keytab
-    // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
-    // use separate identies for HBase and HDFS results in a GSS initiate error. The quick
-    // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
-    // (or "dn" and "nn") per usual.
-    kdc.createPrincipal(keytab, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
-    // Start ZK by hand
+    File kdcWorkDir = new File(new File(getTempDir()), "kdc-" + System.currentTimeMillis());
+    ensureIsEmptyDirectory(kdcWorkDir);
+
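+    // Loopback-only, TCP-only KDC on an ephemeral port, with a single fixed enctype and pre-auth required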
+    Properties kdcConf = org.apache.hadoop.minikdc.MiniKdc.createConf();
+    kdcConf.setProperty(org.apache.hadoop.minikdc.MiniKdc.KDC_BIND_ADDRESS, "127.0.0.1");
+    kdcConf.setProperty("kdc.tcp.port", "0");
+    kdcConf.setProperty("kdc.allow_udp", "false");
+    kdcConf.setProperty("kdc.encryption.types", "aes128-cts-hmac-sha1-96");
+    kdcConf.setProperty("kdc.fast.enabled", "false");
+    kdcConf.setProperty("kdc.preauth.required", "true");
+    kdcConf.setProperty("kdc.allowable.clockskew", "300000"); // 5m
+    kdcConf.setProperty(org.apache.hadoop.minikdc.MiniKdc.DEBUG, "true");
+
+    kdc = new org.apache.hadoop.minikdc.MiniKdc(kdcConf, kdcWorkDir);
+    kdc.start();
+
+    // Write krb5.conf that disables referrals/canonicalization
+    File krb5File = new File(kdcWorkDir, "krb5.conf");
+    writeKrb5Conf(krb5File.toPath(), kdc.getRealm(), "127.0.0.1", kdc.getPort());
+    System.setProperty("java.security.krb5.conf", krb5File.getAbsolutePath());
+    System.setProperty("sun.security.krb5.allowUdp", "false");
+    System.setProperty("sun.security.krb5.disableReferrals", "true");
+    System.setProperty("java.net.preferIPv4Stack", "true");
+    System.setProperty("sun.security.krb5.debug", "true");
+    System.clearProperty("java.security.krb5.realm"); // avoid env overrides
+    System.clearProperty("java.security.krb5.kdc");
+
+    // Fresh keytab every run; create principals in one shot
+    if (keytab.exists() && !keytab.delete()) {
+      throw new IOException("Couldn't delete old keytab: " + keytab);
+    }
+    keytab.getParentFile().mkdirs();
+
+    // Use a conventional service principal to avoid canonicalization surprises
+    final String SERVICE_PRINCIPAL_LOCAL = "hbase/localhost";
+    final String SPNEGO_PRINCIPAL_LOCAL  = "HTTP/localhost";
+    final String PQS_PRINCIPAL_LOCAL     = "phoenixqs/localhost";
+
+    kdc.createPrincipal(
+        keytab,
+        SPNEGO_PRINCIPAL_LOCAL,
+        PQS_PRINCIPAL_LOCAL,
+        SERVICE_PRINCIPAL_LOCAL
+    );
+    // --- End explicit MiniKDC setup ---
+
+    // Start ZK by hand
     util.startMiniZKCluster();
 
     // Create a number of unprivileged users
     createUsers(numberOfUsers);
 
-    // Set configuration for HBase
-    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + kdc.getRealm());
+    // HBase ↔ Kerberos wiring: set creds BEFORE setSecuredConfiguration
+    final String servicePrincipal = "hbase/localhost@" + kdc.getRealm();
+
+    conf.set("hadoop.security.authentication", "kerberos");
+    conf.set("hbase.security.authentication", "kerberos");
+
+    conf.set("hbase.master.keytab.file", keytab.getAbsolutePath());
+    conf.set("hbase.regionserver.keytab.file", keytab.getAbsolutePath());
+    conf.set("hbase.master.kerberos.principal", servicePrincipal);
+    conf.set("hbase.regionserver.kerberos.principal", servicePrincipal);
+
+    // Make HBase copy its secured defaults *after* we have principals/keytab in conf
+    HBaseKerberosUtils.setPrincipalForTesting(servicePrincipal);
+    HBaseKerberosUtils.setKeytabFileForTesting(keytab.getAbsolutePath());
     HBaseKerberosUtils.setSecuredConfiguration(conf);
+
+    // HDFS side
     setHdfsSecuredConfiguration(conf);
+
+    // UGI must see kerberos
     UserGroupInformation.setConfiguration(conf);
+
+    // Preflight: prove the keytab/KDC works *before* we start HBase
+    UserGroupInformation.loginUserFromKeytab(servicePrincipal, keytab.getAbsolutePath());
+    logger.info("UGI login OK for {}", servicePrincipal);
+
     conf.setInt(HConstants.MASTER_PORT, 0);
     conf.setInt(HConstants.MASTER_INFO_PORT, 0);
     conf.setInt(HConstants.REGIONSERVER_PORT, 0);
     conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
 
+    // Register the security coprocessors: AccessController on master and region servers, plus TokenProvider on regions
+    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+    conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName(), TokenProvider.class.getName());
+
     // Clear the cached singletons so we can inject our own.
     InstanceResolver.clearSingletons();
     // Make sure the ConnectionInfo doesn't try to pull a default Configuration
@@ -258,10 +332,47 @@
     phoenixUrl = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + getZookeeperPort();
   }
 
+  private static void writeKrb5Conf(java.nio.file.Path path, String realm, String host, int port) throws Exception {
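+    // Fully-qualified java.nio.file types here, since org.apache.hadoop.fs.Path is already imported in this class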
+    String cfg =
+        "[libdefaults]\n" +
+            " default_realm = " + realm + "\n" +
+            " dns_lookup_kdc = false\n" +
+            " dns_lookup_realm = false\n" +
+            " dns_canonicalize_hostname = false\n" +
+            " rdns = false\n" +
+            " udp_preference_limit = 1\n" +
+            " default_tkt_enctypes = aes128-cts-hmac-sha1-96\n" +
+            " default_tgs_enctypes = aes128-cts-hmac-sha1-96\n" +
+            " permitted_enctypes   = aes128-cts-hmac-sha1-96\n" +
+            "\n" +
+            "[realms]\n" +
+            " " + realm + " = {\n" +
+            "   kdc = " + host + ":" + port + "\n" +
+            "   admin_server = " + host + ":" + port + "\n" +
+            " }\n";
+    java.nio.file.Files.createDirectories(path.getParent());
+    java.nio.file.Files.write(path, cfg.getBytes(java.nio.charset.StandardCharsets.UTF_8));
+  }
+
   public int getZookeeperPort() {
     return util.getConfiguration().getInt(HConstants.ZOOKEEPER_CLIENT_PORT, 2181);
   }
 
+  private static void createPrincipalIfAbsent(MiniKdc kdc, File keytab, String principal) throws Exception {
+    try {
+      kdc.createPrincipal(keytab, principal);
+    } catch (org.apache.kerby.kerberos.kerb.KrbException e) {
+      String msg = e.getMessage();
+      if (msg != null && msg.contains("already exists")) {
+        // Principal is already in the KDC; fine to proceed.
+        // (Keys were generated when it was first created.)
+        return;
+      }
+      throw e;
+    }
+  }
+
   public void stop() throws Exception {
     // Remove our custom ConfigurationFactory for future tests
     InstanceResolver.clearSingletons();
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
index 193b7ba..fdbe85f 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixBaseTest.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.store.phoenix.secured;
 
 import ch.qos.logback.classic.Level;
+import com.google.common.collect.Lists;
 import com.sun.security.auth.module.Krb5LoginModule;
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.exceptions.UserRemoteException;
@@ -29,7 +30,6 @@
 import org.apache.drill.exec.store.phoenix.PhoenixDataSource;
 import org.apache.drill.exec.store.phoenix.PhoenixStoragePluginConfig;
 import org.apache.drill.exec.util.ImpersonationUtil;
-import com.google.common.collect.Lists;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.ClusterTest;
@@ -92,6 +94,22 @@
     Map.Entry<String, File> user3 = environment.getUser(3);
 
     dirTestWatcher.start(SecuredPhoenixTestSuite.class); // until DirTestWatcher ClassRule is implemented for JUnit5
+
+    // Create a UDF directory with proper permissions in the test directory
+    File udfDir = dirTestWatcher.makeSubDir(Paths.get("udf"));
+    // Pre-create all subdirectories that Drill will need with proper permissions
+    File drillDir = new File(udfDir, "drill");
+    File happyDir = new File(drillDir, "happy");
+    File udfSubDir = new File(happyDir, "udf");
+    File registryDir = new File(udfSubDir, "registry");
+    File stagingDir = new File(udfSubDir, "staging");
+    File tmpDir = new File(udfSubDir, "tmp");
+    // Create all directories and set permissions
+    registryDir.mkdirs();
+    stagingDir.mkdirs();
+    tmpDir.mkdirs();
+    setDirectoryPermissions(udfDir);
+
     ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         .configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, true)
         .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, UserAuthenticatorTestImpl.TYPE)
@@ -99,12 +117,23 @@
         .configProperty(ExecConstants.IMPERSONATION_ENABLED, true)
         .configProperty(ExecConstants.BIT_AUTHENTICATION_ENABLED, true)
         .configProperty(ExecConstants.BIT_AUTHENTICATION_MECHANISM, "kerberos")
+        .configProperty(ExecConstants.USE_LOGIN_PRINCIPAL, true)
         .configProperty(ExecConstants.SERVICE_PRINCIPAL, HBaseKerberosUtils.getPrincipalForTesting())
         .configProperty(ExecConstants.SERVICE_KEYTAB_LOCATION, environment.getServiceKeytab().getAbsolutePath())
+        // Set UDF directory to a location we control with proper permissions
+        .configProperty(ExecConstants.UDF_DIRECTORY_ROOT, udfDir.getAbsolutePath())
+        .configProperty(ExecConstants.UDF_DIRECTORY_FS, "file:///" + udfDir.getAbsolutePath().replace("\\", "/"))
+        // Disable dynamic UDF support for this test to avoid filesystem issues
+        .configProperty(ExecConstants.UDF_DISABLE_DYNAMIC, true)
         .configClientProperty(DrillProperties.SERVICE_PRINCIPAL, HBaseKerberosUtils.getPrincipalForTesting())
         .configClientProperty(DrillProperties.USER, user1.getKey())
         .configClientProperty(DrillProperties.KEYTAB, user1.getValue().getAbsolutePath());
     startCluster(builder);
+
+    // After cluster starts, Drill creates subdirectories in the UDF area
+    // Set permissions recursively on all created subdirectories
+    setDirectoryPermissions(udfDir);
+
     Properties user2ClientProperties = new Properties();
     user2ClientProperties.setProperty(DrillProperties.SERVICE_PRINCIPAL, HBaseKerberosUtils.getPrincipalForTesting());
     user2ClientProperties.setProperty(DrillProperties.USER, user2.getKey());
@@ -124,6 +153,29 @@
     registry.put(PhoenixStoragePluginConfig.NAME + "123", config);
   }
 
+  /**
+   * Sets world read/write/execute permissions on a directory, recursing into the
+   * subdirectories that Drill creates under it.
+   */
+  private static void setDirectoryPermissions(File dir) {
+    if (dir != null && dir.exists()) {
+      // Set permissions on the directory itself
+      dir.setWritable(true, false); // writable by all
+      dir.setExecutable(true, false); // executable by all
+      dir.setReadable(true, false); // readable by all
+      // Recursively set permissions on subdirectories
+      if (dir.isDirectory()) {
+        File[] children = dir.listFiles();
+        if (children != null) {
+          for (File child : children) {
+            if (child.isDirectory()) {
+              setDirectoryPermissions(child);
+            }
+          }
+        }
+      }
+    }
+  }
 
   /**
    * Initialize HBase via Phoenix
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
index 89f1a99..7631ea7 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
@@ -26,6 +26,7 @@
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Suite;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.TimeZone;
@@ -41,7 +42,7 @@
 @Category({ SlowTest.class, RowSetTest.class })
 public class SecuredPhoenixTestSuite extends BaseTest {
 
-  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecuredPhoenixTestSuite.class);
+  private static final Logger logger = LoggerFactory.getLogger(SecuredPhoenixTestSuite.class);
 
   private static volatile boolean runningSuite = false;
   private static final AtomicInteger initCount = new AtomicInteger(0);
diff --git a/pom.xml b/pom.xml
index e8ece20..60f7ec1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -71,7 +71,7 @@
     <commons.text.version>1.10.0</commons.text.version>
     <commons.validator.version>1.7</commons.validator.version>
     <curator.version>5.5.0</curator.version>
-    <derby.version>10.14.2.0</derby.version>
+    <derby.version>10.17.1.0</derby.version>
     <directMemoryMb>3072</directMemoryMb>
     <docker.repository>apache/drill</docker.repository>
     <excludedGroups />
@@ -107,7 +107,7 @@
     <junit.args />
     <junit.platform.version>1.8.2</junit.platform.version>
     <junit.version>5.7.2</junit.version>
-    <kerby.version>1.0.1</kerby.version>
+    <kerby.version>2.0.3</kerby.version>
     <libthrift.version>0.18.1</libthrift.version>
     <license.skip>true</license.skip>
     <log4j.version>2.23.1</log4j.version>
@@ -128,7 +128,7 @@
     <netty.tcnative.version>2.0.65.Final</netty.tcnative.version>
     <netty.version>4.1.118.Final</netty.version>
     <parquet.format.version>2.11.0</parquet.format.version>
-    <parquet.version>1.15.1</parquet.version>
+    <parquet.version>1.15.2</parquet.version>
     <project.build.outputTimestamp>1750144553</project.build.outputTimestamp>
     <protobuf.version>3.25.5</protobuf.version>
     <proto.cas.path>${project.basedir}/src/main/protobuf/</proto.cas.path>
@@ -148,7 +148,7 @@
     <wiremock.standalone.version>2.23.2</wiremock.standalone.version>
     <xerces.version>2.12.2</xerces.version>
     <yauaa.version>7.31.0</yauaa.version>
-    <zookeeper.version>3.5.10</zookeeper.version>
+    <zookeeper.version>3.9.3</zookeeper.version>
   </properties>
 
   <scm>
diff --git a/tools/fmpp/pom.xml b/tools/fmpp/pom.xml
index eb2b3bb..6b44979 100644
--- a/tools/fmpp/pom.xml
+++ b/tools/fmpp/pom.xml
@@ -70,6 +70,18 @@
           <artifactId>bsh</artifactId>
           <groupId>org.beanshell</groupId>
         </exclusion>
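+        <!-- Maven core artifacts are provided by the Maven runtime; exclude the stale transitive copies -->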
+        <exclusion>
+          <artifactId>maven-plugin-api</artifactId>
+          <groupId>org.apache.maven</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>maven-compat</artifactId>
+          <groupId>org.apache.maven</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>maven-model</artifactId>
+          <groupId>org.apache.maven</groupId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>