PHOENIX-6076 Refactor Phoenix Hive Connectors introduced by PHOENIX-6057
diff --git a/phoenix-flume-base/phoenix4-flume/pom.xml b/phoenix-flume-base/phoenix4-flume/pom.xml
index 3586387..9ba9693 100644
--- a/phoenix-flume-base/phoenix4-flume/pom.xml
+++ b/phoenix-flume-base/phoenix4-flume/pom.xml
@@ -39,5 +39,22 @@
</dependency>
</dependencies>
+ <build>
+ <plugins>
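+      <!-- Plugin versions and executions are inherited from pluginManagement in the parent poms -->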
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-flume-base/phoenix5-flume/pom.xml b/phoenix-flume-base/phoenix5-flume/pom.xml
index 4239ae0..f979a50 100644
--- a/phoenix-flume-base/phoenix5-flume/pom.xml
+++ b/phoenix-flume-base/phoenix5-flume/pom.xml
@@ -49,5 +49,22 @@
</dependency>
</dependencies>
+ <build>
+ <plugins>
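+      <!-- Plugin versions and executions are inherited from pluginManagement in the parent poms -->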
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-flume-base/pom.xml b/phoenix-flume-base/pom.xml
index d360287..706027e 100644
--- a/phoenix-flume-base/pom.xml
+++ b/phoenix-flume-base/pom.xml
@@ -160,46 +160,40 @@
</dependencies>
<build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <id>add-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/main/java</source>
- </sources>
- </configuration>
- </execution>
- <execution>
- <id>add-test-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-test-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/it/java</source>
- </sources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- </plugin>
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>${maven-dependency-plugin.version}</version>
- </plugin>
- </plugins>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/main/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/it/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
</project>
diff --git a/phoenix-hive-base/phoenix4-hive/pom.xml b/phoenix-hive-base/phoenix4-hive/pom.xml
new file mode 100644
index 0000000..6bc5bf0
--- /dev/null
+++ b/phoenix-hive-base/phoenix4-hive/pom.xml
@@ -0,0 +1,69 @@
+<?xml version='1.0'?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-hive-base</artifactId>
+ <version>6.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+
+ <artifactId>phoenix4-hive</artifactId>
+ <name>Phoenix Hive Connector for Phoenix 4</name>
+
+ <properties>
+ <commons-lang.version>2.6</commons-lang.version>
+ <commons-lang3.version>3.1</commons-lang3.version>
+ <hive.version>${hive2.version}</hive.version>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix4-compat</artifactId>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
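+      <!-- Plugin versions and executions are inherited from pluginManagement in the parent poms -->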
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
\ No newline at end of file
diff --git a/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java
new file mode 100644
index 0000000..71c4f8e
--- /dev/null
+++ b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.compat;
+
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class HiveCompatUtil {
+
+ private HiveCompatUtil() {
+ // Not to be instantiated
+ }
+
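+    // Pass-through on Hive 2; only the Hive 3 variant (phoenix5-hive) needs to rewrite
+    // negated IN expressions.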
+ public static ExprNodeGenericFuncDesc getComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr, boolean isNot) {
+ return comparisonExpr;
+ }
+
+ public static String getOptionsValue(AcidOutputFormat.Options options, AtomicReference<Method> GET_BUCKET_METHOD_REF, AtomicReference<Method> GET_BUCKET_ID_METHOD_REF, Log LOG) {
+ StringBuilder content = new StringBuilder();
+
+ int bucket = options.getBucket();
+ String inspectorInfo = options.getInspector().getCategory() + ":" + options.getInspector()
+ .getTypeName();
+ long maxTxnId = options.getMaximumTransactionId();
+ long minTxnId = options.getMinimumTransactionId();
+ int recordIdColumn = options.getRecordIdColumn();
+        boolean isCompressed = options.isCompressed();
+        boolean isWritingBase = options.isWritingBase();
+
+        content.append("bucket : ").append(bucket)
+                .append(", inspectorInfo : ").append(inspectorInfo)
+                .append(", minTxnId : ").append(minTxnId)
+                .append(", maxTxnId : ").append(maxTxnId)
+                .append(", recordIdColumn : ").append(recordIdColumn)
+                .append(", isCompressed : ").append(isCompressed)
+                .append(", isWritingBase : ").append(isWritingBase);
+
+ return content.toString();
+ }
+
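+    // Hive 2 does not use org.apache.hadoop.hive.common.type.Date/Timestamp, so there is
+    // never a value to convert here; the phoenix5-hive variant handles those types.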
+ public static Object getDateOrTimestampValue(Object value){
+ return null;
+ }
+
+ public static String getDefaultDatabaseName(){
+ return org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+ }
+
+ public static MyResult doSetup(String confDir) throws MalformedURLException {
+ // HIVE-14443 move this fall-back logic to CliConfigs
+ if (confDir != null && !confDir.isEmpty()) {
+ HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
+ System.out.println("Setting hive-site: "+ HiveConf.getHiveSiteLocation());
+ }
+ HiveConf conf = new HiveConf();
+ String tmpBaseDir = System.getProperty("test.tmp.dir");
+        if (tmpBaseDir == null || tmpBaseDir.isEmpty()) {
+ tmpBaseDir = System.getProperty("java.io.tmpdir");
+ }
+ String metaStoreURL = "jdbc:derby:" + tmpBaseDir + File.separator + "metastore_dbtest;" +
+ "create=true";
+ conf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+ System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+ return new MyResult(conf, new QueryState(conf));
+ }
+
+ public static void destroyTEZSession(SessionState sessionState){
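+        // no-op: only the Hive 3 build manages a Tez session here (see phoenix5-hive)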
+ }
+
+ public static Object getDriver(HiveConf conf){
+ return null;
+ }
+
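+    // no-op: Hive 2 has no query results cache to clean up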
+ public static void cleanupQueryResultCache() { }
+
+ public static HiveConf getHiveConf(){
+ return new HiveConf();
+ }
+
+ public static int getDriverResponseCode(Object drv, String createTableCmd){
+ return -1; // There is no driver in Phoenix4Hive2
+ }
+
+ public static void closeDriver(Object drv) { }
+
+ public static QueryState getQueryState(HiveConf conf){
+ return new QueryState(conf);
+ }
+
+ public static void initHiveMaterializedViewsRegistry() {}
+
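+    // On Hive 2 the materialized views registry must be initialized with an explicit Hive
+    // instance, so QTestUtil calls this variant after creating the db; the no-arg variant
+    // above is only used on Hive 3.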
+    public static void initHiveMaterializedViewsRegistry(Hive db) {
+ HiveMaterializedViewsRegistry.get().init(db);
+ }
+}
diff --git a/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/MyResult.java b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/MyResult.java
new file mode 100644
index 0000000..1da4615
--- /dev/null
+++ b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/compat/MyResult.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.compat;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
+
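+/**
+ * Simple holder pairing a HiveConf with the QueryState built from it, returned by
+ * HiveCompatUtil.doSetup() so QTestUtil can consume both without version-specific code.
+ */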
+public class MyResult {
+ private final HiveConf conf;
+ private final QueryState queryState;
+
+ public MyResult(HiveConf conf, QueryState queryState) {
+ this.conf = conf;
+ this.queryState = queryState;
+ }
+
+ public HiveConf getFirst() {
+ return conf;
+ }
+
+ public QueryState getSecond() {
+ return queryState;
+ }
+}
\ No newline at end of file
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
similarity index 97%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
rename to phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
index 1de1cc7..7e406cd 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
+++ b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
@@ -19,8 +19,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive
- .AbstractPrimitiveLazyObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Writable;
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
similarity index 91%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
rename to phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
index d97993e..ca3f5f6 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
+++ b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
@@ -27,10 +27,12 @@
* ObjectInspector for date type
*/
-public class PhoenixDateObjectInspector extends AbstractPhoenixObjectInspector<DateWritable>
+public class PhoenixDateObjectInspectorBase extends AbstractPhoenixObjectInspector<DateWritable>
implements DateObjectInspector {
-    public PhoenixDateObjectInspector() {
+    public PhoenixDateObjectInspectorBase() {
super(TypeInfoFactory.dateTypeInfo);
}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
similarity index 93%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
rename to phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
index 7b13f2b..d6c05fd 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
+++ b/phoenix-hive-base/phoenix4-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
@@ -26,11 +26,12 @@
/**
* ObjectInspector for timestamp type
*/
-public class PhoenixTimestampObjectInspector extends
+public class PhoenixTimestampObjectInspectorBase extends
AbstractPhoenixObjectInspector<TimestampWritable>
implements TimestampObjectInspector {
-    public PhoenixTimestampObjectInspector() {
+    public PhoenixTimestampObjectInspectorBase() {
super(TypeInfoFactory.timestampTypeInfo);
}
diff --git a/phoenix-hive-base/phoenix5-hive/pom.xml b/phoenix-hive-base/phoenix5-hive/pom.xml
new file mode 100644
index 0000000..38ab3c0
--- /dev/null
+++ b/phoenix-hive-base/phoenix5-hive/pom.xml
@@ -0,0 +1,190 @@
+<?xml version='1.0'?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-hive-base</artifactId>
+ <version>6.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+
+ <artifactId>phoenix5-hive</artifactId>
+ <name>Phoenix Hive Connector for Phoenix 5</name>
+
+ <properties>
+ <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
+ <netty.version>4.1.47.Final</netty.version>
+ <phoenix.version>${phoenix-five.version}</phoenix.version>
+ <hbase.version>${hbase-two.version}</hbase.version>
+ <hadoop.version>${hadoop-three.version}</hadoop.version>
+ <avatica.version>1.12.0</avatica.version>
+ <hive.version>${hive3.version}</hive.version>
+ <curator.version>4.0.0</curator.version>
+ <jetty.version>9.3.8.v20160314</jetty.version>
+ <jdk.version>1.8</jdk.version>
+ <phoenix.compat.version>5</phoenix.compat.version>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix5-compat</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-standalone-metastore</artifactId>
+ <type>test-jar</type>
+ <version>${hive.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${hive.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty-all</artifactId>
+ <version>${netty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <scope>test</scope>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-http</artifactId>
+ <scope>test</scope>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <scope>test</scope>
+ <version>${jetty.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>19.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.calcite.avatica</groupId>
+ <artifactId>avatica</artifactId>
+ <!-- Overriding the version of Avatica that PQS uses so that Hive will work -->
+ <version>${avatica.version}</version>
+ <scope>test</scope>
+      <!-- Exclude dependencies that are not shaded in this older Avatica version
+           and that conflict with HDFS -->
+ <exclusions>
+ <exclusion>
+ <groupId>org.hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-annotations</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-core</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>HBaseManagedTimeTests</id>
+ <configuration>
+ <encoding>UTF-8</encoding>
+ <forkCount>1</forkCount>
+ <runOrder>alphabetical</runOrder>
+ <reuseForks>false</reuseForks>
+ <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=256m
+ -Djava.security.egd=file:/dev/./urandom
+ "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
+ -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/
+ -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.
+ </argLine>
+ <redirectTestOutputToFile>${test.output.tofile}
+ </redirectTestOutputToFile>
+ <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
+ <groups>org.apache.phoenix.end2end.HBaseManagedTimeTest</groups>
+ <shutdown>kill</shutdown>
+ <useSystemClassLoader>false</useSystemClassLoader>
+ </configuration>
+ <goals>
+ <goal>integration-test</goal>
+ <goal>verify</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
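+      <!-- The plugins below inherit their configuration from pluginManagement in phoenix-hive-base -->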
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+</project>
\ No newline at end of file
diff --git a/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java
new file mode 100644
index 0000000..7dbb1bf
--- /dev/null
+++ b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/HiveCompatUtil.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.compat;
+
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.DriverFactory;
+import org.apache.hadoop.hive.ql.IDriver;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import java.io.File;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collections;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class HiveCompatUtil {
+
+ private HiveCompatUtil() {
+ // Not to be instantiated
+ }
+
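+    // Hive 3 hands over the bare IN expression even when the predicate was negated, so wrap
+    // it in NOT; the Hive 2 variant (phoenix4-hive) returns the expression unchanged.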
+ public static ExprNodeGenericFuncDesc getComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr, boolean isNot){
+ ExprNodeGenericFuncDesc ret = comparisonExpr;
+ try {
+ if (GenericUDFIn.class == comparisonExpr.getGenericUDF().getClass() && isNot) {
+ ret = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
+ FunctionRegistry.getFunctionInfo("not").getGenericUDF(),
+ Collections.singletonList(comparisonExpr));
+ }
+ } catch (SemanticException e) {
+            throw new RuntimeException("Unexpected SemanticException while negating IN expression", e);
+ }
+ return ret;
+ }
+
+ public static String getOptionsValue(AcidOutputFormat.Options options, AtomicReference<Method> GET_BUCKET_METHOD_REF, AtomicReference<Method> GET_BUCKET_ID_METHOD_REF, Log LOG) {
+ StringBuilder content = new StringBuilder();
+
+ int bucket = getBucket(options, GET_BUCKET_METHOD_REF, GET_BUCKET_ID_METHOD_REF, LOG);
+ String inspectorInfo = options.getInspector().getCategory() + ":" + options.getInspector()
+ .getTypeName();
+ long maxTxnId = options.getMaximumWriteId();
+ long minTxnId = options.getMinimumWriteId();
+ int recordIdColumn = options.getRecordIdColumn();
+        boolean isCompressed = options.isCompressed();
+        boolean isWritingBase = options.isWritingBase();
+
+        content.append("bucket : ").append(bucket)
+                .append(", inspectorInfo : ").append(inspectorInfo)
+                .append(", minTxnId : ").append(minTxnId)
+                .append(", maxTxnId : ").append(maxTxnId)
+                .append(", recordIdColumn : ").append(recordIdColumn)
+                .append(", isCompressed : ").append(isCompressed)
+                .append(", isWritingBase : ").append(isWritingBase);
+
+ return content.toString();
+ }
+
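+    // Options.getBucket() was renamed getBucketId() in later Hive releases; probe for both
+    // via reflection and cache the resolved Method so the lookup happens only once.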
+ private static int getBucket(AcidOutputFormat.Options options, AtomicReference<Method> GET_BUCKET_METHOD_REF, AtomicReference<Method> GET_BUCKET_ID_METHOD_REF, Log LOG) {
+ Method getBucketMethod = GET_BUCKET_METHOD_REF.get();
+ try {
+ if (getBucketMethod == null) {
+ getBucketMethod = AcidOutputFormat.Options.class.getMethod("getBucket");
+ GET_BUCKET_METHOD_REF.set(getBucketMethod);
+ }
+ return (int) getBucketMethod.invoke(options);
+ } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
+ LOG.trace("Failed to invoke Options.getBucket()", e);
+ }
+ Method getBucketIdMethod = GET_BUCKET_ID_METHOD_REF.get();
+ try {
+ if (getBucketIdMethod == null) {
+ getBucketIdMethod = AcidOutputFormat.Options.class.getMethod("getBucketId");
+                GET_BUCKET_ID_METHOD_REF.set(getBucketIdMethod);
+ }
+ return (int) getBucketIdMethod.invoke(options);
+ } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
+ throw new RuntimeException("Failed to invoke Options.getBucketId()", e);
+ }
+ }
+
+ public static Object getDateOrTimestampValue(Object value){
+ if (value instanceof Date) {
+ value = java.sql.Date.valueOf(value.toString());
+ } else if (value instanceof Timestamp) {
+ value = java.sql.Timestamp.valueOf(value.toString());
+ }
+ return value;
+ }
+
+ public static String getDefaultDatabaseName(){
+ return org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+ }
+
+ public static MyResult doSetup(String confDir) throws MalformedURLException {
+ // HIVE-14443 move this fall-back logic to CliConfigs
+ if (confDir != null && !confDir.isEmpty()) {
+ HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
+ MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation());
+ System.out.println("Setting hive-site: "+ HiveConf.getHiveSiteLocation());
+ }
+
+ QueryState queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build();
+ HiveConf conf = queryState.getConf();
+ return new MyResult(conf, queryState);
+ }
+
+ public static void destroyTEZSession(SessionState sessionState) throws Exception {
+ sessionState.getTezSession().destroy();
+ }
+
+ public static Object getDriver(HiveConf conf){
+ return DriverFactory.newDriver(conf);
+ }
+
+ public static void cleanupQueryResultCache(){
+ // Remove any cached results from the previous test.
+ QueryResultsCache.cleanupInstance();
+ }
+
+ public static HiveConf getHiveConf(){
+ return new HiveConf(IDriver.class);
+ }
+
+ public static int getDriverResponseCode(Object drv, String createTableCmd){
+ IDriver driver = (IDriver) drv;
+ return driver.run(createTableCmd).getResponseCode();
+ }
+
+ public static void closeDriver(Object drv){
+ IDriver driver = (IDriver) drv;
+ driver.close();
+ }
+
+ public static QueryState getQueryState(HiveConf conf){
+ return new QueryState.Builder().withHiveConf(conf).build();
+ }
+
+    public static void initHiveMaterializedViewsRegistry() {
+ HiveMaterializedViewsRegistry.get().init();
+ }
+
+ public static void initHiveMaterializedViewsRegistry(Hive db) { }
+}
diff --git a/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/MyResult.java b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/MyResult.java
new file mode 100644
index 0000000..d24b95f
--- /dev/null
+++ b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/compat/MyResult.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.compat;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
+
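+/**
+ * Simple holder pairing a HiveConf with the QueryState built from it, returned by
+ * HiveCompatUtil.doSetup() so QTestUtil can consume both without version-specific code.
+ */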
+public class MyResult {
+ private final HiveConf conf;
+ private final QueryState queryState;
+
+ public MyResult(HiveConf conf, QueryState queryState) {
+ this.conf = conf;
+ this.queryState = queryState;
+ }
+
+ public HiveConf getFirst() {
+ return conf;
+ }
+
+ public QueryState getSecond() {
+ return queryState;
+ }
+}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
similarity index 97%
copy from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
copy to phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
index 1de1cc7..7e406cd 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
+++ b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
@@ -19,8 +19,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive
- .AbstractPrimitiveLazyObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Writable;
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
similarity index 92%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
rename to phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
index 7702c64..b9ee1b1 100644
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
+++ b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspectorBase.java
@@ -26,10 +26,10 @@
* ObjectInspector for date type
*/
-public class PhoenixDateObjectInspector extends AbstractPhoenixObjectInspector<DateWritableV2>
+public class PhoenixDateObjectInspectorBase extends AbstractPhoenixObjectInspector<DateWritableV2>
implements DateObjectInspector {
- public PhoenixDateObjectInspector() {
+ public PhoenixDateObjectInspectorBase() {
super(TypeInfoFactory.dateTypeInfo);
}
@@ -61,5 +61,4 @@
return value;
}
-
}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
similarity index 94%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
rename to phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
index 99ad0cc..984a903 100644
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
+++ b/phoenix-hive-base/phoenix5-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspectorBase.java
@@ -25,11 +25,12 @@
/**
* ObjectInspector for timestamp type
*/
-public class PhoenixTimestampObjectInspector extends
+public class PhoenixTimestampObjectInspectorBase extends
AbstractPhoenixObjectInspector<TimestampWritableV2>
implements TimestampObjectInspector {
-    public PhoenixTimestampObjectInspector() {
+    public PhoenixTimestampObjectInspectorBase() {
super(TypeInfoFactory.timestampTypeInfo);
}
diff --git a/phoenix-hive-base/pom.xml b/phoenix-hive-base/pom.xml
new file mode 100644
index 0000000..0fa9625
--- /dev/null
+++ b/phoenix-hive-base/pom.xml
@@ -0,0 +1,275 @@
+<?xml version='1.0'?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-connectors</artifactId>
+ <version>6.0.0-SNAPSHOT</version>
+ </parent>
+ <artifactId>phoenix-hive-base</artifactId>
+ <name>Phoenix Hive Connector - Base</name>
+
+ <packaging>pom</packaging>
+ <modules>
+ <module>phoenix4-hive</module>
+ <module>phoenix5-hive</module>
+ </modules>
+
+ <properties>
+ <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
+ <netty.version>4.1.47.Final</netty.version>
+ <avatica.version>1.8.0</avatica.version>
+ <curator.version>2.7.1</curator.version>
+ <tez.version>0.9.1</tez.version>
+ <jetty.version>8.1.7.v20120910</jetty.version>
+ <jdk.version>1.8</jdk.version>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${hive.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${hive.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${hive.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4j.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>${slf4j.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>${log4j.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core</artifactId>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-testing-util</artifactId>
+ <scope>test</scope>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-it</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-minicluster</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-tests</artifactId>
+ <scope>test</scope>
+ <version>${tez.version}</version>
+ <type>test-jar</type>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.tez</groupId>
+ <artifactId>tez-dag</artifactId>
+ <scope>test</scope>
+ <version>${tez.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/main/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/it/java</source>
+ <source>${project.basedir}/src/it/java</source>
+ <source>${project.parent.basedir}/src/test/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-resources-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-resources</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/test-classes
+ </outputDirectory>
+ <overwrite>true</overwrite>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/src/test/resources</directory>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <descriptorRefs>
+ <descriptorRef>jar-with-dependencies</descriptorRef>
+ </descriptorRefs>
+ </configuration>
+ <executions>
+ <execution>
+ <id>make-jar-with-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ <configuration>
+ <appendAssemblyId>false</appendAssemblyId>
+ <finalName>phoenix-${project.version}-hive</finalName>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>8</source>
+ <target>8</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+</project>
diff --git a/phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/QTestProcessExecResult.java b/phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/QTestProcessExecResult.java
similarity index 100%
rename from phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/QTestProcessExecResult.java
rename to phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/QTestProcessExecResult.java
diff --git a/phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java b/phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
similarity index 96%
rename from phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
rename to phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 4450047..4bf495b 100644
--- a/phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.ql;
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
-
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
@@ -38,6 +36,7 @@
import java.io.PrintStream;
import java.io.Serializable;
import java.io.StringWriter;
+import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
@@ -92,8 +91,6 @@
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -121,6 +118,9 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.common.util.StreamPrinter;
import org.apache.logging.log4j.util.Strings;
+import org.apache.phoenix.compat.CompatUtil;
+import org.apache.phoenix.compat.HiveCompatUtil;
+import org.apache.phoenix.compat.MyResult;
import org.apache.tools.ant.BuildException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
@@ -137,6 +137,7 @@
/**
* QTestUtil. Cloned from Hive 3.0.0 as hive doesn't release hive-it-util artifact
+ * Some changes have been applied so that both Hive 2 and Hive 3 work with the same file
*
*/
public class QTestUtil {
@@ -157,6 +158,7 @@
public static final String PATH_HDFS_REGEX = "(hdfs://)([a-zA-Z0-9:/_\\-\\.=])+";
public static final String PATH_HDFS_WITH_DATE_USER_GROUP_REGEX = "([a-z]+) ([a-z]+)([ ]+)([0-9]+) ([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}) " + PATH_HDFS_REGEX;
+  private static final String DEFAULT_DATABASE_NAME = HiveCompatUtil.getDefaultDatabaseName();
private String testWarehouse;
private final String testFiles;
@@ -180,7 +182,7 @@
protected Hive db;
protected QueryState queryState;
protected HiveConf conf;
- private IDriver drv;
+ private Object drv; // IDriver in Phoenix5Hive3, null in Phoenix4Hive2
private BaseSemanticAnalyzer sem;
protected final boolean overWrite;
private CliDriver cliDriver;
@@ -484,7 +486,7 @@
} else if (type.equals("llap_local")) {
return llap_local;
} else if (type.equals("druid")) {
- return druid;
+ return druid;
} else {
return none;
}
@@ -512,7 +514,7 @@
boolean withLlapIo, FsType fsType)
throws Exception {
LOG.info("Setting up QTestUtil with outDir="+outDir+", logDir="+logDir+", clusterType="+clusterType+", confDir="+confDir+"," +
- " hadoopVer="+hadoopVer+", initScript="+initScript+", cleanupScript="+cleanupScript+", withLlapIo="+withLlapIo+"," +
+ " hadoopVer="+hadoopVer+", initScript="+initScript+", cleanupScript="+cleanupScript+", withLlapIo="+withLlapIo+"," +
" fsType="+fsType+"");
Preconditions.checkNotNull(clusterType, "ClusterType cannot be null");
if (fsType != null) {
@@ -525,15 +527,10 @@
this.srcTables=getSrcTables();
this.srcUDFs = getSrcUDFs();
- // HIVE-14443 move this fall-back logic to CliConfigs
- if (confDir != null && !confDir.isEmpty()) {
- HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
- MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation());
- System.out.println("Setting hive-site: "+ HiveConf.getHiveSiteLocation());
- }
+ MyResult result = HiveCompatUtil.doSetup(confDir);
+ conf = result.getFirst();
+ queryState = result.getSecond();
- queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build();
- conf = queryState.getConf();
this.hadoopVer = getHadoopMainVersion(hadoopVer);
qMap = new TreeMap<String, String>();
qSkipSet = new HashSet<String>();
@@ -642,10 +639,10 @@
cleanUp();
}
- if (clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
- SessionState.get().getTezSession().destroy();
+    if (CompatUtil.isPhoenix5() && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
+ HiveCompatUtil.destroyTEZSession(SessionState.get());
}
-
+
setup.tearDown();
if (sparkSession != null) {
try {
@@ -953,12 +950,13 @@
return;
}
- // Remove any cached results from the previous test.
- QueryResultsCache.cleanupInstance();
+    if (CompatUtil.isPhoenix5()) {
+ HiveCompatUtil.cleanupQueryResultCache();
+ }
// allocate and initialize a new conf since a test can
// modify conf by using 'set' commands
- conf = new HiveConf(IDriver.class);
+ conf = HiveCompatUtil.getHiveConf();
initConf();
initConfFromSetup();
@@ -1030,7 +1028,7 @@
protected void runCreateTableCmd(String createTableCmd) throws Exception {
int ecode = 0;
- ecode = drv.run(createTableCmd).getResponseCode();
+ ecode = HiveCompatUtil.getDriverResponseCode(drv, createTableCmd);
if (ecode != 0) {
throw new Exception("create table command: " + createTableCmd
+ " failed with exit code= " + ecode);
@@ -1041,8 +1039,8 @@
protected void runCmd(String cmd) throws Exception {
int ecode = 0;
- ecode = drv.run(cmd).getResponseCode();
- drv.close();
+ ecode = HiveCompatUtil.getDriverResponseCode(drv, cmd);
+ HiveCompatUtil.closeDriver(drv);
if (ecode != 0) {
throw new Exception("command: " + cmd
+ " failed with exit code= " + ecode);
@@ -1090,8 +1088,10 @@
createRemoteDirs();
}
- // Create views registry
- HiveMaterializedViewsRegistry.get().init();
+ if (CompatUtil.isPhoenix5()) {
+ // Create views registry
+ HiveCompatUtil.initHiveMaterializedViewsRegistry();
+ }
testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
String execEngine = conf.get("hive.execution.engine");
@@ -1099,7 +1099,10 @@
SessionState.start(conf);
conf.set("hive.execution.engine", execEngine);
db = Hive.get(conf);
- drv = DriverFactory.newDriver(conf);
+ drv = HiveCompatUtil.getDriver(conf);
+    if (CompatUtil.isPhoenix4()) {
+ HiveCompatUtil.initHiveMaterializedViewsRegistry(db);
+ }
pd = new ParseDriver();
sem = new SemanticAnalyzer(queryState);
}
@@ -1256,7 +1259,7 @@
}
public int execute(String tname) {
- return drv.run(qMap.get(tname)).getResponseCode();
+ return HiveCompatUtil.getDriverResponseCode(drv, qMap.get(tname));
}
public int executeClient(String tname1, String tname2) {
@@ -1426,12 +1429,11 @@
db = Hive.get(conf);
// Move all data from dest4_sequencefile to dest4
- drv
- .run("FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");
+ HiveCompatUtil.getDriverResponseCode(drv, "FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");
+
// Drop dest4_sequencefile
- db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, "dest4_sequencefile",
- true, true);
+ db.dropTable(DEFAULT_DATABASE_NAME, "dest4_sequencefile", true, true);
}
public QTestProcessExecResult checkNegativeResults(String tname, Exception e) throws Exception {
@@ -1529,7 +1531,7 @@
break;
}
}
- return ret;
+ return ret;
}
private Pattern[] toPattern(String[] patternStrs) {
@@ -1888,7 +1890,7 @@
public void resetParser() throws SemanticException {
pd = new ParseDriver();
- queryState = new QueryState.Builder().withHiveConf(conf).build();
+ queryState = HiveCompatUtil.getQueryState(conf);
sem = new SemanticAnalyzer(queryState);
}
@@ -2021,8 +2023,8 @@
QTestUtil[] qt = new QTestUtil[qfiles.length];
for (int i = 0; i < qfiles.length; i++) {
qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20",
- initScript == null ? defaultInitScript : initScript,
- cleanupScript == null ? defaultCleanupScript : cleanupScript, false);
+ initScript == null ? defaultInitScript : initScript,
+ cleanupScript == null ? defaultCleanupScript : cleanupScript, false);
qt[i].addFile(qfiles[i]);
qt[i].clearTestSideEffects();
}
@@ -2041,7 +2043,7 @@
* @return true if all queries passed, false otw
*/
public static boolean queryListRunnerSingleThreaded(File[] qfiles, QTestUtil[] qt)
- throws Exception
+ throws Exception
{
boolean failed = false;
qt[0].cleanUp();
@@ -2055,9 +2057,9 @@
failed = true;
StringBuilder builder = new StringBuilder();
builder.append("Test ")
- .append(qfiles[i].getName())
- .append(" results check failed with error code ")
- .append(result.getReturnCode());
+ .append(qfiles[i].getName())
+ .append(" results check failed with error code ")
+ .append(result.getReturnCode());
if (Strings.isNotEmpty(result.getCapturedOutput())) {
builder.append(" and diff value ").append(result.getCapturedOutput());
}
@@ -2084,7 +2086,7 @@
*
*/
public static boolean queryListRunnerMultiThreaded(File[] qfiles, QTestUtil[] qt)
- throws Exception
+ throws Exception
{
boolean failed = false;
@@ -2113,9 +2115,9 @@
failed = true;
StringBuilder builder = new StringBuilder();
builder.append("Test ")
- .append(qfiles[i].getName())
- .append(" results check failed with error code ")
- .append(result.getReturnCode());
+ .append(qfiles[i].getName())
+ .append(" results check failed with error code ")
+ .append(result.getReturnCode());
if (Strings.isNotEmpty(result.getCapturedOutput())) {
builder.append(" and diff value ").append(result.getCapturedOutput());
}
@@ -2329,7 +2331,7 @@
final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());
- rs = s.executeQuery("SELECT * FROM APP.TBLS");
+ rs = s.executeQuery("SELECT * FROM APP.TBLS");
while(rs.next()) {
String tblName = rs.getString("TBL_NAME");
Integer tblId = rs.getInt("TBL_ID");
@@ -2461,7 +2463,7 @@
}
}
}
-
+
private static String getHiveRoot() {
String path;
if (System.getProperty("hive.root") != null) {
@@ -2475,7 +2477,7 @@
}
return ensurePathEndsInSlash(new File(path).getAbsolutePath());
}
-
+
public static String ensurePathEndsInSlash(String path) {
if (path == null) {
throw new NullPointerException("Path cannot be null");
diff --git a/phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java b/phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java
similarity index 100%
rename from phoenix-hive3/src/it/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java
rename to phoenix-hive-base/src/it/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
similarity index 100%
rename from phoenix-hive3/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
rename to phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
similarity index 100%
rename from phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
rename to phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
similarity index 99%
rename from phoenix-hive3/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
rename to phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index 550b2b5..dea79d6 100644
--- a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -287,7 +287,7 @@
sb.append("INSERT INTO TABLE joinTable4" + HiveTestUtil.CRLF +"VALUES (5, \'part2\',\'foodesc\',200,2.0,-1);" + HiveTestUtil.CRLF);
sb.append("INSERT INTO TABLE joinTable4" + HiveTestUtil.CRLF +"VALUES (10, \'part2\',\'foodesc\',200,2.0,-1);" + HiveTestUtil.CRLF);
-
+
sb.append("SELECT A.ID, a.db, B.ID2 from joinTable3 A join joinTable4 B on A.ID = B.ID WHERE A.ID=10;" +
HiveTestUtil.CRLF);
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
similarity index 85%
rename from phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
rename to phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
index 2144f08..072f742 100644
--- a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
@@ -26,7 +26,7 @@
public class HiveTestUtil extends QTestUtil{
public static final String CRLF = System.getProperty("line.separator");
- public HiveTestUtil(String outDir, String logDir, MiniClusterType clusterType, String confDir, String hadoopVer,
+ public HiveTestUtil(String outDir, String logDir, QTestUtil.MiniClusterType clusterType, String confDir, String hadoopVer,
String initScript, String cleanupScript, boolean withLlapIo) throws Exception {
super(outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript, withLlapIo);
}
@@ -34,6 +34,8 @@
@Override
public int executeClient(String tname) {
conf.set("mapreduce.job.name", "test");
+ conf.set("hive.mapred.mode", "nonstrict");
+ conf.set("hive.strict.checks.cartesian.product", "false");
return super.executeClient(tname);
}
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
similarity index 100%
rename from phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
rename to phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
similarity index 99%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
index 089a299..710f372 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
@@ -17,6 +17,12 @@
*/
package org.apache.phoenix.hive;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.Properties;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -38,12 +44,6 @@
import org.apache.phoenix.schema.MetaDataClient;
import org.apache.phoenix.util.QueryUtil;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.Properties;
-
public class PhoenixRecordUpdater implements RecordUpdater {
private static final Log LOG = LogFactory.getLog(PhoenixRecordUpdater.class);
@@ -333,4 +333,8 @@
return stats;
}
+ public long getBufferedRowCount() {
+ return numRecords;
+ }
+
}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
similarity index 97%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
index 3aace9c..27e9052 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
@@ -30,6 +30,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
+import org.apache.phoenix.compat.CompatUtil;
+import org.apache.phoenix.compat.HiveCompatUtil;
import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
import org.apache.phoenix.hive.mapreduce.PhoenixResultWritable;
import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
@@ -167,6 +169,8 @@
value = ((HiveDecimal) value).bigDecimalValue();
} else if (value instanceof HiveChar) {
value = ((HiveChar) value).getValue().trim();
+ } else if (CompatUtil.isPhoenix5()){
+ value = HiveCompatUtil.getDateOrTimestampValue(value);
}
pResultWritable.add(value);
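Note on the hunk above: the new branch routes values through the compat layer because Hive 3 replaced java.sql.Date/Timestamp with its own org.apache.hadoop.hive.common.type equivalents, which Phoenix cannot bind directly. Only the isPhoenix5()/getDateOrTimestampValue() names appear in this patch; the following is a minimal sketch of what the phoenix5-side shim could look like, assuming both Hive 3 types keep a JDBC-parsable toString() form:

```java
// Hypothetical phoenix5-side shim; the method body is an assumption,
// only its name and call site come from this patch.
package org.apache.phoenix.compat;

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;

public class HiveCompatUtil {

    // Hive 3 hands over its own Date/Timestamp types; Phoenix binds
    // java.sql.* values. Both Hive types print in the JDBC escape format
    // ("yyyy-MM-dd" / "yyyy-MM-dd HH:mm:ss[.fff...]"), so valueOf() can
    // parse them back.
    public static Object getDateOrTimestampValue(Object value) {
        if (value instanceof Date) {
            return java.sql.Date.valueOf(value.toString());
        }
        if (value instanceof Timestamp) {
            return java.sql.Timestamp.valueOf(value.toString());
        }
        return value; // non-temporal values pass through unchanged (assumed)
    }
}
```

A phoenix4 build can pair this with a CompatUtil.isPhoenix5() that returns false, so the extra branch in PhoenixSerializer is never taken there.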
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
similarity index 97%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index eac55d4..38e01fd 100644
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -34,7 +34,6 @@
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.mapreduce.RegionSizeCalculator;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
@@ -64,6 +63,7 @@
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.compat.CompatUtil;
/**
* Custom InputFormat to feed into Hive
@@ -139,16 +139,13 @@
try (org.apache.hadoop.hbase.client.Connection connection = ConnectionFactory.createConnection(PhoenixConnectionUtil.getConfiguration(jobConf))) {
RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf(qplan
.getTableRef().getTable().getPhysicalName().toString()));
- RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(regionLocator, connection
- .getAdmin());
for (List<Scan> scans : qplan.getScans()) {
PhoenixInputSplit inputSplit;
HRegionLocation location = regionLocator.getRegionLocation(scans.get(0).getStartRow()
, false);
- long regionSize = sizeCalculator.getRegionSize(location.getRegionInfo().getRegionName
- ());
+ long regionSize = CompatUtil.getSize(regionLocator, connection.getAdmin(), location);
String regionLocation = PhoenixStorageHandlerUtil.getRegionLocation(location, LOG);
if (splitByStats) {
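The dropped import and the CompatUtil.getSize() call above isolate the one HBase API in this class that differs between the two Phoenix lines: RegionSizeCalculator is public in the HBase 1.x mapreduce package but is not usable from HBase 2.x. The phoenix4-side shim can be reconstructed almost verbatim from the removed lines; a sketch, assuming the class and package names used by this patch:

```java
// Hypothetical phoenix4-side shim, reassembled from the lines removed above.
package org.apache.phoenix.compat;

import java.io.IOException;

import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.mapreduce.RegionSizeCalculator;

public class CompatUtil {

    // Same computation PhoenixInputFormat previously did inline; moving it
    // behind this method lets the phoenix5 module substitute an HBase 2
    // compatible implementation.
    public static long getSize(RegionLocator regionLocator, Admin admin,
            HRegionLocation location) throws IOException {
        RegionSizeCalculator sizeCalculator =
                new RegionSizeCalculator(regionLocator, admin);
        return sizeCalculator.getRegionSize(
                location.getRegionInfo().getRegionName());
    }
}
```

One behavioural difference worth noting: the original built the calculator once per job, while the patched loop calls getSize() per split, so a shim written this way rebuilds the region-size map on each call.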
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
similarity index 99%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
index fcced90..eba10d7 100644
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
@@ -353,7 +353,6 @@
return stats;
}
- @Override
public long getBufferedRowCount() {
return numRecords;
}
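This @Override removal pairs with the plain getBufferedRowCount() added to PhoenixRecordUpdater earlier in the patch: Hive 3 declares the method on its writer/updater contract while Hive 2 does not, so the annotation only compiles against one of the two. A hypothetical illustration of the constraint (stand-in interfaces, not Hive's real ones):

```java
// Sketch only: stand-ins for the Hive 2 vs Hive 3 contracts.
interface UpdaterV2 {
    void close(boolean abort);
}

interface UpdaterV3 {
    void close(boolean abort);
    long getBufferedRowCount(); // present only in the newer contract
}

class SharedUpdater implements UpdaterV2 {
    private long numRecords;

    @Override
    public void close(boolean abort) { }

    // No @Override: compiled against the V2 contract this is a plain
    // method; if the classpath supplied the V3 contract instead, the same
    // source would still satisfy it.
    public long getBufferedRowCount() {
        return numRecords;
    }
}
```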
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
similarity index 69%
rename from phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
index 2931f93..ef33a46 100644
--- a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDateObjectInspector.java
@@ -15,15 +15,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package org.apache.phoenix.hive.objectinspector;
-package org.apache.phoenix.hive;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
-import org.junit.BeforeClass;
+/**
+ * ObjectInspector for date type
+ */
-public class HiveTezIT extends HivePhoenixStoreIT {
+public class PhoenixDateObjectInspector extends PhoenixDateObjectInspectorBase
+ implements DateObjectInspector {
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setup(HiveTestUtil.MiniClusterType.tez);
+ public PhoenixDateObjectInspector() {
+ super();
}
}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java
diff --git a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
similarity index 67%
copy from phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
copy to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
index 2931f93..a19ec8e 100644
--- a/phoenix-hive3/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixTimestampObjectInspector.java
@@ -15,15 +15,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package org.apache.phoenix.hive.objectinspector;
-package org.apache.phoenix.hive;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.junit.BeforeClass;
+/**
+ * ObjectInspector for timestamp type
+ */
+public class PhoenixTimestampObjectInspector extends PhoenixTimestampObjectInspectorBase
+ implements TimestampObjectInspector {
-public class HiveTezIT extends HivePhoenixStoreIT {
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- setup(HiveTestUtil.MiniClusterType.tez);
- }
+ public PhoenixTimestampObjectInspector() { super(); }
}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
similarity index 94%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
index ebd978c..e100925 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.phoenix.compat.HiveCompatUtil;
/**
* IndexSearchCondition represents an individual search condition found by
@@ -30,13 +31,13 @@
private ExprNodeColumnDesc columnDesc;
private String comparisonOp;
private ExprNodeConstantDesc constantDesc;
- private ExprNodeGenericFuncDesc comparisonExpr;
+ protected ExprNodeGenericFuncDesc comparisonExpr;
private String[] fields;
// Support (Not) Between/(Not) In Operator
private ExprNodeConstantDesc[] multiConstants;
- private boolean isNot;
+ protected boolean isNot;
public IndexSearchCondition(ExprNodeColumnDesc columnDesc, String comparisonOp,
ExprNodeConstantDesc[] multiConstants, ExprNodeGenericFuncDesc
@@ -128,10 +129,6 @@
this.comparisonExpr = comparisonExpr;
}
- public ExprNodeGenericFuncDesc getComparisonExpr() {
- return comparisonExpr;
- }
-
public String[] getFields() {
return fields;
}
@@ -140,4 +137,8 @@
public String toString() {
return comparisonExpr.getExprString();
}
+
+ public ExprNodeGenericFuncDesc getComparisonExpr() {
+ return HiveCompatUtil.getComparisonExpr(comparisonExpr, isNot);
+ }
}
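Making comparisonExpr and isNot protected and pushing getComparisonExpr() through the compat layer suggests the two Hive lines disagree on how a negated condition is handed back to the engine. Only the signature appears in this patch; one plausible shape for the shim, with the NOT-wrapping logic as an explicit assumption:

```java
// Hypothetical shim body; only the signature is taken from the patch.
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class HiveCompatUtil {

    // Assumed behaviour: if the analyzer stripped a NOT while decomposing
    // the predicate, re-wrap the residual expression before returning it
    // to Hive; an older Hive build could simply return the expression.
    public static ExprNodeGenericFuncDesc getComparisonExpr(
            ExprNodeGenericFuncDesc comparisonExpr, boolean isNot) {
        if (!isNot) {
            return comparisonExpr;
        }
        List<ExprNodeDesc> children =
                Collections.singletonList((ExprNodeDesc) comparisonExpr);
        return new ExprNodeGenericFuncDesc(
                TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNot(), children);
    }
}
```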
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java
similarity index 100%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
similarity index 91%
rename from phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 176a2ec..d7af62f 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -20,18 +20,27 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.util.Strings;
@@ -45,6 +54,8 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.net.DNS;
+import org.apache.phoenix.compat.CompatUtil;
+import org.apache.phoenix.compat.HiveCompatUtil;
import org.apache.phoenix.hive.PrimaryKeyData;
import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
@@ -54,7 +65,10 @@
* Misc utils for PhoenixStorageHandler
*/
-public class PhoenixStorageHandlerUtil {
+public abstract class PhoenixStorageHandlerUtil {
+ protected static final Log LOG = LogFactory.getLog(PhoenixStorageHandlerUtil.class);
+ protected static final AtomicReference<Method> GET_BUCKET_METHOD_REF = new AtomicReference<>();
+ protected static final AtomicReference<Method> GET_BUCKET_ID_METHOD_REF = new AtomicReference<>();
public static String getTargetTableName(Table table) {
Map<String, String> tableParameterMap = table.getParameters();
@@ -262,23 +276,6 @@
}
public static String getOptionsValue(Options options) {
- StringBuilder content = new StringBuilder();
-
- int bucket = options.getBucket();
- String inspectorInfo = options.getInspector().getCategory() + ":" + options.getInspector()
- .getTypeName();
- long maxTxnId = options.getMaximumTransactionId();
- long minTxnId = options.getMinimumTransactionId();
- int recordIdColumn = options.getRecordIdColumn();
- boolean isCompresses = options.isCompressed();
- boolean isWritingBase = options.isWritingBase();
-
- content.append("bucket : ").append(bucket).append(", inspectorInfo : ").append
- (inspectorInfo).append(", minTxnId : ").append(minTxnId).append(", maxTxnId : ")
- .append(maxTxnId).append(", recordIdColumn : ").append(recordIdColumn);
- content.append(", isCompressed : ").append(isCompresses).append(", isWritingBase : ")
- .append(isWritingBase);
-
- return content.toString();
+ return HiveCompatUtil.getOptionsValue(options, GET_BUCKET_METHOD_REF, GET_BUCKET_ID_METHOD_REF, LOG);
}
}
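The two AtomicReference&lt;Method&gt; fields introduced above imply that the bucket accessors on the Options object are resolved reflectively and then cached, which is how one source tree can tolerate accessor renames between Hive releases (the field names suggest a getBucket/getBucketId pair). A hedged sketch of that lookup-and-cache pattern; the accessor names tried are assumptions:

```java
// Hypothetical sketch of the reflective lookup implied by the cached
// Method references; the accessor names per Hive version are assumed.
import java.lang.reflect.Method;
import java.util.concurrent.atomic.AtomicReference;

final class BucketAccessor {

    // Resolve the accessor once, publish it via the AtomicReference, and
    // reuse it on subsequent calls so reflection cost is paid only once.
    static int getBucket(Object options, AtomicReference<Method> cache)
            throws Exception {
        Method m = cache.get();
        if (m == null) {
            try {
                m = options.getClass().getMethod("getBucketId"); // newer name (assumed)
            } catch (NoSuchMethodException e) {
                m = options.getClass().getMethod("getBucket");   // older name (assumed)
            }
            cache.compareAndSet(null, m);
        }
        return (Integer) m.invoke(options);
    }
}
```

The removed inline body also shows why the whole method moved: getMaximumTransactionId()/getMinimumTransactionId() are themselves version-specific (later Hive renamed transaction ids to write ids), so the string assembly now lives next to the other version-dependent calls.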
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java b/phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java
similarity index 100%
rename from phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java
rename to phoenix-hive-base/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java
diff --git a/phoenix-hive/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java b/phoenix-hive-base/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java
similarity index 100%
rename from phoenix-hive/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java
rename to phoenix-hive-base/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java
diff --git a/phoenix-hive3/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java b/phoenix-hive-base/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
similarity index 100%
rename from phoenix-hive3/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
rename to phoenix-hive-base/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
diff --git a/phoenix-hive3/src/test/resources/hbase-site.xml b/phoenix-hive-base/src/test/resources/hbase-site.xml
similarity index 100%
rename from phoenix-hive3/src/test/resources/hbase-site.xml
rename to phoenix-hive-base/src/test/resources/hbase-site.xml
diff --git a/phoenix-hive3/src/test/resources/hive-site.xml b/phoenix-hive-base/src/test/resources/hive-site.xml
similarity index 100%
rename from phoenix-hive3/src/test/resources/hive-site.xml
rename to phoenix-hive-base/src/test/resources/hive-site.xml
diff --git a/phoenix-hive3/src/test/resources/log4j.properties b/phoenix-hive-base/src/test/resources/log4j.properties
similarity index 100%
rename from phoenix-hive3/src/test/resources/log4j.properties
rename to phoenix-hive-base/src/test/resources/log4j.properties
diff --git a/phoenix-hive3/src/test/resources/tez-site.xml b/phoenix-hive-base/src/test/resources/tez-site.xml
similarity index 100%
rename from phoenix-hive3/src/test/resources/tez-site.xml
rename to phoenix-hive-base/src/test/resources/tez-site.xml
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
deleted file mode 100644
index 7bccd76..0000000
--- a/phoenix-hive/pom.xml
+++ /dev/null
@@ -1,210 +0,0 @@
-<?xml version='1.0'?>
-<!--
-
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-connectors</artifactId>
- <version>6.0.0-SNAPSHOT</version>
- </parent>
- <artifactId>phoenix-hive</artifactId>
- <name>Phoenix Hive Connector for Phoenix 4</name>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-core</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-cli</artifactId>
- <version>${hive.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-exec</artifactId>
- <version>${hive.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>${slf4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>${log4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
- </dependency>
-
- <!-- Test dependencies -->
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-core</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-testing-util</artifactId>
- <scope>test</scope>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-it</artifactId>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-common</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-minicluster</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.tez</groupId>
- <artifactId>tez-tests</artifactId>
- <scope>test</scope>
- <version>0.8.4</version>
- <type>test-jar</type>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.tez</groupId>
- <artifactId>tez-dag</artifactId>
- <scope>test</scope>
- <version>0.8.4</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <version>${mockito-all.version}</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- </plugin>
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>${maven-dependency-plugin.version}</version>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-jar-with-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- <configuration>
- <appendAssemblyId>false</appendAssemblyId>
- <finalName>phoenix-${project.version}-hive</finalName>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
deleted file mode 100644
index df77fcc..0000000
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
-import org.apache.phoenix.jdbc.PhoenixDriver;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.util.PhoenixRuntime;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.TestUtil;
-import org.junit.AfterClass;
-
-import java.io.File;
-import java.io.IOException;
-import java.sql.*;
-import java.util.Properties;
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
- */
-public class BaseHivePhoenixStoreIT extends BaseHBaseManagedTimeIT {
-
- private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
- protected static HBaseTestingUtility hbaseTestUtil;
- protected static MiniHBaseCluster hbaseCluster;
- private static String zkQuorum;
- protected static Connection conn;
- private static Configuration conf;
- protected static HiveTestUtil qt;
- protected static String hiveOutputDir;
- protected static String hiveLogDir;
-
- public static void setup(HiveTestUtil.MiniClusterType clusterType)throws Exception {
- String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
- if (null != hadoopConfDir && !hadoopConfDir.isEmpty()) {
- LOG.warn("WARNING: HADOOP_CONF_DIR is set in the environment which may cause "
- + "issues with test execution via MiniDFSCluster");
- }
- hbaseTestUtil = new HBaseTestingUtility();
- conf = hbaseTestUtil.getConfiguration();
- setUpConfigForMiniCluster(conf);
- conf.set(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
- hiveOutputDir = new Path(hbaseTestUtil.getDataTestDir(), "hive_output").toString();
- File outputDir = new File(hiveOutputDir);
- outputDir.mkdirs();
- hiveLogDir = new Path(hbaseTestUtil.getDataTestDir(), "hive_log").toString();
- File logDir = new File(hiveLogDir);
- logDir.mkdirs();
- // Setup Hive mini Server
- Path testRoot = hbaseTestUtil.getDataTestDir();
- System.setProperty("test.tmp.dir", testRoot.toString());
- System.setProperty("test.warehouse.dir", (new Path(testRoot, "warehouse")).toString());
-
- try {
- qt = new HiveTestUtil(hiveOutputDir, hiveLogDir, clusterType, null);
- } catch (Exception e) {
- LOG.error("Unexpected exception in setup", e);
- fail("Unexpected exception in setup");
- }
-
- //Start HBase cluster
- hbaseCluster = hbaseTestUtil.startMiniCluster(1);
- MiniDFSCluster x = hbaseTestUtil.getDFSCluster();
- Class.forName(PhoenixDriver.class.getName());
- zkQuorum = "localhost:" + hbaseTestUtil.getZkCluster().getClientPort();
- Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
- props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
- conn = DriverManager.getConnection(PhoenixRuntime.JDBC_PROTOCOL +
- PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + zkQuorum, props);
- // Setup Hive Output Folder
-
- Statement stmt = conn.createStatement();
- stmt.execute("create table t(a integer primary key,b varchar)");
- }
-
- protected void runTest(String fname, String fpath) throws Exception {
- long startTime = System.currentTimeMillis();
- try {
- LOG.info("Begin query: " + fname);
- qt.addFile(fpath);
-
- if (qt.shouldBeSkipped(fname)) {
- LOG.info("Test " + fname + " skipped");
- return;
- }
-
- qt.cliInit(fname);
- qt.clearTestSideEffects();
- int ecode = qt.executeClient(fname);
- if (ecode != 0) {
- qt.failed(ecode, fname, null);
- return;
- }
-
- ecode = qt.checkCliDriverResults(fname);
- if (ecode != 0) {
- qt.failedDiff(ecode, fname, null);
- }
- qt.clearPostTestEffects();
-
- } catch (Throwable e) {
- qt.failed(e, fname, null);
- }
-
- long elapsedTime = System.currentTimeMillis() - startTime;
- LOG.info("Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s");
- assertTrue("Test passed", true);
- }
-
- protected void createFile(String content, String fullName) throws IOException {
- FileUtils.write(new File(fullName), content);
- }
-
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- if (qt != null) {
- try {
- qt.shutdown();
- } catch (Exception e) {
- LOG.error("Unexpected exception in setup", e);
- fail("Unexpected exception in tearDown");
- }
- }
- try {
- conn.close();
- } finally {
- try {
- PhoenixDriver.INSTANCE.close();
- } finally {
- try {
- DriverManager.deregisterDriver(PhoenixDriver.INSTANCE);
- } finally {
- hbaseTestUtil.shutdownMiniCluster();
- }
- }
- }
- }
-}
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
deleted file mode 100644
index a60f24c..0000000
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.phoenix.hive;
-
-import static org.junit.Assert.fail;
-
-import org.junit.BeforeClass;
-
-public class HiveMapReduceIT extends HivePhoenixStoreIT {
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- final String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
- if (hadoopConfDir != null && hadoopConfDir.length() != 0) {
- fail("HADOOP_CONF_DIR is non-empty in the current shell environment which will very likely cause this test to fail.");
- }
- setup(HiveTestUtil.MiniClusterType.mr);
- }
-}
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
deleted file mode 100644
index 6ce409b..0000000
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.phoenix.util.StringUtil;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-
-import static org.junit.Assert.assertTrue;
-
-/**
- * Test methods only. All supporting methods should be placed to BaseHivePhoenixStoreIT
- */
-@Ignore("This class contains only test methods and should not be executed directly")
-public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
-
- /**
- * Create a table with two column, insert 1 row, check that phoenix table is created and
- * the row is there
- *
- * @throws Exception
- */
- @Test
- public void simpleTest() throws Exception {
- String testName = "simpleTest";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE phoenix_table(ID STRING, SALARY STRING)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF + " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.table.name'='phoenix_table'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id');");
- sb.append("INSERT INTO TABLE phoenix_table" + HiveTestUtil.CRLF +
- "VALUES ('10', '1000');" + HiveTestUtil.CRLF);
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
-
- String phoenixQuery = "SELECT * FROM phoenix_table";
- PreparedStatement statement = conn.prepareStatement(phoenixQuery);
- ResultSet rs = statement.executeQuery();
- assert (rs.getMetaData().getColumnCount() == 2);
- assertTrue(rs.next());
- assert (rs.getString(1).equals("10"));
- assert (rs.getString(2).equals("1000"));
- }
-
- /**
- * Create hive table with custom column mapping
- * @throws Exception
- */
-
- @Test
- public void simpleColumnMapTest() throws Exception {
- String testName = "cmTest";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE column_table(ID STRING, P1 STRING, p2 STRING)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF + " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.table.name'='column_table'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.column.mapping' = 'id:C1, p1:c2, p2:C3'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id');");
- sb.append("INSERT INTO TABLE column_table" + HiveTestUtil.CRLF +
- "VALUES ('1', '2', '3');" + HiveTestUtil.CRLF);
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
-
- String phoenixQuery = "SELECT C1, \"c2\", C3 FROM column_table";
- PreparedStatement statement = conn.prepareStatement(phoenixQuery);
- ResultSet rs = statement.executeQuery();
- assert (rs.getMetaData().getColumnCount() == 3);
- assertTrue(rs.next());
- assert (rs.getString(1).equals("1"));
- assert (rs.getString(2).equals("2"));
- assert (rs.getString(3).equals("3"));
-
- }
-
-
- /**
- * Datatype Test
- *
- * @throws Exception
- */
- @Test
- public void dataTypeTest() throws Exception {
- String testName = "dataTypeTest";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE phoenix_datatype(ID int, description STRING, ts TIMESTAMP, db " +
- "DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF + " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='phoenix_datatype'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id');");
- sb.append("INSERT INTO TABLE phoenix_datatype" + HiveTestUtil.CRLF +
- "VALUES (10, \"foodesc\", \"2013-01-05 01:01:01\", 200,2.0,-1);" + HiveTestUtil.CRLF);
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
-
- String phoenixQuery = "SELECT * FROM phoenix_datatype";
- PreparedStatement statement = conn.prepareStatement(phoenixQuery);
- ResultSet rs = statement.executeQuery();
- assert (rs.getMetaData().getColumnCount() == 6);
- while (rs.next()) {
- assert (rs.getInt(1) == 10);
- assert (rs.getString(2).equalsIgnoreCase("foodesc"));
- assert (rs.getDouble(4) == 200);
- assert (rs.getFloat(5) == 2.0);
- assert (rs.getInt(6) == -1);
- }
- }
-
- /**
- * Datatype Test
- *
- * @throws Exception
- */
- @Test
- public void MultiKey() throws Exception {
- String testName = "MultiKey";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE phoenix_MultiKey(ID int, ID2 String,description STRING," +
- "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='phoenix_MultiKey'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE phoenix_MultiKey VALUES (10, \"part2\",\"foodesc\",200,2.0,-1);" +
- HiveTestUtil.CRLF);
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
-
- String phoenixQuery = "SELECT * FROM phoenix_MultiKey";
- PreparedStatement statement = conn.prepareStatement(phoenixQuery);
- ResultSet rs = statement.executeQuery();
- assert (rs.getMetaData().getColumnCount() == 6);
- while (rs.next()) {
- assert (rs.getInt(1) == 10);
- assert (rs.getString(2).equalsIgnoreCase("part2"));
- assert (rs.getString(3).equalsIgnoreCase("foodesc"));
- assert (rs.getDouble(4) == 200);
- assert (rs.getFloat(5) == 2.0);
- assert (rs.getInt(6) == -1);
- }
- }
-
- /**
- * Test that hive is able to access Phoenix data during MR job (creating two tables and perform join on it)
- *
- * @throws Exception
- */
- @Test
- public void testJoinNoColumnMaps() throws Exception {
- String testName = "testJoin";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
- createFile("10\tpart2\tfoodesc\t200.0\t2.0\t-1\t10\tpart2\tfoodesc\t200.0\t2.0\t-1\n",
- new Path(hiveOutputDir, testName + ".out").toString());
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE joinTable1(ID int, ID2 String,description STRING," +
- "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='joinTable1'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
- sb.append("CREATE TABLE joinTable2(ID int, ID2 String,description STRING," +
- "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='joinTable2'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
-
- sb.append("INSERT INTO TABLE joinTable1 VALUES (5, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE joinTable1 VALUES (10, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
-
- sb.append("INSERT INTO TABLE joinTable2 VALUES (5, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE joinTable2 VALUES (10, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
-
- sb.append("SELECT * from joinTable1 A join joinTable2 B on A.ID = B.ID WHERE A.ID=10;" +
- HiveTestUtil.CRLF);
-
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
- }
-
- /**
- * Test that hive is able to access Phoenix data during MR job (creating two tables and perform join on it)
- *
- * @throws Exception
- */
- @Test
- public void testJoinColumnMaps() throws Exception {
- String testName = "testJoin";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile("10\t200.0\tpart2\n", new Path(hiveOutputDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
-
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE joinTable3(ID int, ID2 String,description STRING," +
- "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='joinTable3'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.column.mapping' = 'id:i1, id2:I2'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
- sb.append("CREATE TABLE joinTable4(ID int, ID2 String,description STRING," +
- "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='joinTable4'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.column.mapping' = 'id:i1, id2:I2'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
-
- sb.append("INSERT INTO TABLE joinTable3 VALUES (5, \"part1\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE joinTable3 VALUES (10, \"part1\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
-
- sb.append("INSERT INTO TABLE joinTable4 VALUES (5, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE joinTable4 VALUES (10, \"part2\",\"foodesc\",200,2.0,-1);" + HiveTestUtil.CRLF);
-
- sb.append("SELECT A.ID, a.db, B.ID2 from joinTable3 A join joinTable4 B on A.ID = B.ID WHERE A.ID=10;" +
- HiveTestUtil.CRLF);
-
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
-        // Test that Phoenix has mapped the columns correctly. We check both primary key and
-        // regular columns, mapped and unmapped.
- String phoenixQuery = "SELECT \"i1\", \"I2\", \"db\" FROM joinTable3 where \"i1\" = 10 AND \"I2\" = 'part1' AND \"db\" = 200";
- PreparedStatement statement = conn.prepareStatement(phoenixQuery);
- ResultSet rs = statement.executeQuery();
- assert (rs.getMetaData().getColumnCount() == 3);
- while (rs.next()) {
- assert (rs.getInt(1) == 10);
- assert (rs.getString(2).equalsIgnoreCase("part1"));
- assert (rs.getDouble(3) == 200);
- }
- }
-
- @Test
- @Ignore("This test fails. We need to fix this test later")
- public void testTimestampPredicate() throws Exception {
- String testName = "testTimeStampPredicate";
- hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
- createFile("10\t2013-01-02 01:01:01.123456\n", new Path(hiveOutputDir, testName + ".out").toString());
- createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
-
- StringBuilder sb = new StringBuilder();
- sb.append("CREATE TABLE timeStampTable(ID int,ts TIMESTAMP)" + HiveTestUtil.CRLF +
- " STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + HiveTestUtil
- .CRLF +
- " TBLPROPERTIES(" + HiveTestUtil.CRLF +
- " 'phoenix.hbase.table.name'='TIMESTAMPTABLE'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
- " 'phoenix.zookeeper.client.port'='" +
- hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
- " 'phoenix.column.mapping' = 'id:ID, ts:TS'," + HiveTestUtil.CRLF +
- " 'phoenix.rowkeys'='id');" + HiveTestUtil.CRLF);
- sb.append("INSERT INTO TABLE timeStampTable VALUES (10, \"2013-01-02 01:01:01.123456\");" + HiveTestUtil.CRLF);
- sb.append("SELECT * from timeStampTable WHERE ts between '2013-01-02 01:01:01.123455' and " +
- " '2013-01-02 12:01:02.123457789' AND id = 10;" + HiveTestUtil.CRLF);
-
- String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
- createFile(sb.toString(), fullPath);
- runTest(testName, fullPath);
- }
-}
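The integration tests deleted above all follow one pattern: write a q-file that declares a Phoenix-backed Hive table via PhoenixStorageHandler TBLPROPERTIES, run it through HiveTestUtil, and diff the output. Below is a minimal sketch, not part of the patch, of how such a DDL statement is assembled; the table name and the fixed client port are hypothetical placeholders (the tests substitute the mini-cluster's actual ZooKeeper port).

public class PhoenixHiveDdlSketch {
    public static void main(String[] args) {
        String crlf = System.getProperty("line.separator");
        // Every TBLPROPERTIES key below appears in the deleted tests; only the values are placeholders.
        String ddl =
                "CREATE TABLE demoTable(ID int, ID2 String)" + crlf +
                "   STORED BY \"org.apache.phoenix.hive.PhoenixStorageHandler\"" + crlf +
                "   TBLPROPERTIES(" + crlf +
                "   'phoenix.hbase.table.name'='demoTable'," + crlf +
                "   'phoenix.zookeeper.znode.parent'='/hbase'," + crlf +
                "   'phoenix.zookeeper.quorum'='localhost'," + crlf +
                "   'phoenix.zookeeper.client.port'='2181'," + crlf +
                "   'phoenix.rowkeys'='id,id2');" + crlf;
        System.out.println(ddl);
    }
}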
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
deleted file mode 100644
index 6f646ac..0000000
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ /dev/null
@@ -1,1270 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import junit.framework.Assert;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
-import org.apache.hadoop.hive.cli.CliDriver;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.common.io.CachingPrintStream;
-import org.apache.hadoop.hive.common.io.DigestPrintStream;
-import org.apache.hadoop.hive.common.io.SortAndDigestPrintStream;
-import org.apache.hadoop.hive.common.io.SortPrintStream;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.shims.HadoopShims;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.util.Shell;
-import org.apache.hive.common.util.StreamPrinter;
-import org.apache.tools.ant.BuildException;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-import org.apache.zookeeper.ZooKeeper;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.PrintStream;
-import java.io.StringWriter;
-import java.net.URL;
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * HiveTestUtil, cloned from Hive's QTestUtil. It can become outdated and may require an
- * update once a problem is found.
- */
-public class HiveTestUtil {
-
- public static final String UTF_8 = "UTF-8";
- private static final Log LOG = LogFactory.getLog("HiveTestUtil");
- private static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES";
- public static final String DEFAULT_DATABASE_NAME = "default";
-
- private String testWarehouse;
- private final String testFiles;
- protected final String outDir;
- protected final String logDir;
- private final TreeMap<String, String> qMap;
- private final Set<String> qSkipSet;
- private final Set<String> qSortSet;
- private final Set<String> qSortQuerySet;
- private final Set<String> qHashQuerySet;
- private final Set<String> qSortNHashQuerySet;
- private final Set<String> qJavaVersionSpecificOutput;
- private static final String SORT_SUFFIX = ".sorted";
- private static MiniClusterType clusterType = MiniClusterType.none;
- private ParseDriver pd;
- protected Hive db;
- protected HiveConf conf;
- private BaseSemanticAnalyzer sem;
- protected final boolean overWrite;
- private CliDriver cliDriver;
- private HadoopShims.MiniMrShim mr = null;
- private HadoopShims.MiniDFSShim dfs = null;
- private String hadoopVer = null;
- private HiveTestSetup setup = null;
- private boolean isSessionStateStarted = false;
- private static final String javaVersion = getJavaVersion();
-
- private String initScript = "";
- private String cleanupScript = "";
-
- public HiveConf getConf() {
- return conf;
- }
-
- public boolean deleteDirectory(File path) {
- if (path.exists()) {
- File[] files = path.listFiles();
- for (File file : files) {
- if (file.isDirectory()) {
- deleteDirectory(file);
- } else {
- file.delete();
- }
- }
- }
- return (path.delete());
- }
-
- public void copyDirectoryToLocal(Path src, Path dest) throws Exception {
-
- FileSystem srcFs = src.getFileSystem(conf);
- FileSystem destFs = dest.getFileSystem(conf);
- if (srcFs.exists(src)) {
- FileStatus[] files = srcFs.listStatus(src);
- for (FileStatus file : files) {
- String name = file.getPath().getName();
- Path dfs_path = file.getPath();
- Path local_path = new Path(dest, name);
-
- if (file.isDir()) {
- if (!destFs.exists(local_path)) {
- destFs.mkdirs(local_path);
- }
- copyDirectoryToLocal(dfs_path, local_path);
- } else {
- srcFs.copyToLocalFile(dfs_path, local_path);
- }
- }
- }
- }
-
- static Pattern mapTok = Pattern.compile("(\\.?)(.*)_map_(.*)");
- static Pattern reduceTok = Pattern.compile("(.*)(reduce_[^\\.]*)((\\..*)?)");
-
- public void normalizeNames(File path) throws Exception {
- if (path.isDirectory()) {
- File[] files = path.listFiles();
- for (File file : files) {
- normalizeNames(file);
- }
- } else {
- Matcher m = reduceTok.matcher(path.getName());
- if (m.matches()) {
- String name = m.group(1) + "reduce" + m.group(3);
- path.renameTo(new File(path.getParent(), name));
- } else {
- m = mapTok.matcher(path.getName());
- if (m.matches()) {
- String name = m.group(1) + "map_" + m.group(3);
- path.renameTo(new File(path.getParent(), name));
- }
- }
- }
- }
-
- public String getOutputDirectory() {
- return outDir;
- }
-
- public String getLogDirectory() {
- return logDir;
- }
-
- private String getHadoopMainVersion(String input) {
- if (input == null) {
- return null;
- }
- Pattern p = Pattern.compile("^(\\d+\\.\\d+).*");
- Matcher m = p.matcher(input);
- if (m.matches()) {
- return m.group(1);
- }
- return null;
- }
-
- public void initConf() throws Exception {
- // Plug verifying metastore in for testing.
- conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
- "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
-
- if (mr != null) {
- assert dfs != null;
-
- mr.setupConfiguration(conf);
-
- // set fs.default.name to the uri of mini-dfs
- String dfsUriString = WindowsPathUtil.getHdfsUriString(dfs.getFileSystem().getUri()
- .toString());
- conf.setVar(HiveConf.ConfVars.HADOOPFS, dfsUriString);
- // hive.metastore.warehouse.dir needs to be set relative to the mini-dfs
- conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
- (new Path(dfsUriString,
- "/build/ql/test/data/warehouse/")).toString());
- }
-
- // Windows paths should be converted after MiniMrShim.setupConfiguration()
- // since setupConfiguration may overwrite configuration values.
- if (Shell.WINDOWS) {
- WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
- }
- }
-
- public enum MiniClusterType {
- mr,
- tez,
- none;
-
- public static MiniClusterType valueForString(String type) {
- if (type.equals("miniMR")) {
- return mr;
- } else if (type.equals("tez")) {
- return tez;
- } else {
- return none;
- }
- }
- }
-
- public HiveTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer)
- throws Exception {
- this(outDir, logDir, clusterType, null, hadoopVer);
- }
-
- public HiveTestUtil(String outDir, String logDir, MiniClusterType clusterType, String confDir,
- String hadoopVer)
- throws Exception {
- this.outDir = outDir;
- this.logDir = logDir;
- if (confDir != null && !confDir.isEmpty()) {
- HiveConf.setHiveSiteLocation(new URL("file://" + new File(confDir).toURI().getPath()
- + "/hive-site.xml"));
- LOG.info("Setting hive-site: " + HiveConf.getHiveSiteLocation());
- }
- conf = new HiveConf();
- String tmpBaseDir = System.getProperty("test.tmp.dir");
-        if (tmpBaseDir == null || tmpBaseDir.isEmpty()) {
- tmpBaseDir = System.getProperty("java.io.tmpdir");
- }
- String metaStoreURL = "jdbc:derby:" + tmpBaseDir + File.separator + "metastore_dbtest;" +
- "create=true";
- conf.set(ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
- System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
-
- //set where derby logs
- File derbyLogFile = new File(tmpBaseDir + "/derby.log");
- derbyLogFile.createNewFile();
- System.setProperty("derby.stream.error.file", derbyLogFile.getPath());
-
- this.hadoopVer = getHadoopMainVersion(hadoopVer);
- qMap = new TreeMap<String, String>();
- qSkipSet = new HashSet<String>();
- qSortSet = new HashSet<String>();
- qSortQuerySet = new HashSet<String>();
- qHashQuerySet = new HashSet<String>();
- qSortNHashQuerySet = new HashSet<String>();
- qJavaVersionSpecificOutput = new HashSet<String>();
- this.clusterType = clusterType;
-
- // Using randomUUID for dfs cluster
- System.setProperty("test.build.data", "target/test-data/hive-" + UUID.randomUUID().toString
- ());
-
- HadoopShims shims = ShimLoader.getHadoopShims();
- int numberOfDataNodes = 1;
-
- if (clusterType != MiniClusterType.none) {
- dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
- FileSystem fs = dfs.getFileSystem();
- String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());
- if (clusterType == MiniClusterType.tez) {
- conf.set("hive.execution.engine", "tez");
- mr = shims.getMiniTezCluster(conf, 1, uriString, 1);
- } else {
- conf.set("hive.execution.engine", "mr");
- mr = shims.getMiniMrCluster(conf, 1, uriString, 1);
-
- }
- }
-
- initConf();
-
- // Use the current directory if it is not specified
- String dataDir = conf.get("test.data.files");
- if (dataDir == null) {
- dataDir = new File(".").getAbsolutePath() + "/data/files";
- }
-
- testFiles = dataDir;
-
- // Use the current directory if it is not specified
- String scriptsDir = conf.get("test.data.scripts");
- if (scriptsDir == null) {
- scriptsDir = new File(".").getAbsolutePath() + "/data/scripts";
- }
- if (!initScript.isEmpty()) {
- this.initScript = scriptsDir + "/" + initScript;
- }
- if (!cleanupScript.isEmpty()) {
- this.cleanupScript = scriptsDir + "/" + cleanupScript;
- }
-
- overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));
-
- setup = new HiveTestSetup();
- setup.preTest(conf);
- init();
- }
-
- public void shutdown() throws Exception {
- cleanUp();
- setup.tearDown();
- if (mr != null) {
- mr.shutdown();
- mr = null;
- }
- FileSystem.closeAll();
- if (dfs != null) {
- dfs.shutdown();
- dfs = null;
- }
- }
-
- public String readEntireFileIntoString(File queryFile) throws IOException {
- InputStreamReader isr = new InputStreamReader(
- new BufferedInputStream(new FileInputStream(queryFile)), HiveTestUtil.UTF_8);
- StringWriter sw = new StringWriter();
- try {
- IOUtils.copy(isr, sw);
- } finally {
- if (isr != null) {
- isr.close();
- }
- }
- return sw.toString();
- }
-
- public void addFile(String queryFile) throws IOException {
- addFile(queryFile, false);
- }
-
-    public void addFile(String queryFile, boolean partial) throws IOException {
-        addFile(new File(queryFile), partial);
-    }
-
- public void addFile(File qf) throws IOException {
- addFile(qf, false);
- }
-
- public void addFile(File qf, boolean partial) throws IOException {
- String query = readEntireFileIntoString(qf);
- qMap.put(qf.getName(), query);
- if (partial) return;
-
- if (matches(SORT_BEFORE_DIFF, query)) {
- qSortSet.add(qf.getName());
- } else if (matches(SORT_QUERY_RESULTS, query)) {
- qSortQuerySet.add(qf.getName());
- } else if (matches(HASH_QUERY_RESULTS, query)) {
- qHashQuerySet.add(qf.getName());
- } else if (matches(SORT_AND_HASH_QUERY_RESULTS, query)) {
- qSortNHashQuerySet.add(qf.getName());
- }
- }
-
- private static final Pattern SORT_BEFORE_DIFF = Pattern.compile("-- SORT_BEFORE_DIFF");
- private static final Pattern SORT_QUERY_RESULTS = Pattern.compile("-- SORT_QUERY_RESULTS");
- private static final Pattern HASH_QUERY_RESULTS = Pattern.compile("-- HASH_QUERY_RESULTS");
- private static final Pattern SORT_AND_HASH_QUERY_RESULTS = Pattern.compile("-- " +
- "SORT_AND_HASH_QUERY_RESULTS");
-
-    private boolean matches(Pattern pattern, String query) {
-        return pattern.matcher(query).find();
-    }
-
- /**
- * Get formatted Java version to include minor version, but
- * exclude patch level.
- *
- * @return Java version formatted as major_version.minor_version
- */
- private static String getJavaVersion() {
- String version = System.getProperty("java.version");
- if (version == null) {
- throw new NullPointerException("No java version could be determined " +
- "from system properties");
- }
-
- // "java version" system property is formatted
- // major_version.minor_version.patch_level.
- // Find second dot, instead of last dot, to be safe
- int pos = version.indexOf('.');
- pos = version.indexOf('.', pos + 1);
- return version.substring(0, pos);
- }
-
- /**
- * Clear out any side effects of running tests
- */
- public void clearPostTestEffects() throws Exception {
- setup.postTest(conf);
- }
-
- /**
- * Clear out any side effects of running tests
- */
- public void clearTablesCreatedDuringTests() throws Exception {
- if (System.getenv(QTEST_LEAVE_FILES) != null) {
- return;
- }
-
- // Delete any tables other than the source tables
- // and any databases other than the default database.
- for (String dbName : db.getAllDatabases()) {
- SessionState.get().setCurrentDatabase(dbName);
- for (String tblName : db.getAllTables()) {
- if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
- Table tblObj = db.getTable(tblName);
-                    // an index table cannot be dropped directly; dropping the base
-                    // table automatically drops all of its index tables
- if (tblObj.isIndexTable()) {
- continue;
- }
- db.dropTable(dbName, tblName);
- } else {
- // this table is defined in srcTables, drop all indexes on it
- List<Index> indexes = db.getIndexes(dbName, tblName, (short) -1);
- if (indexes != null && indexes.size() > 0) {
- for (Index index : indexes) {
- db.dropIndex(dbName, tblName, index.getIndexName(), true, true);
- }
- }
- }
- }
- if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
- // Drop cascade, may need to drop functions
- db.dropDatabase(dbName, true, true, true);
- }
- }
-
- // delete remaining directories for external tables (can affect stats for following tests)
- try {
- Path p = new Path(testWarehouse);
- FileSystem fileSystem = p.getFileSystem(conf);
- if (fileSystem.exists(p)) {
- for (FileStatus status : fileSystem.listStatus(p)) {
- if (status.isDir()) {
- fileSystem.delete(status.getPath(), true);
- }
- }
- }
- } catch (IllegalArgumentException e) {
-            // ignore; an invalid URL is sometimes provided intentionally
- }
- SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
-
- List<String> roleNames = db.getAllRoleNames();
- for (String roleName : roleNames) {
- if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) {
- db.dropRole(roleName);
- }
- }
- }
-
- /**
- * Clear out any side effects of running tests
- */
- public void clearTestSideEffects() throws Exception {
- if (System.getenv(QTEST_LEAVE_FILES) != null) {
- return;
- }
-
- clearTablesCreatedDuringTests();
- }
-
- public void cleanUp() throws Exception {
- if (!isSessionStateStarted) {
- startSessionState();
- }
- if (System.getenv(QTEST_LEAVE_FILES) != null) {
- return;
- }
-
- clearTablesCreatedDuringTests();
-
- SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", true);
-
-        if (!cleanupScript.isEmpty()) {
- String cleanupCommands = readEntireFileIntoString(new File(cleanupScript));
- LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
- if (cliDriver == null) {
- cliDriver = new CliDriver();
- }
- cliDriver.processLine(cleanupCommands);
- }
-
- SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", false);
-
- // delete any contents in the warehouse dir
- Path p = new Path(testWarehouse);
- FileSystem fs = p.getFileSystem(conf);
-
- try {
- FileStatus[] ls = fs.listStatus(p);
- for (int i = 0; (ls != null) && (i < ls.length); i++) {
- fs.delete(ls[i].getPath(), true);
- }
- } catch (FileNotFoundException e) {
- // Best effort
- }
-
- FunctionRegistry.unregisterTemporaryUDF("test_udaf");
- FunctionRegistry.unregisterTemporaryUDF("test_error");
- }
-
- public void createSources() throws Exception {
- if (!isSessionStateStarted) {
- startSessionState();
- }
- conf.setBoolean("hive.test.init.phase", true);
-
- if (cliDriver == null) {
- cliDriver = new CliDriver();
- }
- cliDriver.processLine("set test.data.dir=" + testFiles + ";");
-
- conf.setBoolean("hive.test.init.phase", false);
- }
-
- public void init() throws Exception {
- testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
- conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, false);
- String execEngine = conf.get("hive.execution.engine");
- conf.set("hive.execution.engine", "mr");
- SessionState.start(conf);
- conf.set("hive.execution.engine", execEngine);
- db = Hive.get(conf);
- pd = new ParseDriver();
- sem = new SemanticAnalyzer(conf);
- }
-
- public void init(String tname) throws Exception {
- cleanUp();
- createSources();
- cliDriver.processCmd("set hive.cli.print.header=true;");
- }
-
- public void cliInit(String tname) throws Exception {
- cliInit(tname, true);
- }
-
- public String cliInit(String tname, boolean recreate) throws Exception {
- if (recreate) {
- cleanUp();
- createSources();
- }
-
- HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
- "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
- Utilities.clearWorkMap();
- CliSessionState ss = new CliSessionState(conf);
- assert ss != null;
- ss.in = System.in;
-
- String outFileExtension = getOutFileExtension(tname);
- String stdoutName = null;
- if (outDir != null) {
- File qf = new File(outDir, tname);
- stdoutName = qf.getName().concat(outFileExtension);
- } else {
- stdoutName = tname + outFileExtension;
- }
-
- File outf = new File(logDir, stdoutName);
- OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
- if (qSortQuerySet.contains(tname)) {
- ss.out = new SortPrintStream(fo, "UTF-8");
- } else if (qHashQuerySet.contains(tname)) {
- ss.out = new DigestPrintStream(fo, "UTF-8");
- } else if (qSortNHashQuerySet.contains(tname)) {
- ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
- } else {
- ss.out = new PrintStream(fo, true, "UTF-8");
- }
- ss.err = new CachingPrintStream(fo, true, "UTF-8");
- ss.setIsSilent(true);
- SessionState oldSs = SessionState.get();
-
- if (oldSs != null && clusterType == MiniClusterType.tez) {
- oldSs.close();
- }
-
- if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
- oldSs.out.close();
- }
- SessionState.start(ss);
-
- cliDriver = new CliDriver();
- cliDriver.processInitFiles(ss);
-
- return outf.getAbsolutePath();
- }
-
- private CliSessionState startSessionState()
- throws IOException {
-
- HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
- "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
-
- String execEngine = conf.get("hive.execution.engine");
- conf.set("hive.execution.engine", "mr");
- CliSessionState ss = new CliSessionState(conf);
- assert ss != null;
- ss.in = System.in;
- ss.out = System.out;
- ss.err = System.out;
-
- SessionState oldSs = SessionState.get();
- if (oldSs != null && clusterType == MiniClusterType.tez) {
- oldSs.close();
- }
- if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
- oldSs.out.close();
- }
- SessionState.start(ss);
-
- isSessionStateStarted = true;
-
- conf.set("hive.execution.engine", execEngine);
- return ss;
- }
-
- public int executeOne(String tname) {
- String q = qMap.get(tname);
-
- if (q.indexOf(";") == -1) {
- return -1;
- }
-
- String q1 = q.substring(0, q.indexOf(";") + 1);
- String qrest = q.substring(q.indexOf(";") + 1);
- qMap.put(tname, qrest);
-
- LOG.info("Executing " + q1);
- return cliDriver.processLine(q1);
- }
-
- public static final String CRLF = System.getProperty("line.separator");
-
- public int executeClient(String tname1, String tname2) {
- String commands = getCommands(tname1) + CRLF + getCommands(tname2);
- return cliDriver.processLine(commands);
- }
-
- public int executeClient(String tname) {
- conf.set("mapreduce.job.name", "test");
- return cliDriver.processLine(getCommands(tname), false);
- }
-
- private String getCommands(String tname) {
- String commands = qMap.get(tname);
- StringBuilder newCommands = new StringBuilder(commands.length());
- int lastMatchEnd = 0;
- Matcher commentMatcher = Pattern.compile("^--.*$", Pattern.MULTILINE).matcher(commands);
- while (commentMatcher.find()) {
- newCommands.append(commands.substring(lastMatchEnd, commentMatcher.start()));
- newCommands.append(commentMatcher.group().replaceAll("(?<!\\\\);", "\\\\;"));
- lastMatchEnd = commentMatcher.end();
- }
- newCommands.append(commands.substring(lastMatchEnd, commands.length()));
- commands = newCommands.toString();
- return commands;
- }
-
- public boolean shouldBeSkipped(String tname) {
- return qSkipSet.contains(tname);
- }
-
- private String getOutFileExtension(String fname) {
- String outFileExtension = ".out";
- if (qJavaVersionSpecificOutput.contains(fname)) {
- outFileExtension = ".java" + javaVersion + ".out";
- }
-
- return outFileExtension;
- }
-
- /**
- * Given the current configurations (e.g., hadoop version and execution mode), return
- * the correct file name to compare with the current test run output.
- *
- * @param outDir The directory where the reference log files are stored.
- * @param testName The test file name (terminated by ".out").
- * @return The file name appended with the configuration values if it exists.
- */
- public String outPath(String outDir, String testName) {
- String ret = (new File(outDir, testName)).getPath();
- // List of configurations. Currently the list consists of hadoop version and execution
- // mode only
- List<String> configs = new ArrayList<String>();
- configs.add(this.hadoopVer);
-
- Deque<String> stack = new LinkedList<String>();
- StringBuilder sb = new StringBuilder();
- sb.append(testName);
- stack.push(sb.toString());
-
- // example file names are input1.q.out_0.20.0_minimr or input2.q.out_0.17
- for (String s : configs) {
- sb.append('_');
- sb.append(s);
- stack.push(sb.toString());
- }
- while (stack.size() > 0) {
- String fileName = stack.pop();
- File f = new File(outDir, fileName);
- if (f.exists()) {
- ret = f.getPath();
- break;
- }
- }
- return ret;
- }
-
- private Pattern[] toPattern(String[] patternStrs) {
- Pattern[] patterns = new Pattern[patternStrs.length];
- for (int i = 0; i < patternStrs.length; i++) {
- patterns[i] = Pattern.compile(patternStrs[i]);
- }
- return patterns;
- }
-
- private void maskPatterns(Pattern[] patterns, String fname) throws Exception {
- String maskPattern = "#### A masked pattern was here ####";
-
- String line;
- BufferedReader in;
- BufferedWriter out;
-
- File file = new File(fname);
- File fileOrig = new File(fname + ".orig");
- FileUtils.copyFile(file, fileOrig);
-
- in = new BufferedReader(new InputStreamReader(new FileInputStream(fileOrig), "UTF-8"));
- out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
-
- boolean lastWasMasked = false;
- while (null != (line = in.readLine())) {
- for (Pattern pattern : patterns) {
- line = pattern.matcher(line).replaceAll(maskPattern);
- }
-
- if (line.equals(maskPattern)) {
- // We're folding multiple masked lines into one.
- if (!lastWasMasked) {
- out.write(line);
- out.write("\n");
- lastWasMasked = true;
- }
- } else {
- out.write(line);
- out.write("\n");
- lastWasMasked = false;
- }
- }
-
- in.close();
- out.close();
- }
-
- private final Pattern[] planMask = toPattern(new String[]{
- ".*file:.*",
- ".*pfile:.*",
- ".*hdfs:.*",
- ".*/tmp/.*",
- ".*invalidscheme:.*",
- ".*lastUpdateTime.*",
- ".*lastAccessTime.*",
- ".*lastModifiedTime.*",
- ".*[Oo]wner.*",
- ".*CreateTime.*",
- ".*LastAccessTime.*",
- ".*Location.*",
- ".*LOCATION '.*",
- ".*transient_lastDdlTime.*",
- ".*last_modified_.*",
- ".*at org.*",
- ".*at sun.*",
- ".*at java.*",
- ".*at junit.*",
- ".*Caused by:.*",
- ".*LOCK_QUERYID:.*",
- ".*LOCK_TIME:.*",
- ".*grantTime.*",
- ".*[.][.][.] [0-9]* more.*",
- ".*job_[0-9_]*.*",
- ".*job_local[0-9_]*.*",
- ".*USING 'java -cp.*",
- "^Deleted.*",
- ".*DagName:.*",
- ".*Input:.*/data/files/.*",
- ".*Output:.*/data/files/.*",
- ".*total number of created files now is.*"
- });
-
- public int checkCliDriverResults(String tname) throws Exception {
- assert (qMap.containsKey(tname));
-
- String outFileExtension = getOutFileExtension(tname);
- String outFileName = outPath(outDir, tname + outFileExtension);
-
- File f = new File(logDir, tname + outFileExtension);
-
- maskPatterns(planMask, f.getPath());
- int exitVal = executeDiffCommand(f.getPath(),
- outFileName, false,
- qSortSet.contains(tname));
-
- if (exitVal != 0 && overWrite) {
- exitVal = overwriteResults(f.getPath(), outFileName);
- }
-
- return exitVal;
- }
-
-
- public int checkCompareCliDriverResults(String tname, List<String> outputs) throws Exception {
- assert outputs.size() > 1;
- maskPatterns(planMask, outputs.get(0));
- for (int i = 1; i < outputs.size(); ++i) {
- maskPatterns(planMask, outputs.get(i));
- int ecode = executeDiffCommand(
- outputs.get(i - 1), outputs.get(i), false, qSortSet.contains(tname));
- if (ecode != 0) {
- LOG.info("Files don't match: " + outputs.get(i - 1) + " and " + outputs.get(i));
- return ecode;
- }
- }
- return 0;
- }
-
- private static int overwriteResults(String inFileName, String outFileName) throws Exception {
- // This method can be replaced with Files.copy(source, target, REPLACE_EXISTING)
- // once Hive uses JAVA 7.
- LOG.info("Overwriting results " + inFileName + " to " + outFileName);
- return executeCmd(new String[]{
- "cp",
- getQuotedString(inFileName),
- getQuotedString(outFileName)
- });
- }
-
- private static int executeDiffCommand(String inFileName,
- String outFileName,
- boolean ignoreWhiteSpace,
- boolean sortResults
- ) throws Exception {
-
- int result = 0;
-
- if (sortResults) {
-            // sort will try to open the output file in write mode on Windows. We need to
-            // close it first.
- SessionState ss = SessionState.get();
- if (ss != null && ss.out != null && ss.out != System.out) {
- ss.out.close();
- }
-
- String inSorted = inFileName + SORT_SUFFIX;
- String outSorted = outFileName + SORT_SUFFIX;
-
- result = sortFiles(inFileName, inSorted);
- result |= sortFiles(outFileName, outSorted);
- if (result != 0) {
- LOG.error("ERROR: Could not sort files before comparing");
- return result;
- }
- inFileName = inSorted;
- outFileName = outSorted;
- }
-
- ArrayList<String> diffCommandArgs = new ArrayList<String>();
- diffCommandArgs.add("diff");
-
- // Text file comparison
- diffCommandArgs.add("-a");
-
- // Ignore changes in the amount of white space
- if (ignoreWhiteSpace || Shell.WINDOWS) {
- diffCommandArgs.add("-b");
- }
-
- // Files created on Windows machines have different line endings
- // than files created on Unix/Linux. Windows uses carriage return and line feed
- // ("\r\n") as a line ending, whereas Unix uses just line feed ("\n").
-        // Also, StringBuilder.toString() and Stream-to-String conversions add extra
-        // spaces at the end of the line.
- if (Shell.WINDOWS) {
- diffCommandArgs.add("--strip-trailing-cr"); // Strip trailing carriage return on input
- diffCommandArgs.add("-B"); // Ignore changes whose lines are all blank
- }
- // Add files to compare to the arguments list
- diffCommandArgs.add(getQuotedString(inFileName));
- diffCommandArgs.add(getQuotedString(outFileName));
-
- result = executeCmd(diffCommandArgs);
-
- if (sortResults) {
- new File(inFileName).delete();
- new File(outFileName).delete();
- }
-
- return result;
- }
-
- private static int sortFiles(String in, String out) throws Exception {
- return executeCmd(new String[]{
- "sort",
- getQuotedString(in),
- }, out, null);
- }
-
- private static int executeCmd(Collection<String> args) throws Exception {
- return executeCmd(args, null, null);
- }
-
- private static int executeCmd(String[] args) throws Exception {
- return executeCmd(args, null, null);
- }
-
- private static int executeCmd(Collection<String> args, String outFile, String errFile) throws
- Exception {
- String[] cmdArray = args.toArray(new String[args.size()]);
- return executeCmd(cmdArray, outFile, errFile);
- }
-
- private static int executeCmd(String[] args, String outFile, String errFile) throws Exception {
- LOG.info("Running: " + org.apache.commons.lang.StringUtils.join(args, ' '));
-
- PrintStream out = outFile == null ?
- SessionState.getConsole().getChildOutStream() :
- new PrintStream(new FileOutputStream(outFile), true);
- PrintStream err = errFile == null ?
- SessionState.getConsole().getChildErrStream() :
- new PrintStream(new FileOutputStream(errFile), true);
-
- Process executor = Runtime.getRuntime().exec(args);
-
- StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, err);
- StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, out);
-
- outPrinter.start();
- errPrinter.start();
-
- int result = executor.waitFor();
-
- outPrinter.join();
- errPrinter.join();
-
- if (outFile != null) {
- out.close();
- }
-
- if (errFile != null) {
- err.close();
- }
-
- return result;
- }
-
- private static String getQuotedString(String str) {
- return Shell.WINDOWS ? String.format("\"%s\"", str) : str;
- }
-
- public ASTNode parseQuery(String tname) throws Exception {
- return pd.parse(qMap.get(tname));
- }
-
- public void resetParser() throws SemanticException {
- pd = new ParseDriver();
- sem = new SemanticAnalyzer(conf);
- }
-
- public TreeMap<String, String> getQMap() {
- return qMap;
- }
-
- /**
- * HiveTestSetup defines test fixtures which are reused across testcases,
- * and are needed before any test can be run
- */
- public static class HiveTestSetup {
- private MiniZooKeeperCluster zooKeeperCluster = null;
- private int zkPort;
- private ZooKeeper zooKeeper;
-
- public HiveTestSetup() {
- }
-
- public void preTest(HiveConf conf) throws Exception {
-
- if (zooKeeperCluster == null) {
- //create temp dir
- String tmpBaseDir = System.getProperty("test.tmp.dir");
- File tmpDir = Utilities.createTempDir(tmpBaseDir);
-
- zooKeeperCluster = new MiniZooKeeperCluster();
- zkPort = zooKeeperCluster.startup(tmpDir);
- }
-
- if (zooKeeper != null) {
- zooKeeper.close();
- }
-
- int sessionTimeout = (int) conf.getTimeVar(HiveConf.ConfVars
- .HIVE_ZOOKEEPER_SESSION_TIMEOUT, TimeUnit.MILLISECONDS);
- zooKeeper = new ZooKeeper("localhost:" + zkPort, sessionTimeout, new Watcher() {
- @Override
- public void process(WatchedEvent arg0) {
- }
- });
-
- String zkServer = "localhost";
- conf.set("hive.zookeeper.quorum", zkServer);
- conf.set("hive.zookeeper.client.port", "" + zkPort);
- }
-
- public void postTest(HiveConf conf) throws Exception {
- if (zooKeeperCluster == null) {
- return;
- }
-
- if (zooKeeper != null) {
- zooKeeper.close();
- }
-
- ZooKeeperHiveLockManager.releaseAllLocks(conf);
- }
-
- public void tearDown() throws Exception {
- if (zooKeeperCluster != null) {
- zooKeeperCluster.shutdown();
- zooKeeperCluster = null;
- }
- }
- }
-
- /**
-     * QTRunner: Runnable class for running a single query file.
- **/
- public static class HiveTestRunner implements Runnable {
- private final HiveTestUtil qt;
- private final String fname;
-
- public HiveTestRunner(HiveTestUtil qt, String fname) {
- this.qt = qt;
- this.fname = fname;
- }
-
- @Override
- public void run() {
- try {
- // assumption is that environment has already been cleaned once globally
- // hence each thread does not call cleanUp() and createSources() again
- qt.cliInit(fname, false);
- qt.executeClient(fname);
- } catch (Throwable e) {
- LOG.error("Query file " + fname + " failed with exception ", e);
- e.printStackTrace();
- outputTestFailureHelpMessage();
- }
- }
- }
-
- /**
- * Executes a set of query files in sequence.
- *
- * @param qfiles array of input query files containing arbitrary number of hive
- * queries
- * @param qt array of HiveTestUtils, one per qfile
-     * @return true if all queries passed, false otherwise
- */
- public static boolean queryListRunnerSingleThreaded(File[] qfiles, HiveTestUtil[] qt)
- throws Exception {
- boolean failed = false;
- qt[0].cleanUp();
- qt[0].createSources();
- for (int i = 0; i < qfiles.length && !failed; i++) {
- qt[i].clearTestSideEffects();
- qt[i].cliInit(qfiles[i].getName(), false);
- qt[i].executeClient(qfiles[i].getName());
- int ecode = qt[i].checkCliDriverResults(qfiles[i].getName());
- if (ecode != 0) {
- failed = true;
- LOG.error("Test " + qfiles[i].getName()
- + " results check failed with error code " + ecode);
- outputTestFailureHelpMessage();
- }
- qt[i].clearPostTestEffects();
- }
- return (!failed);
- }
-
- public static void outputTestFailureHelpMessage() {
- LOG.error("See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, "
- + "or check ./ql/target/surefire-reports or " +
- "./itests/qtest/target/surefire-reports/ for specific test cases logs.");
- }
-
- public static String ensurePathEndsInSlash(String path) {
- if (path == null) {
- throw new NullPointerException("Path cannot be null");
- }
- if (path.endsWith(File.separator)) {
- return path;
- } else {
- return path + File.separator;
- }
- }
-
- private static String[] cachedQvFileList = null;
- private static List<String> cachedDefaultQvFileList = null;
-    private static Pattern qvSuffix = Pattern.compile("_[0-9]+\\.qv$", Pattern.CASE_INSENSITIVE);
-
- public static List<String> getVersionFiles(String queryDir, String tname) {
- ensureQvFileList(queryDir);
- List<String> result = getVersionFilesInternal(tname);
- if (result == null) {
- result = cachedDefaultQvFileList;
- }
- return result;
- }
-
- private static void ensureQvFileList(String queryDir) {
- if (cachedQvFileList != null) return;
- // Not thread-safe.
- LOG.info("Getting versions from " + queryDir);
- cachedQvFileList = (new File(queryDir)).list(new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.toLowerCase().endsWith(".qv");
- }
- });
- if (cachedQvFileList == null) return; // no files at all
- Arrays.sort(cachedQvFileList, String.CASE_INSENSITIVE_ORDER);
- List<String> defaults = getVersionFilesInternal("default");
-        cachedDefaultQvFileList = (defaults != null)
-                ? Collections.unmodifiableList(new ArrayList<String>(defaults))
-                : Collections.unmodifiableList(new ArrayList<String>());
- }
-
- private static List<String> getVersionFilesInternal(String tname) {
- if (cachedQvFileList == null) {
- return new ArrayList<String>();
- }
- int pos = Arrays.binarySearch(cachedQvFileList, tname, String.CASE_INSENSITIVE_ORDER);
- if (pos >= 0) {
- throw new BuildException("Unexpected file list element: " + cachedQvFileList[pos]);
- }
- List<String> result = null;
- for (pos = (-pos - 1); pos < cachedQvFileList.length; ++pos) {
- String candidate = cachedQvFileList[pos];
- if (candidate.length() <= tname.length()
- || !tname.equalsIgnoreCase(candidate.substring(0, tname.length()))
- || !qvSuffix.matcher(candidate.substring(tname.length())).matches()) {
- break;
- }
- if (result == null) {
- result = new ArrayList<String>();
- }
- result.add(candidate);
- }
- return result;
- }
-
- public void failed(int ecode, String fname, String debugHint) {
- String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
- Assert.fail("Client Execution failed with error code = " + ecode +
- (command != null ? " running " + command : "") + (debugHint != null ? debugHint :
- ""));
- }
-
-    // for negative tests that unexpectedly succeeded; no need to print the query string
- public void failed(String fname, String debugHint) {
- Assert.fail("Client Execution was expected to fail, but succeeded with error code 0 " +
- (debugHint != null ? debugHint : ""));
- }
-
- public void failedDiff(int ecode, String fname, String debugHint) {
- Assert.fail("Client Execution results failed with error code = " + ecode +
- (debugHint != null ? debugHint : ""));
- }
-
- public void failed(Throwable e, String fname, String debugHint) {
- String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
- LOG.error("Exception: ", e);
- e.printStackTrace();
- LOG.error("Failed query: " + fname);
- Assert.fail("Unexpected exception " +
- org.apache.hadoop.util.StringUtils.stringifyException(e) + "\n" +
- (command != null ? " running " + command : "") +
- (debugHint != null ? debugHint : ""));
- }
-
- public static class WindowsPathUtil {
-
- public static void convertPathsFromWindowsToHdfs(HiveConf conf) {
- // Following local paths are used as HDFS paths in unit tests.
-            // It works well on Unix, as the path notation in Unix and HDFS is more or less
-            // the same. But on Windows, the drive letter separator ':' and backslash '\'
-            // are invalid characters in HDFS, so we need to convert these local paths to
-            // HDFS paths before using them in unit tests.
-
- String orgWarehouseDir = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
- conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, getHdfsUriString(orgWarehouseDir));
-
- String orgTestTempDir = System.getProperty("test.tmp.dir");
- System.setProperty("test.tmp.dir", getHdfsUriString(orgTestTempDir));
-
- String orgTestWarehouseDir = System.getProperty("test.warehouse.dir");
- System.setProperty("test.warehouse.dir", getHdfsUriString(orgTestWarehouseDir));
-
- String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
- conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getHdfsUriString(orgScratchDir));
- }
-
- public static String getHdfsUriString(String uriStr) {
- assert uriStr != null;
- if (Shell.WINDOWS) {
- // If the URI conversion is from Windows to HDFS then replace the '\' with '/'
- // and remove the windows single drive letter & colon from absolute path.
- return uriStr.replace('\\', '/')
- .replaceFirst("/[c-zC-Z]:", "/")
- .replaceFirst("^[c-zC-Z]:", "");
- }
- return uriStr;
- }
- }
-}
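HiveTestUtil above drives q-file tests end to end: register a query file, initialize a CLI session, execute, then diff the masked output against a golden file. A sketch of that lifecycle follows, assuming a JUnit caller; the wrapper class and file names are hypothetical, but every HiveTestUtil method invoked exists in the deleted code.

import java.io.File;

public class HiveTestUtilUsageSketch {
    public static void runQueryFile(HiveTestUtil qt, File qfile) throws Exception {
        qt.addFile(qfile);                                 // register the query file contents
        qt.cliInit(qfile.getName(), true);                 // recreate sources and open a CLI session
        int ecode = qt.executeClient(qfile.getName());     // run the queries through CliDriver
        if (ecode != 0) {
            qt.failed(ecode, qfile.getName(), null);       // asserts, including the last command run
        }
        ecode = qt.checkCliDriverResults(qfile.getName()); // mask volatile output, diff vs. golden file
        if (ecode != 0) {
            qt.failedDiff(ecode, qfile.getName(), null);
        }
        qt.clearPostTestEffects();                         // release ZooKeeper locks etc.
    }
}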
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
deleted file mode 100644
index bac2449..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.metastore.HiveMetaHook;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.*;
-
-import static org.apache.phoenix.hive.util.ColumnMappingUtils.getColumnMappingMap;
-
-/**
- * Implementation of the notification methods that are invoked as part of transactions against
- * the Hive metastore, allowing Phoenix metadata to be kept in sync with Hive's metastore.
- */
-public class PhoenixMetaHook implements HiveMetaHook {
-
- private static final Log LOG = LogFactory.getLog(PhoenixMetaHook.class);
-
- @Override
- public void preCreateTable(Table table) throws MetaException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Precreate table : " + table.getTableName());
- }
-
- try (Connection conn = PhoenixConnectionUtil.getConnection(table)) {
- String tableType = table.getTableType();
- String tableName = PhoenixStorageHandlerUtil.getTargetTableName(table);
-
- if (TableType.EXTERNAL_TABLE.name().equals(tableType)) {
- // Check whether phoenix table exists.
- if (!PhoenixUtil.existTable(conn, tableName)) {
-                    // Fail if the Phoenix table does not exist.
- throw new MetaException("Phoenix table " + tableName + " doesn't exist");
- }
- } else if (TableType.MANAGED_TABLE.name().equals(tableType)) {
- // Check whether phoenix table exists.
- if (PhoenixUtil.existTable(conn, tableName)) {
-                    // Fail if the Phoenix table already exists.
-                    throw new MetaException("Phoenix table " + tableName + " already exists.");
- }
-
- PhoenixUtil.createTable(conn, createTableStatement(table));
- } else {
- throw new MetaException("Unsupported table Type: " + table.getTableType());
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Phoenix table " + tableName + " was created");
- }
- } catch (SQLException e) {
- throw new MetaException(e.getMessage());
- }
- }
-
- private String createTableStatement(Table table) throws MetaException {
- Map<String, String> tableParameterMap = table.getParameters();
-
- String tableName = PhoenixStorageHandlerUtil.getTargetTableName(table);
- StringBuilder ddl = new StringBuilder("create table ").append(tableName).append(" (\n");
-
- String phoenixRowKeys = tableParameterMap.get(PhoenixStorageHandlerConstants
- .PHOENIX_ROWKEYS);
- StringBuilder realRowKeys = new StringBuilder();
- List<String> phoenixRowKeyList = new ArrayList<>();
- for (String key:phoenixRowKeys.split(PhoenixStorageHandlerConstants.COMMA)) {
- phoenixRowKeyList.add(key.trim());
- }
- Map<String, String> columnMappingMap = getColumnMappingMap(tableParameterMap.get
- (PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING));
-
- List<FieldSchema> fieldSchemaList = table.getSd().getCols();
- for (int i = 0, limit = fieldSchemaList.size(); i < limit; i++) {
- FieldSchema fieldSchema = fieldSchemaList.get(i);
- String fieldName = fieldSchema.getName();
- String fieldType = fieldSchema.getType();
- String columnType = PhoenixUtil.getPhoenixType(fieldType);
-
- String rowKeyName = getRowKeyMapping(fieldName, phoenixRowKeyList);
- if (rowKeyName != null) {
- String columnName = columnMappingMap.get(fieldName);
- if(columnName != null) {
- rowKeyName = columnName;
- }
- // In case of RowKey
- if ("binary".equals(columnType)) {
-                    // Phoenix requires a max length for binary types in the type definition.
-                    // It is obtained from the column mapping, e.g. phoenix.rowkeys = "r1, r2(100), ..."
-                    List<String> tokenList = new ArrayList<>();
- for (String name: rowKeyName.split("\\(|\\)")) {
- tokenList.add(name.trim());
- }
- columnType = columnType + "(" + tokenList.get(1) + ")";
- rowKeyName = tokenList.get(0);
- }
-
- ddl.append(" ").append("\"").append(rowKeyName).append("\"").append(" ").append(columnType).append(" not " +
- "null,\n");
- realRowKeys.append("\"").append(rowKeyName).append("\",");
- } else {
- // In case of Column
- String columnName = columnMappingMap.get(fieldName);
-
- if (columnName == null) {
- // Use field definition.
- columnName = fieldName;
- }
-
- if ("binary".equals(columnType)) {
-                    // Phoenix requires a max length for binary types in the type definition.
-                    // It is obtained from the column mapping, e.g. phoenix.column.mapping=c1:c1(100)
- List<String> tokenList = new ArrayList<>();
- for(String name: columnName.split("\\(|\\)")){
- tokenList.add(name.trim());
- }
- columnType = columnType + "(" + tokenList.get(1) + ")";
- columnName = tokenList.get(0);
- }
-
- ddl.append(" ").append("\"").append(columnName).append("\"").append(" ").append(columnType).append(",\n");
- }
- }
- ddl.append(" ").append("constraint pk_").append(PhoenixUtil.getTableSchema(tableName.toUpperCase())[1]).append(" primary key(")
- .append(realRowKeys.deleteCharAt(realRowKeys.length() - 1)).append(")\n)\n");
-
- String tableOptions = tableParameterMap.get(PhoenixStorageHandlerConstants
- .PHOENIX_TABLE_OPTIONS);
- if (tableOptions != null) {
- ddl.append(tableOptions);
- }
-
- String statement = ddl.toString();
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("DDL : " + statement);
- }
-
- return statement;
- }
-
- private String getRowKeyMapping(String rowKeyName, List<String> phoenixRowKeyList) {
- String rowKeyMapping = null;
-
- for (String phoenixRowKey : phoenixRowKeyList) {
- if (phoenixRowKey.equals(rowKeyName)) {
- rowKeyMapping = phoenixRowKey;
- break;
- } else if (phoenixRowKey.startsWith(rowKeyName + "(") && phoenixRowKey.endsWith(")")) {
- rowKeyMapping = phoenixRowKey;
- break;
- }
- }
-
- return rowKeyMapping;
- }
-
- @Override
- public void rollbackCreateTable(Table table) throws MetaException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Rollback for table : " + table.getTableName());
- }
-
- dropTableIfExist(table);
- }
-
- @Override
- public void commitCreateTable(Table table) throws MetaException {
-
- }
-
- @Override
- public void preDropTable(Table table) throws MetaException {
- }
-
- @Override
- public void rollbackDropTable(Table table) throws MetaException {
- }
-
- @Override
- public void commitDropTable(Table table, boolean deleteData) throws MetaException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Commit drop table : " + table.getTableName());
- }
-
- dropTableIfExist(table);
- }
-
- private void dropTableIfExist(Table table) throws MetaException {
- try (Connection conn = PhoenixConnectionUtil.getConnection(table)) {
- String tableType = table.getTableType();
- String tableName = PhoenixStorageHandlerUtil.getTargetTableName(table);
-
- if (TableType.MANAGED_TABLE.name().equals(tableType)) {
-                // Drop the Phoenix table if it exists.
- if (PhoenixUtil.existTable(conn, tableName)) {
- PhoenixUtil.dropTable(conn, tableName);
- }
- }
- } catch (SQLException e) {
- throw new MetaException(e.getMessage());
- }
- }
-}
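PhoenixMetaHook implements Hive's HiveMetaHook contract, so the metastore client invokes it around CREATE TABLE and DROP TABLE. A sketch of the create-table call order the hook above is written for, assuming a Table object supplied by the metastore; this mirrors the contract rather than reproducing the client's exact code.

import org.apache.hadoop.hive.metastore.HiveMetaHook;
import org.apache.hadoop.hive.metastore.api.Table;

public class MetaHookLifecycleSketch {
    static void createTableWithHook(HiveMetaHook hook, Table table) throws Exception {
        hook.preCreateTable(table);          // validates EXTERNAL tables, creates MANAGED tables in Phoenix
        try {
            // ... the metastore persists the Hive table definition here ...
            hook.commitCreateTable(table);   // a no-op in PhoenixMetaHook
        } catch (Exception e) {
            hook.rollbackCreateTable(table); // drops the Phoenix table if one was created
            throw e;
        }
    }
}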
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
deleted file mode 100644
index 4e9f465..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaHook;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
-import org.apache.hadoop.hive.ql.metadata.InputEstimator;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormat;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.mapreduce.PhoenixInputFormat;
-import org.apache.phoenix.hive.mapreduce.PhoenixOutputFormat;
-import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.UUID;
-
-/**
- * This class manages all the initial Phoenix/Hive table configuration and SerDe selection.
- */
-@SuppressWarnings("deprecation")
-public class PhoenixStorageHandler extends DefaultStorageHandler implements
- HiveStoragePredicateHandler, InputEstimator {
-
-
- private Configuration jobConf;
- private Configuration hbaseConf;
-
-
- @Override
- public void setConf(Configuration conf) {
- jobConf = conf;
- hbaseConf = HBaseConfiguration.create(conf);
- }
-
- @Override
- public Configuration getConf() {
- return hbaseConf;
- }
-
- private static final Log LOG = LogFactory.getLog(PhoenixStorageHandler.class);
-
- public PhoenixStorageHandler() {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PhoenixStorageHandler created");
- }
- }
-
- @Override
- public HiveMetaHook getMetaHook() {
- return new PhoenixMetaHook();
- }
-
- @Override
- public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
- try {
- TableMapReduceUtil.addDependencyJars(jobConf);
- org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
- PhoenixStorageHandler.class);
- JobConf hbaseJobConf = new JobConf(getConf());
- org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
- ShimLoader.getHadoopShims().mergeCredentials(jobConf, hbaseJobConf);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
-
-
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public Class<? extends OutputFormat> getOutputFormatClass() {
- return PhoenixOutputFormat.class;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public Class<? extends InputFormat> getInputFormatClass() {
- return PhoenixInputFormat.class;
- }
-
- @Override
- public void configureInputJobProperties(TableDesc tableDesc, Map<String, String>
- jobProperties) {
- configureJobProperties(tableDesc, jobProperties);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Configuring input job for table : " + tableDesc.getTableName());
- }
-
-        // Initialization efficiency: tell the SerDe whether this is input or output work.
- tableDesc.getProperties().setProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK,
- PhoenixStorageHandlerConstants.IN_WORK);
- }
-
- @Override
- public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String>
- jobProperties) {
- configureJobProperties(tableDesc, jobProperties);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Configuring output job for table : " + tableDesc.getTableName());
- }
-
-        // Initialization efficiency: tell the SerDe whether this is input or output work.
- tableDesc.getProperties().setProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK,
- PhoenixStorageHandlerConstants.OUT_WORK);
- }
-
- @Override
- public void configureTableJobProperties(TableDesc tableDesc, Map<String, String>
- jobProperties) {
- configureJobProperties(tableDesc, jobProperties);
- }
-
- @SuppressWarnings({"unchecked", "rawtypes"})
- protected void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
- Properties tableProperties = tableDesc.getProperties();
-
- String inputFormatClassName =
- tableProperties.getProperty(PhoenixStorageHandlerConstants
- .HBASE_INPUT_FORMAT_CLASS);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(PhoenixStorageHandlerConstants.HBASE_INPUT_FORMAT_CLASS + " is " +
- inputFormatClassName);
- }
-
- Class<?> inputFormatClass;
- try {
- if (inputFormatClassName != null) {
- inputFormatClass = JavaUtils.loadClass(inputFormatClassName);
- } else {
- inputFormatClass = PhoenixInputFormat.class;
- }
- } catch (Exception e) {
- LOG.error(e.getMessage(), e);
- throw new RuntimeException(e);
- }
-
- if (inputFormatClass != null) {
- tableDesc.setInputFileFormatClass((Class<? extends InputFormat>) inputFormatClass);
- }
-
- String tableName = tableProperties.getProperty(PhoenixStorageHandlerConstants
- .PHOENIX_TABLE_NAME);
- if (tableName == null) {
- tableName = tableDesc.getTableName();
- tableProperties.setProperty(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME,
- tableName);
- }
- SessionState sessionState = SessionState.get();
-
- String sessionId;
- if (sessionState != null) {
- sessionId = sessionState.getSessionId();
- } else {
- sessionId = UUID.randomUUID().toString();
- }
- jobProperties.put(PhoenixConfigurationUtil.SESSION_ID, sessionId);
- jobProperties.put(PhoenixConfigurationUtil.INPUT_TABLE_NAME, tableName);
- jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM, tableProperties
- .getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM,
- PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_QUORUM));
- jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT, tableProperties
- .getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT, String.valueOf
- (PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PORT)));
- jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT, tableProperties
- .getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT,
- PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PARENT));
- String columnMapping = tableProperties
- .getProperty(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING);
- if (columnMapping != null) {
- jobProperties.put(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING, columnMapping);
- }
-
- jobProperties.put(hive_metastoreConstants.META_TABLE_STORAGE, this.getClass().getName());
-
- // Set the ZooKeeper settings below for jobs that work directly with HBase.
- jobProperties.put(HConstants.ZOOKEEPER_QUORUM, jobProperties.get
- (PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM));
- jobProperties.put(HConstants.ZOOKEEPER_CLIENT_PORT, jobProperties.get
- (PhoenixStorageHandlerConstants.ZOOKEEPER_PORT));
- jobProperties.put(HConstants.ZOOKEEPER_ZNODE_PARENT, jobProperties.get
- (PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT));
- addHBaseResources(jobConf, jobProperties);
- }
-
- /**
- * Utility method to add hbase-default.xml and hbase-site.xml properties to a new map
- * if they are not already present in the jobConf.
- * @param jobConf Job configuration
- * @param newJobProperties Map to which new properties should be added
- */
- private void addHBaseResources(Configuration jobConf,
- Map<String, String> newJobProperties) {
- Configuration conf = new Configuration(false);
- HBaseConfiguration.addHbaseResources(conf);
- for (Map.Entry<String, String> entry : conf) {
- if (jobConf.get(entry.getKey()) == null) {
- newJobProperties.put(entry.getKey(), entry.getValue());
- }
- }
- }
-
- @Override
- public Class<? extends SerDe> getSerDeClass() {
- return PhoenixSerDe.class;
- }
-
- @Override
- public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
- ExprNodeDesc predicate) {
- PhoenixSerDe phoenixSerDe = (PhoenixSerDe) deserializer;
- List<String> columnNameList = phoenixSerDe.getSerdeParams().getColumnNames();
-
- return PhoenixPredicateDecomposer.create(columnNameList).decomposePredicate(predicate);
- }
-
- @Override
- public Estimation estimate(JobConf job, TableScanOperator ts, long remaining) throws
- HiveException {
- String hiveTableName = ts.getConf().getTableMetadata().getTableName();
- int reducerCount = job.getInt(hiveTableName + PhoenixStorageHandlerConstants
- .PHOENIX_REDUCER_NUMBER, 1);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Estimating input size for table: " + hiveTableName + " with reducer count " +
- reducerCount + ". Remaining : " + remaining);
- }
-
- long bytesPerReducer = job.getLong(HiveConf.ConfVars.BYTESPERREDUCER.varname,
- Long.parseLong(HiveConf.ConfVars.BYTESPERREDUCER.getDefaultValue()));
- long totalLength = reducerCount * bytesPerReducer;
-
- return new Estimation(0, totalLength);
- }
-}
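
[Review note] configureJobProperties() above resolves each job property from the
Hive table properties with a built-in default. A minimal, self-contained sketch
of that defaulting pattern (property keys copied from the constants file below;
the class name is illustrative and not part of this patch):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    public class JobPropertyDefaultsSketch {
        public static void main(String[] args) {
            // Table properties as Hive would hand them over in the TableDesc.
            Properties tableProps = new Properties();
            tableProps.setProperty("phoenix.zookeeper.quorum", "zk1,zk2");

            // configureJobProperties() falls back to a default whenever the
            // table property is absent; same pattern here.
            Map<String, String> jobProps = new HashMap<>();
            jobProps.put("phoenix.zookeeper.quorum",
                    tableProps.getProperty("phoenix.zookeeper.quorum", "localhost"));
            jobProps.put("phoenix.zookeeper.client.port",
                    tableProps.getProperty("phoenix.zookeeper.client.port", "2181"));
            jobProps.put("phoenix.zookeeper.znode.parent",
                    tableProps.getProperty("phoenix.zookeeper.znode.parent", "/hbase"));
            System.out.println(jobProps);
        }
    }
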
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
deleted file mode 100644
index 2c3c602..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.constants;
-
-
-/**
- * Constants used by the Hive Storage Handler implementation
- */
-public class PhoenixStorageHandlerConstants {
-
- public static final String HBASE_INPUT_FORMAT_CLASS = "phoenix.input.format.class";
-
- public static final String PHOENIX_TABLE_NAME = "phoenix.table.name";
-
- public static final String ZOOKEEPER_QUORUM = "phoenix.zookeeper.quorum";
- public static final String ZOOKEEPER_PORT = "phoenix.zookeeper.client.port";
- public static final String ZOOKEEPER_PARENT = "phoenix.zookeeper.znode.parent";
- public static final String DEFAULT_ZOOKEEPER_QUORUM = "localhost";
- public static final int DEFAULT_ZOOKEEPER_PORT = 2181;
- public static final String DEFAULT_ZOOKEEPER_PARENT = "/hbase";
-
- public static final String PHOENIX_ROWKEYS = "phoenix.rowkeys";
- public static final String PHOENIX_COLUMN_MAPPING = "phoenix.column.mapping";
- public static final String PHOENIX_TABLE_OPTIONS = "phoenix.table.options";
-
- public static final String PHOENIX_TABLE_QUERY_HINT = ".query.hint";
- public static final String PHOENIX_REDUCER_NUMBER = ".reducer.count";
- public static final String DISABLE_WAL = ".disable.wal";
- public static final String BATCH_MODE = "batch.mode";
- public static final String AUTO_FLUSH = ".auto.flush";
-
- public static final String COLON = ":";
- public static final String COMMA = ",";
- public static final String EMPTY_STRING = "";
- public static final String EQUAL = "=";
- public static final String IS = "is";
- public static final String QUESTION = "?";
-
- public static final String SPLIT_BY_STATS = "split.by.stats";
- public static final String HBASE_SCAN_CACHE = "hbase.scan.cache";
- public static final String HBASE_SCAN_CACHEBLOCKS = "hbase.scan.cacheblock";
- public static final String HBASE_DATE_FORMAT = "hbase.date.format";
- public static final String HBASE_TIMESTAMP_FORMAT = "hbase.timestamp.format";
- public static final String DEFAULT_DATE_FORMAT = "yyyy-MM-dd";
- public static final String DEFAULT_TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-
- public static final String IN_OUT_WORK = "in.out.work";
- public static final String IN_WORK = "input";
- public static final String OUT_WORK = "output";
-}
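
[Review note] The constants whose values start with a dot (PHOENIX_TABLE_QUERY_HINT,
PHOENIX_REDUCER_NUMBER, DISABLE_WAL, AUTO_FLUSH) are suffixes concatenated with a
table name at runtime, as estimate() and PhoenixRecordWriter.initialize() do. A
small illustration (the table name is hypothetical):

    public class TableScopedKeySketch {
        public static void main(String[] args) {
            String tableName = "MY_TABLE"; // hypothetical table name
            // estimate() reads "<hive table name>.reducer.count":
            String reducerKey = tableName + ".reducer.count";
            // PhoenixRecordWriter reads "<table name, lower-cased>.disable.wal":
            String walKey = tableName.toLowerCase() + ".disable.wal";
            System.out.println(reducerKey + " / " + walKey);
        }
    }
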
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
deleted file mode 100644
index 7584e0d..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.RegionSizeCalculator;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.TableScanDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.InputFormat;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.phoenix.compile.QueryPlan;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
-import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
-import org.apache.phoenix.hive.query.PhoenixQueryBuilder;
-import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
-import org.apache.phoenix.jdbc.PhoenixStatement;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-import org.apache.phoenix.query.KeyRange;
-import org.apache.phoenix.util.PhoenixRuntime;
-
-/**
- * Custom InputFormat implementation that feeds Phoenix data into Hive.
- */
-@SuppressWarnings({"deprecation", "rawtypes"})
-public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<WritableComparable,
- T> {
-
- private static final Log LOG = LogFactory.getLog(PhoenixInputFormat.class);
-
- public PhoenixInputFormat() {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PhoenixInputFormat created");
- }
- }
-
- @Override
- public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
- String tableName = jobConf.get(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
-
- String query;
- String executionEngine = jobConf.get(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname,
- HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.getDefaultValue());
- if (LOG.isDebugEnabled()) {
- LOG.debug("Target table name at split phase : " + tableName + "with whereCondition :" +
- jobConf.get(TableScanDesc.FILTER_TEXT_CONF_STR) +
- " and " + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " : " +
- executionEngine);
- }
-
- List<IndexSearchCondition> conditionList = null;
- String filterExprSerialized = jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
- if (filterExprSerialized != null) {
- ExprNodeGenericFuncDesc filterExpr =
- Utilities.deserializeExpression(filterExprSerialized);
- PhoenixPredicateDecomposer predicateDecomposer =
- PhoenixPredicateDecomposer
- .create(Arrays.asList(jobConf.get(serdeConstants.LIST_COLUMNS).split(",")));
- predicateDecomposer.decomposePredicate(filterExpr);
- if (predicateDecomposer.isCalledPPD()) {
- conditionList = predicateDecomposer.getSearchConditionList();
- }
- }
-
- query = PhoenixQueryBuilder.getInstance().buildQuery(jobConf, tableName,
- PhoenixStorageHandlerUtil.getReadColumnNames(jobConf), conditionList);
-
- final QueryPlan queryPlan = getQueryPlan(jobConf, query);
- final List<KeyRange> allSplits = queryPlan.getSplits();
- final List<InputSplit> splits = generateSplits(jobConf, queryPlan, allSplits, query);
-
- return splits.toArray(new InputSplit[splits.size()]);
- }
-
- private List<InputSplit> generateSplits(final JobConf jobConf, final QueryPlan qplan,
- final List<KeyRange> splits, String query) throws
- IOException {
- if (qplan == null) {
- throw new NullPointerException();
- }
- if (splits == null) {
- throw new NullPointerException();
- }
- final List<InputSplit> psplits = new ArrayList<>(splits.size());
-
- Path[] tablePaths = FileInputFormat.getInputPaths(ShimLoader.getHadoopShims()
- .newJobContext(new Job(jobConf)));
- boolean splitByStats = jobConf.getBoolean(PhoenixStorageHandlerConstants.SPLIT_BY_STATS,
- false);
-
- setScanCacheSize(jobConf);
-
- // Resolve region locations so each split carries data-locality information.
- try (HConnection connection = HConnectionManager.createConnection(PhoenixConnectionUtil.getConfiguration(jobConf))) {
- RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf(qplan
- .getTableRef().getTable().getPhysicalName().toString()));
- RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(regionLocator, connection
- .getAdmin());
-
- for (List<Scan> scans : qplan.getScans()) {
- PhoenixInputSplit inputSplit;
-
- HRegionLocation location = regionLocator.getRegionLocation(scans.get(0).getStartRow()
- , false);
- long regionSize = sizeCalculator.getRegionSize(location.getRegionInfo().getRegionName());
- String regionLocation = PhoenixStorageHandlerUtil.getRegionLocation(location, LOG);
-
- if (splitByStats) {
- for (Scan aScan : scans) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Split for scan : " + aScan + "with scanAttribute : " + aScan
- .getAttributesMap() + " [scanCache, cacheBlock, scanBatch] : [" +
- aScan.getCaching() + ", " + aScan.getCacheBlocks() + ", " + aScan
- .getBatch() + "] and regionLocation : " + regionLocation);
- }
-
- inputSplit = new PhoenixInputSplit(new ArrayList<>(Arrays.asList(aScan)), tablePaths[0],
- regionLocation, regionSize);
- inputSplit.setQuery(query);
- psplits.add(inputSplit);
- }
- } else {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Scan count[" + scans.size() + "] : " + Bytes.toStringBinary(scans
- .get(0).getStartRow()) + " ~ " + Bytes.toStringBinary(scans.get(scans
- .size() - 1).getStopRow()));
- LOG.debug("First scan : " + scans.get(0) + "with scanAttribute : " + scans
- .get(0).getAttributesMap() + " [scanCache, cacheBlock, scanBatch] : " +
- "[" + scans.get(0).getCaching() + ", " + scans.get(0).getCacheBlocks()
- + ", " + scans.get(0).getBatch() + "] and regionLocation : " +
- regionLocation);
-
- for (int i = 0, limit = scans.size(); i < limit; i++) {
- LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " + Bytes
- .toStringBinary(scans.get(i).getAttribute
- (BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY)));
- }
- }
-
- inputSplit = new PhoenixInputSplit(scans, tablePaths[0], regionLocation,
- regionSize);
- inputSplit.setQuery(query);
- psplits.add(inputSplit);
- }
- }
- }
-
- return psplits;
- }
-
- private void setScanCacheSize(JobConf jobConf) {
- int scanCacheSize = jobConf.getInt(PhoenixStorageHandlerConstants.HBASE_SCAN_CACHE, -1);
- if (scanCacheSize > 0) {
- jobConf.setInt(HConstants.HBASE_CLIENT_SCANNER_CACHING, scanCacheSize);
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Generating splits with scanCacheSize : " + scanCacheSize);
- }
- }
-
- @Override
- public RecordReader<WritableComparable, T> getRecordReader(InputSplit split, JobConf job,
- Reporter reporter) throws
- IOException {
- final QueryPlan queryPlan = getQueryPlan(job, ((PhoenixInputSplit) split).getQuery());
- @SuppressWarnings("unchecked")
- final Class<T> inputClass = (Class<T>) job.getClass(PhoenixConfigurationUtil.INPUT_CLASS,
- PhoenixResultWritable.class);
-
- PhoenixRecordReader<T> recordReader = new PhoenixRecordReader<T>(inputClass, job,
- queryPlan);
- recordReader.initialize(split);
-
- return recordReader;
- }
-
- /**
- * Returns the query plan associated with the select query.
- */
- private QueryPlan getQueryPlan(final Configuration configuration, String selectStatement)
- throws IOException {
- try {
- final String currentScnValue = configuration.get(PhoenixConfigurationUtil
- .CURRENT_SCN_VALUE);
- final Properties overridingProps = new Properties();
- if (currentScnValue != null) {
- overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
- }
- final Connection connection = PhoenixConnectionUtil.getInputConnection(configuration,
- overridingProps);
- if (selectStatement == null) {
- throw new NullPointerException();
- }
- final Statement statement = connection.createStatement();
- final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Compiled query : " + selectStatement);
- }
-
- // Optimize the query plan so that we potentially use secondary indexes
- final QueryPlan queryPlan = pstmt.optimizeQuery(selectStatement);
- // Initialize the query plan so it sets up the parallel scans
- queryPlan.iterator(MapReduceParallelScanGrouper.getInstance());
- return queryPlan;
- } catch (Exception exception) {
- LOG.error(String.format("Failed to get the query plan with error [%s]", exception.getMessage()));
- throw new RuntimeException(exception);
- }
- }
-}
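
[Review note] generateSplits() above either emits one split per Scan (when
split.by.stats is true) or one split per scan group. Stripped of the HBase
plumbing, the branching has this shape (strings stand in for scans; purely
illustrative, not the patch's API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SplitShapeSketch {
        // scanGroups mirrors qplan.getScans(): a list of scan lists.
        static List<List<String>> toSplits(List<List<String>> scanGroups,
                                           boolean splitByStats) {
            List<List<String>> splits = new ArrayList<>();
            for (List<String> scans : scanGroups) {
                if (splitByStats) {
                    for (String scan : scans) {
                        splits.add(Collections.singletonList(scan)); // one split per scan
                    }
                } else {
                    splits.add(scans); // one split per scan group
                }
            }
            return splits;
        }

        public static void main(String[] args) {
            List<List<String>> groups = Arrays.asList(
                    Arrays.asList("scan-1", "scan-2"), Arrays.asList("scan-3"));
            System.out.println(toSplits(groups, true));  // [[scan-1], [scan-2], [scan-3]]
            System.out.println(toSplits(groups, false)); // [[scan-1, scan-2], [scan-3]]
        }
    }
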
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
deleted file mode 100644
index 71abbdb..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputSplit.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.phoenix.query.KeyRange;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * InputSplit implementation. Represents the data to be processed by an individual Mapper
- */
-public class PhoenixInputSplit extends FileSplit implements InputSplit {
-
- private List<Scan> scans;
- private KeyRange keyRange;
-
- private long regionSize;
-
- // The query is carried in the split because it is not delivered via the jobConf.
- private String query;
-
- public PhoenixInputSplit() {
- }
-
- public PhoenixInputSplit(final List<Scan> scans, Path dummyPath, String regionLocation, long
- length) {
- super(dummyPath, 0, 0, new String[]{regionLocation});
-
- regionSize = length;
-
- if(scans == null) {
- throw new NullPointerException();
- }
- if (scans.isEmpty()) {
- throw new IllegalStateException();
- }
- this.scans = scans;
- init();
- }
-
- public List<Scan> getScans() {
- return scans;
- }
-
- public KeyRange getKeyRange() {
- return keyRange;
- }
-
- public String getQuery() {
- return query;
- }
-
- public void setQuery(String query) {
- this.query = query;
- }
-
- private void init() {
- this.keyRange = KeyRange.getKeyRange(scans.get(0).getStartRow(), scans.get(scans.size() -
- 1).getStopRow());
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- super.write(out);
-
- if (scans == null) {
- throw new NullPointerException();
- }
- WritableUtils.writeVInt(out, scans.size());
- for (Scan scan : scans) {
- ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
- byte[] protoScanBytes = protoScan.toByteArray();
- WritableUtils.writeVInt(out, protoScanBytes.length);
- out.write(protoScanBytes);
- }
-
- WritableUtils.writeString(out, query);
- WritableUtils.writeVLong(out, regionSize);
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- super.readFields(in);
-
- int count = WritableUtils.readVInt(in);
- scans = new ArrayList<>(count);
- for (int i = 0; i < count; i++) {
- byte[] protoScanBytes = new byte[WritableUtils.readVInt(in)];
- in.readFully(protoScanBytes);
- ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
- Scan scan = ProtobufUtil.toScan(protoScan);
- scans.add(scan);
- }
- init();
-
- query = WritableUtils.readString(in);
- regionSize = WritableUtils.readVLong(in);
- }
-
- @Override
- public long getLength() {
- return regionSize;
- }
-
- @Override
- public String[] getLocations() throws IOException {
- return new String[]{};
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + keyRange.hashCode();
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null) {
- return false;
- }
- if (!(obj instanceof PhoenixInputSplit)) {
- return false;
- }
- PhoenixInputSplit other = (PhoenixInputSplit) obj;
- if (keyRange == null) {
- if (other.keyRange != null) {
- return false;
- }
- } else if (!keyRange.equals(other.keyRange)) {
- return false;
- }
- return true;
- }
-}
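
[Review note] write()/readFields() above use a count-prefixed, length-prefixed
layout: a vint scan count, then vint(length) + protobuf bytes per scan, then the
query string and region size. The same framing with plain java.io (fixed-width
ints instead of Hadoop vints; illustrative only):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;

    public class FramedRoundTripSketch {
        public static void main(String[] args) throws IOException {
            List<byte[]> scans = new ArrayList<>();
            scans.add("scan-a".getBytes(StandardCharsets.UTF_8));
            scans.add("scan-b".getBytes(StandardCharsets.UTF_8));

            // Write: count prefix, then length-prefixed payloads, then the query.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bos);
            out.writeInt(scans.size());
            for (byte[] scan : scans) {
                out.writeInt(scan.length);
                out.write(scan);
            }
            out.writeUTF("SELECT ...");

            // Read back in the exact same order, as readFields() must.
            DataInputStream in = new DataInputStream(
                    new ByteArrayInputStream(bos.toByteArray()));
            int count = in.readInt();
            for (int i = 0; i < count; i++) {
                byte[] payload = new byte[in.readInt()];
                in.readFully(payload);
                System.out.println(new String(payload, StandardCharsets.UTF_8));
            }
            System.out.println(in.readUTF());
        }
    }
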
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
deleted file mode 100644
index 734f021..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.phoenix.compile.QueryPlan;
-import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
-import org.apache.phoenix.hive.PhoenixRowKey;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.iterate.ConcatResultIterator;
-import org.apache.phoenix.iterate.LookAheadResultIterator;
-import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
-import org.apache.phoenix.iterate.PeekingResultIterator;
-import org.apache.phoenix.iterate.ResultIterator;
-import org.apache.phoenix.iterate.RoundRobinResultIterator;
-import org.apache.phoenix.iterate.SequenceResultIterator;
-import org.apache.phoenix.iterate.TableResultIterator;
-import org.apache.phoenix.jdbc.PhoenixResultSet;
-import org.apache.phoenix.monitoring.ReadMetricQueue;
-import org.apache.phoenix.monitoring.ScanMetricsHolder;
-
-
-/**
- * RecordReader implementation that iterates over the records.
- */
-@SuppressWarnings("rawtypes")
-public class PhoenixRecordReader<T extends DBWritable> implements
- RecordReader<WritableComparable, T> {
-
- private static final Log LOG = LogFactory.getLog(PhoenixRecordReader.class);
-
- private final Configuration configuration;
- private final QueryPlan queryPlan;
- private WritableComparable key;
- private T value = null;
- private Class<T> inputClass;
- private ResultIterator resultIterator = null;
- private PhoenixResultSet resultSet;
- private long readCount;
-
-
- private boolean isTransactional;
-
- public PhoenixRecordReader(Class<T> inputClass, final Configuration configuration, final
- QueryPlan queryPlan) throws IOException {
- this.inputClass = inputClass;
- this.configuration = configuration;
- this.queryPlan = queryPlan;
-
- isTransactional = PhoenixStorageHandlerUtil.isTransactionalTable(configuration);
- }
-
- public void initialize(InputSplit split) throws IOException {
- final PhoenixInputSplit pSplit = (PhoenixInputSplit) split;
- final List<Scan> scans = pSplit.getScans();
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Target table : " + queryPlan.getTableRef().getTable().getPhysicalName());
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Scan count[" + scans.size() + "] : " + Bytes.toStringBinary(scans.get(0)
- .getStartRow()) + " ~ " + Bytes.toStringBinary(scans.get(scans.size() - 1)
- .getStopRow()));
- LOG.debug("First scan : " + scans.get(0) + " scanAttribute : " + scans.get(0)
- .getAttributesMap());
-
- for (int i = 0, limit = scans.size(); i < limit; i++) {
- LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " +
- Bytes.toStringBinary(scans.get(i).getAttribute(BaseScannerRegionObserver
- .EXPECTED_UPPER_REGION_KEY)));
- }
- }
-
- try {
- List<PeekingResultIterator> iterators = new ArrayList<>(scans.size());
- StatementContext ctx = queryPlan.getContext();
- ReadMetricQueue readMetrics = ctx.getReadMetricsQueue();
- String tableName = queryPlan.getTableRef().getTable().getPhysicalName().getString();
- long renewScannerLeaseThreshold = queryPlan.getContext().getConnection()
- .getQueryServices().getRenewLeaseThresholdMilliSeconds();
- for (Scan scan : scans) {
- scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes
- .toBytes(true));
- ScanMetricsHolder scanMetricsHolder = ScanMetricsHolder.getInstance(readMetrics, tableName, scan, ctx.getConnection().getLogLevel());
- final TableResultIterator tableResultIterator = new TableResultIterator(
- queryPlan.getContext().getConnection().getMutationState(), scan, scanMetricsHolder,
- renewScannerLeaseThreshold, queryPlan, MapReduceParallelScanGrouper.getInstance());
-
- PeekingResultIterator peekingResultIterator = LookAheadResultIterator.wrap
- (tableResultIterator);
- iterators.add(peekingResultIterator);
- }
- ResultIterator iterator = queryPlan.useRoundRobinIterator()
- ? RoundRobinResultIterator.newIterator(iterators, queryPlan)
- : ConcatResultIterator.newIterator(iterators);
- if (queryPlan.getContext().getSequenceManager().getSequenceCount() > 0) {
- iterator = new SequenceResultIterator(iterator, queryPlan.getContext()
- .getSequenceManager());
- }
- this.resultIterator = iterator;
- // Clone the row projector as it's not thread safe and would be used
- // simultaneously by multiple threads otherwise.
- this.resultSet = new PhoenixResultSet(this.resultIterator, queryPlan.getProjector()
- .cloneIfNecessary(),
- queryPlan.getContext());
- } catch (SQLException e) {
- LOG.error(String.format(" Error [%s] initializing PhoenixRecordReader. ", e
- .getMessage()));
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public boolean next(WritableComparable key, T value) throws IOException {
- try {
- if (!resultSet.next()) {
- return false;
- }
- value.readFields(resultSet);
-
- if (isTransactional) {
- ((PhoenixResultWritable) value).readPrimaryKey((PhoenixRowKey) key);
- }
-
- ++readCount;
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Result[" + readCount + "] : " + ((PhoenixResultWritable) value)
- .getResultMap());
- }
-
- return true;
- } catch (SQLException e) {
- LOG.error(String.format(" Error [%s] occurred while iterating over the resultset. ",
- e.getMessage()));
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public WritableComparable createKey() {
- if (isTransactional) {
- key = new PhoenixRowKey();
- } else {
- key = NullWritable.get();
- }
-
- return key;
- }
-
- @Override
- public T createValue() {
- value = ReflectionUtils.newInstance(inputClass, this.configuration);
- return value;
- }
-
- @Override
- public long getPos() throws IOException {
- return 0;
- }
-
- @Override
- public void close() throws IOException {
- if (LOG.isInfoEnabled()) {
- LOG.info("Read Count : " + readCount);
- }
-
- if (resultIterator != null) {
- try {
- resultIterator.close();
- } catch (SQLException e) {
- LOG.error(" Error closing resultset.");
- throw new RuntimeException(e);
- }
- }
-
- }
-
- @Override
- public float getProgress() throws IOException {
- return 0;
- }
-}
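
[Review note] initialize() above wraps each TableResultIterator in a
LookAheadResultIterator before concatenation. The look-ahead (peeking) pattern
itself, reduced to plain Java (not the Phoenix classes; a sketch only):

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.NoSuchElementException;

    public class PeekingIteratorSketch<T> implements Iterator<T> {
        private final Iterator<T> delegate;
        private T next; // buffered look-ahead element

        public PeekingIteratorSketch(Iterator<T> delegate) {
            this.delegate = delegate;
            advance();
        }

        private void advance() {
            next = delegate.hasNext() ? delegate.next() : null;
        }

        public T peek() { // inspect without consuming
            if (next == null) throw new NoSuchElementException();
            return next;
        }

        @Override public boolean hasNext() { return next != null; }

        @Override public T next() {
            T current = peek();
            advance();
            return current;
        }

        public static void main(String[] args) {
            PeekingIteratorSketch<String> it =
                    new PeekingIteratorSketch<>(Arrays.asList("a", "b").iterator());
            System.out.println(it.peek()); // a (not consumed)
            System.out.println(it.next()); // a
            System.out.println(it.next()); // b
        }
    }
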
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
deleted file mode 100644
index c6884df..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordWriter.java
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
-import org.apache.hadoop.hive.ql.io.RecordUpdater;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapred.RecordWriter;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.phoenix.hive.PhoenixSerializer;
-import org.apache.phoenix.hive.PhoenixSerializer.DmlType;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-import org.apache.phoenix.schema.ConcurrentTableMutationException;
-import org.apache.phoenix.schema.MetaDataClient;
-import org.apache.phoenix.util.QueryUtil;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.Properties;
-
-/**
- *
- * RecordWriter implementation that writes records to the output table.
- * WARNING: the WAL-disable setting may not work reliably when the WAL is being
- * enabled/disabled concurrently.
- *
- */
-public class PhoenixRecordWriter<T extends DBWritable> implements RecordWriter<NullWritable, T>,
- org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter, RecordUpdater {
-
- private static final Log LOG = LogFactory.getLog(PhoenixRecordWriter.class);
-
- private Connection conn;
- private PreparedStatement pstmt;
- private long batchSize;
- private long numRecords = 0;
-
- private Configuration config;
- private String tableName;
- private MetaDataClient metaDataClient;
- private boolean restoreWalMode;
-
- // For RecordUpdater
- private long rowCountDelta = 0;
- private PhoenixSerializer phoenixSerializer;
- private ObjectInspector objInspector;
- private PreparedStatement pstmtForDelete;
-
- // For RecordUpdater
- public PhoenixRecordWriter(Path path, AcidOutputFormat.Options options) throws IOException {
- Configuration config = options.getConfiguration();
- Properties props = new Properties();
-
- try {
- initialize(config, props);
- } catch (SQLException e) {
- throw new IOException(e);
- }
-
- this.objInspector = options.getInspector();
- try {
- phoenixSerializer = new PhoenixSerializer(config, options.getTableProperties());
- } catch (SerDeException e) {
- throw new IOException(e);
- }
- }
-
- public PhoenixRecordWriter(final Configuration configuration, final Properties props) throws
- SQLException {
- initialize(configuration, props);
- }
-
- private void initialize(Configuration config, Properties properties) throws SQLException {
- this.config = config;
- tableName = config.get(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
-
- // Disable WAL
- String walConfigName = tableName.toLowerCase() + PhoenixStorageHandlerConstants.DISABLE_WAL;
- boolean disableWal = config.getBoolean(walConfigName, false);
- if (disableWal) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Property " + walConfigName + " is true. batch.mode will be set true. ");
- }
-
- properties.setProperty(PhoenixStorageHandlerConstants.BATCH_MODE, "true");
- }
-
- this.conn = PhoenixConnectionUtil.getInputConnection(config, properties);
-
- if (disableWal) {
- metaDataClient = new MetaDataClient((PhoenixConnection) conn);
-
- if (!PhoenixUtil.isDisabledWal(metaDataClient, tableName)) {
- // Execute an ALTER TABLE statement if DISABLE_WAL is not already set.
- try {
- PhoenixUtil.alterTableForWalDisable(conn, tableName, true);
- } catch (ConcurrentTableMutationException e) {
- if (LOG.isWarnEnabled()) {
- LOG.warn("Another mapper or task processing wal disable");
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(tableName + "s wal disabled.");
- }
-
- // restore original value of disable_wal at the end.
- restoreWalMode = true;
- }
- }
-
- this.batchSize = PhoenixConfigurationUtil.getBatchSize(config);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Batch-size : " + batchSize);
- }
-
- String upsertQuery = QueryUtil.constructUpsertStatement(tableName, PhoenixUtil
- .getColumnInfoList(conn, tableName));
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Upsert-query : " + upsertQuery);
- }
- this.pstmt = this.conn.prepareStatement(upsertQuery);
- }
-
- @Override
- public void write(NullWritable key, T record) throws IOException {
- try {
- record.write(pstmt);
- numRecords++;
- pstmt.executeUpdate();
-
- if (numRecords % batchSize == 0) {
- LOG.debug("Commit called on a batch of size : " + batchSize);
- conn.commit();
- }
- } catch (SQLException e) {
- throw new IOException("Exception while writing to table.", e);
- }
- }
-
- @Override
- public void close(Reporter reporter) throws IOException {
- try {
- conn.commit();
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Wrote row : " + numRecords);
- }
- } catch (SQLException e) {
- LOG.error("SQLException while performing the commit for the task.");
- throw new IOException(e);
- } finally {
- try {
- if (restoreWalMode && PhoenixUtil.isDisabledWal(metaDataClient, tableName)) {
- try {
- PhoenixUtil.alterTableForWalDisable(conn, tableName, false);
- } catch (ConcurrentTableMutationException e) {
- if (LOG.isWarnEnabled()) {
- LOG.warn("Another mapper or task processing wal enable");
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(tableName + "s wal enabled.");
- }
- }
-
- // flush if [table-name].auto.flush is true.
- String autoFlushConfigName = tableName.toLowerCase() +
- PhoenixStorageHandlerConstants.AUTO_FLUSH;
- boolean autoFlush = config.getBoolean(autoFlushConfigName, false);
- if (autoFlush) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("autoFlush is true.");
- }
-
- PhoenixUtil.flush(conn, tableName);
- }
-
- PhoenixUtil.closeResource(pstmt);
- PhoenixUtil.closeResource(pstmtForDelete);
- PhoenixUtil.closeResource(conn);
- } catch (SQLException ex) {
- LOG.error("SQLException while closing the connection for the task.");
- throw new IOException(ex);
- }
- }
- }
-
- // For Testing
- public boolean isRestoreWalMode() {
- return restoreWalMode;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public void write(Writable w) throws IOException {
- PhoenixResultWritable row = (PhoenixResultWritable) w;
-
- write(NullWritable.get(), (T) row);
- }
-
- @Override
- public void close(boolean abort) throws IOException {
- close(Reporter.NULL);
- }
-
- @Override
- public void insert(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("insert transaction : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil.toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.INSERT);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- write(pResultWritable);
- rowCountDelta++;
- }
-
- @Override
- public void update(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("update transaction : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil
- .toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.UPDATE);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- write(pResultWritable);
- }
-
- @Override
- public void delete(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("delete transaction : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil.toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.DELETE);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- if (pstmtForDelete == null) {
- try {
- String deleteQuery = PhoenixUtil.constructDeleteStatement(conn, tableName);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Delete query : " + deleteQuery);
- }
-
- pstmtForDelete = conn.prepareStatement(deleteQuery);
- } catch (SQLException e) {
- throw new IOException(e);
- }
- }
-
- delete(pResultWritable);
-
- rowCountDelta--;
- }
-
- private void delete(PhoenixResultWritable pResultWritable) throws IOException {
- try {
- pResultWritable.delete(pstmtForDelete);
- numRecords++;
- pstmtForDelete.executeUpdate();
-
- if (numRecords % batchSize == 0) {
- LOG.debug("Commit called on a batch of size : " + batchSize);
- conn.commit();
- }
- } catch (SQLException e) {
- throw new IOException("Exception while deleting to table.", e);
- }
- }
-
- @Override
- public void flush() throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Flush called");
- }
-
- try {
- conn.commit();
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Written row : " + numRecords);
- }
- } catch (SQLException e) {
- LOG.error("SQLException while performing the commit for the task.");
- throw new IOException(e);
- }
- }
-
- @Override
- public SerDeStats getStats() {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getStats called");
- }
-
- SerDeStats stats = new SerDeStats();
- stats.setRowCount(rowCountDelta);
- // Don't worry about setting raw data size diff. There is no reasonable way to calculate
- // that without finding the row we are updating or deleting, which would be a mess.
- return stats;
- }
-}
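
[Review note] write() and close() above implement commit-every-N batching over a
prepared upsert. The skeleton with plain JDBC (the statement and table name are
placeholders; a compilable sketch that needs a live connection, not the patch's
API):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class BatchedUpsertSketch {
        public static void writeAll(Connection conn, Iterable<Integer> rows,
                                    long batchSize) throws SQLException {
            conn.setAutoCommit(false);
            long numRecords = 0;
            try (PreparedStatement pstmt =
                         conn.prepareStatement("UPSERT INTO T (ID) VALUES (?)")) {
                for (Integer row : rows) {
                    pstmt.setInt(1, row);
                    pstmt.executeUpdate();
                    if (++numRecords % batchSize == 0) {
                        conn.commit(); // flush a full batch, as write() does
                    }
                }
            }
            conn.commit(); // final partial batch, as close(Reporter) does
        }
    }
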
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java
deleted file mode 100644
index a19342a..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixByteObjectInspector.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.ByteWritable;
-
-/**
- * ObjectInspector for byte type
- */
-public class PhoenixByteObjectInspector extends AbstractPhoenixObjectInspector<ByteWritable>
- implements ByteObjectInspector {
-
- public PhoenixByteObjectInspector() {
- super(TypeInfoFactory.byteTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new Byte((Byte) o);
- }
-
- @Override
- public ByteWritable getPrimitiveWritableObject(Object o) {
- return new ByteWritable(get(o));
- }
-
- @Override
- public byte get(Object o) {
- Byte value = null;
-
- if (o != null) {
- try {
- value = (Byte) o;
- } catch (Exception e) {
- logExceptionMessage(o, "BYTE");
- }
- }
-
- return value;
- }
-
-}
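
[Review note] In get(Object) above, `value` remains null when the cast fails, so
the implicit unboxing to `byte` in the return statement throws a
NullPointerException instead of yielding a default. The Double and Int
inspectors deleted below share the same hazard. A two-line demonstration:

    public class UnboxingNpeSketch {
        public static void main(String[] args) {
            Byte value = null;
            byte b = value; // implicit value.byteValue() -> NullPointerException
            System.out.println(b);
        }
    }
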
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java
deleted file mode 100644
index 3853c18..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDecimalObjectInspector.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.metastore.api.Decimal;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-import java.math.BigDecimal;
-
-public class PhoenixDecimalObjectInspector extends
- AbstractPhoenixObjectInspector<HiveDecimalWritable>
- implements HiveDecimalObjectInspector {
-
- public PhoenixDecimalObjectInspector() {
- this(TypeInfoFactory.decimalTypeInfo);
- }
-
- public PhoenixDecimalObjectInspector(PrimitiveTypeInfo typeInfo) {
- super(typeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new BigDecimal(o.toString());
- }
-
- @Override
- public HiveDecimal getPrimitiveJavaObject(Object o) {
- if (o == null) {
- return null;
- }
-
- return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create((BigDecimal) o),(DecimalTypeInfo)typeInfo);
- }
-
- @Override
- public HiveDecimalWritable getPrimitiveWritableObject(Object o) {
- HiveDecimalWritable value = null;
-
- if (o != null) {
- try {
- value = new HiveDecimalWritable(getPrimitiveJavaObject(o));
- } catch (Exception e) {
- logExceptionMessage(o, "DECIMAL");
- }
- }
-
- return value;
- }
-
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java
deleted file mode 100644
index 9f440ed..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixDoubleObjectInspector.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.DoubleWritable;
-
-/**
- * ObjectInspector for double type
- */
-public class PhoenixDoubleObjectInspector extends AbstractPhoenixObjectInspector<DoubleWritable>
- implements DoubleObjectInspector {
-
- public PhoenixDoubleObjectInspector() {
- super(TypeInfoFactory.doubleTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new Double((Double) o);
- }
-
- @Override
- public DoubleWritable getPrimitiveWritableObject(Object o) {
- return new DoubleWritable(get(o));
- }
-
- @Override
- public double get(Object o) {
- Double value = null;
-
- if (o != null) {
- try {
- value = ((Double) o).doubleValue();
- } catch (Exception e) {
- logExceptionMessage(o, "LONG");
- }
- }
-
- return value;
- }
-
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java
deleted file mode 100644
index 3511ee3..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixIntObjectInspector.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.IntWritable;
-
-public class PhoenixIntObjectInspector extends AbstractPhoenixObjectInspector<IntWritable>
- implements IntObjectInspector {
-
- public PhoenixIntObjectInspector() {
- super(TypeInfoFactory.intTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new Integer((Integer) o);
- }
-
- @Override
- public Category getCategory() {
- return Category.PRIMITIVE;
- }
-
- @Override
- public IntWritable getPrimitiveWritableObject(Object o) {
- return new IntWritable(get(o));
- }
-
- @Override
- public int get(Object o) {
- Integer value = null;
-
- if (o != null) {
- try {
- value = ((Integer) o).intValue();
- } catch (Exception e) {
- logExceptionMessage(o, "INT");
- }
- }
-
- return value;
- }
-
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
deleted file mode 100644
index 8ffb5fa..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
+++ /dev/null
@@ -1,522 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.ql.index;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
-import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
-import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.lib.NodeProcessor;
-import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.lib.Rule;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
-import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUtcTimestamp;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Stack;
-
-/**
- * Clone of org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer with a modified
- * analyzePredicate method.
- */
-public class IndexPredicateAnalyzer {
-
- private static final Log LOG = LogFactory.getLog(IndexPredicateAnalyzer.class);
-
- private final Set<String> udfNames;
- private final Map<String, Set<String>> columnToUDFs;
- private FieldValidator fieldValidator;
-
- private boolean acceptsFields;
-
- public IndexPredicateAnalyzer() {
- udfNames = new HashSet<String>();
- columnToUDFs = new HashMap<String, Set<String>>();
- }
-
- public void setFieldValidator(FieldValidator fieldValidator) {
- this.fieldValidator = fieldValidator;
- }
-
- /**
- * Registers a comparison operator as one which can be satisfied by an index
- * search. Unless this is called, analyzePredicate will never find any
- * indexable conditions.
- *
- * @param udfName name of comparison operator as returned by either
- * {@link GenericUDFBridge#getUdfName} (for simple UDF's) or
- * udf.getClass().getName() (for generic UDF's).
- */
- public void addComparisonOp(String udfName) {
- udfNames.add(udfName);
- }
-
- /**
- * Clears the set of column names allowed in comparisons. (Initially, all
- * column names are allowed.)
- */
- public void clearAllowedColumnNames() {
- columnToUDFs.clear();
- }
-
- /**
- * Adds a column name to the set of column names allowed.
- *
- * @param columnName name of column to be allowed
- */
- public void allowColumnName(String columnName) {
- columnToUDFs.put(columnName, udfNames);
- }
-
- /**
- * Adds allowed comparison functions for the given column.
- *
- * @param columnName name of the column
- * @param udfs comparison UDF names allowed for that column
- */
- public void addComparisonOp(String columnName, String... udfs) {
- Set<String> allowed = columnToUDFs.get(columnName);
- if (allowed == null || allowed == udfNames) {
- // override
- columnToUDFs.put(columnName, new HashSet<String>(Arrays.asList(udfs)));
- } else {
- allowed.addAll(Arrays.asList(udfs));
- }
- }
-
- /**
- * Analyzes a predicate.
- *
- * @param predicate predicate to be analyzed
- * @param searchConditions receives conditions produced by analysis
- * @return residual predicate which could not be translated to
- * searchConditions
- */
- public ExprNodeDesc analyzePredicate(ExprNodeDesc predicate, final List<IndexSearchCondition>
- searchConditions) {
-
- Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
- NodeProcessor nodeProcessor = new NodeProcessor() {
- @Override
- public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object...
- nodeOutputs) throws SemanticException {
-
- // We can only push down stuff which appears as part of
- // a pure conjunction: reject OR, CASE, etc.
- for (Node ancestor : stack) {
- if (nd == ancestor) {
- break;
- }
- if (!FunctionRegistry.isOpAnd((ExprNodeDesc) ancestor)) {
- return nd;
- }
- }
-
- return analyzeExpr((ExprNodeGenericFuncDesc) nd, searchConditions, nodeOutputs);
- }
- };
-
- Dispatcher disp = new DefaultRuleDispatcher(nodeProcessor, opRules, null);
- GraphWalker ogw = new DefaultGraphWalker(disp);
- ArrayList<Node> topNodes = new ArrayList<Node>();
- topNodes.add(predicate);
- HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
-
- try {
- ogw.startWalking(topNodes, nodeOutput);
- } catch (SemanticException ex) {
- throw new RuntimeException(ex);
- }
-
- ExprNodeDesc residualPredicate = (ExprNodeDesc) nodeOutput.get(predicate);
- return residualPredicate;
- }
-
- // Check whether an ExprNodeColumnDesc is wrapped in expr.
- // If so, peel it off; otherwise return expr itself.
- private ExprNodeDesc getColumnExpr(ExprNodeDesc expr) {
- if (expr instanceof ExprNodeColumnDesc) {
- return expr;
- }
- ExprNodeGenericFuncDesc funcDesc = null;
- if (expr instanceof ExprNodeGenericFuncDesc) {
- funcDesc = (ExprNodeGenericFuncDesc) expr;
- }
- if (null == funcDesc) {
- return expr;
- }
- GenericUDF udf = funcDesc.getGenericUDF();
- // Check if it's a simple cast expression.
- if ((udf instanceof GenericUDFBridge || udf instanceof GenericUDFToBinary || udf
- instanceof GenericUDFToChar
- || udf instanceof GenericUDFToVarchar || udf instanceof GenericUDFToDecimal
- || udf instanceof GenericUDFToDate || udf instanceof GenericUDFToUnixTimeStamp
- || udf instanceof GenericUDFToUtcTimestamp) && funcDesc.getChildren().size() == 1
- && funcDesc.getChildren().get(0) instanceof ExprNodeColumnDesc) {
- return expr.getChildren().get(0);
- }
- return expr;
- }
-
- private void processingBetweenOperator(ExprNodeGenericFuncDesc expr,
- List<IndexSearchCondition> searchConditions, Object...
- nodeOutputs) {
- ExprNodeColumnDesc columnDesc = null;
- String[] fields = null;
-
- if (nodeOutputs[1] instanceof ExprNodeFieldDesc) {
- // rowKey field
- ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[1];
- fields = ExprNodeDescUtils.extractFields(fieldDesc);
-
- ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc)
- nodeOutputs[1], (ExprNodeDesc) nodeOutputs[2]);
- columnDesc = (ExprNodeColumnDesc) extracted[0];
- } else if (nodeOutputs[1] instanceof ExprNodeGenericFuncDesc) {
- columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[1])
- .getChildren().get(0);
- } else {
- columnDesc = (ExprNodeColumnDesc) nodeOutputs[1];
- }
-
- String udfName = expr.getGenericUDF().getUdfName();
- ExprNodeConstantDesc[] betweenConstants = new ExprNodeConstantDesc[]{
- (ExprNodeConstantDesc) nodeOutputs[2], (ExprNodeConstantDesc) nodeOutputs[3]};
- boolean isNot = (Boolean) ((ExprNodeConstantDesc) nodeOutputs[0]).getValue();
-
- searchConditions.add(new IndexSearchCondition(columnDesc, udfName, betweenConstants,
- expr, fields, isNot));
- }
-
- private void processingInOperator(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition>
- searchConditions, boolean isNot, Object... nodeOutputs) {
- ExprNodeColumnDesc columnDesc = null;
- String[] fields = null;
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Processing In Operator. nodeOutputs : " + new ArrayList<>(Arrays.asList(nodeOutputs)));
- }
-
- if (nodeOutputs[0] instanceof ExprNodeFieldDesc) {
- // rowKey field
- ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[0];
- fields = ExprNodeDescUtils.extractFields(fieldDesc);
-
- ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc)
- nodeOutputs[0], (ExprNodeDesc) nodeOutputs[1]);
-
- if (extracted == null) { // added for Tez
- return;
- }
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("nodeOutputs[0] : " + nodeOutputs[0] + ", nodeOutputs[1] : " +
- nodeOutputs[1] + " => " + new ArrayList<>(Arrays.asList(extracted)));
- }
-
- columnDesc = (ExprNodeColumnDesc) extracted[0];
- } else if (nodeOutputs[0] instanceof ExprNodeGenericFuncDesc) {
- columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[0])
- .getChildren().get(0);
- } else {
- columnDesc = (ExprNodeColumnDesc) nodeOutputs[0];
- }
-
- String udfName = expr.getGenericUDF().getUdfName();
- ExprNodeConstantDesc[] inConstantDescs = new ExprNodeConstantDesc[nodeOutputs.length - 1];
-
- for (int i = 0, limit = inConstantDescs.length; i < limit; i++) {
- if (!(nodeOutputs[i + 1] instanceof ExprNodeConstantDesc)) { // added for Tez
- return;
- }
-
- inConstantDescs[i] = (ExprNodeConstantDesc) nodeOutputs[i + 1];
- }
-
- searchConditions.add(new IndexSearchCondition(columnDesc, udfName, inConstantDescs, expr,
- fields, isNot));
- }
-
- private void processingNullOperator(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition>
- searchConditions, Object... nodeOutputs) {
- ExprNodeColumnDesc columnDesc = null;
- String[] fields = null;
-
- if (nodeOutputs[0] instanceof ExprNodeFieldDesc) {
- // rowKey field
- ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[0];
- fields = ExprNodeDescUtils.extractFields(fieldDesc);
-
- ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc)
- nodeOutputs[0], new ExprNodeConstantDesc());
- columnDesc = (ExprNodeColumnDesc) extracted[0];
- } else if (nodeOutputs[0] instanceof ExprNodeGenericFuncDesc) {
- columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[0])
- .getChildren().get(0);
- } else {
- columnDesc = (ExprNodeColumnDesc) nodeOutputs[0];
- }
-
- String udfName = expr.getGenericUDF().getUdfName();
-
- searchConditions.add(new IndexSearchCondition(columnDesc, udfName, null, expr, fields,
- false));
- }
-
- private void processingNotNullOperator(ExprNodeGenericFuncDesc expr,
- List<IndexSearchCondition> searchConditions, Object...
- nodeOutputs) {
- ExprNodeColumnDesc columnDesc = null;
- String[] fields = null;
-
- if (nodeOutputs[0] instanceof ExprNodeFieldDesc) {
- // rowKey field
- ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) nodeOutputs[0];
- fields = ExprNodeDescUtils.extractFields(fieldDesc);
-
- ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair((ExprNodeDesc)
- nodeOutputs[0], new ExprNodeConstantDesc());
- columnDesc = (ExprNodeColumnDesc) extracted[0];
- } else if (nodeOutputs[0] instanceof ExprNodeGenericFuncDesc) {
- columnDesc = (ExprNodeColumnDesc) ((ExprNodeGenericFuncDesc) nodeOutputs[0])
- .getChildren().get(0);
- } else {
- columnDesc = (ExprNodeColumnDesc) nodeOutputs[0];
- }
-
- String udfName = expr.getGenericUDF().getUdfName();
-
- searchConditions.add(new IndexSearchCondition(columnDesc, udfName, null, expr, fields,
- true));
- }
-
- private ExprNodeDesc analyzeExpr(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition>
- searchConditions, Object... nodeOutputs) throws SemanticException {
-
- if (FunctionRegistry.isOpAnd(expr)) {
- assert (nodeOutputs.length == 2);
- ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
- ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
- if (residual1 == null) {
- return residual2;
- }
- if (residual2 == null) {
- return residual1;
- }
- List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
- residuals.add(residual1);
- residuals.add(residual2);
- return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry
- .getGenericUDFForAnd(), residuals);
- }
-
- GenericUDF genericUDF = expr.getGenericUDF();
- if (!(genericUDF instanceof GenericUDFBaseCompare)) {
- // 2015-10-22 Added by JeongMin Ju: handling of Between/In operators.
- if (genericUDF instanceof GenericUDFBetween) {
- // For NOT BETWEEN, the first element of nodeOutputs is true; otherwise false.
- processingBetweenOperator(expr, searchConditions, nodeOutputs);
- return expr;
- } else if (genericUDF instanceof GenericUDFIn) {
- // Plain IN; the NOT IN case is handled below, where IN appears as a child of NOT.
- processingInOperator(expr, searchConditions, false, nodeOutputs);
- return expr;
- } else if (genericUDF instanceof GenericUDFOPNot &&
- ((ExprNodeGenericFuncDesc) expr.getChildren().get(0)).getGenericUDF()
- instanceof GenericUDFIn) {
- // For NOT IN, the IN operator appears as a child of the NOT operator.
- processingInOperator((ExprNodeGenericFuncDesc) expr.getChildren().get(0),
- searchConditions, true, ((ExprNodeGenericFuncDesc) nodeOutputs[0])
- .getChildren().toArray());
- return expr;
- } else if (genericUDF instanceof GenericUDFOPNull) {
- processingNullOperator(expr, searchConditions, nodeOutputs);
- return expr;
- } else if (genericUDF instanceof GenericUDFOPNotNull) {
- processingNotNullOperator(expr, searchConditions, nodeOutputs);
- return expr;
- } else {
- return expr;
- }
- }
- ExprNodeDesc expr1 = (ExprNodeDesc) nodeOutputs[0];
- ExprNodeDesc expr2 = (ExprNodeDesc) nodeOutputs[1];
- // We may need to peel off the GenericUDFBridge that is added by CBO or
- // by the user.
- if (expr1.getTypeInfo().equals(expr2.getTypeInfo())) {
- expr1 = getColumnExpr(expr1);
- expr2 = getColumnExpr(expr2);
- }
-
- ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair(expr1, expr2);
- if (extracted == null || (extracted.length > 2 && !acceptsFields)) {
- return expr;
- }
-
- ExprNodeColumnDesc columnDesc;
- ExprNodeConstantDesc constantDesc;
- if (extracted[0] instanceof ExprNodeConstantDesc) {
- genericUDF = genericUDF.flip();
- columnDesc = (ExprNodeColumnDesc) extracted[1];
- constantDesc = (ExprNodeConstantDesc) extracted[0];
- } else {
- columnDesc = (ExprNodeColumnDesc) extracted[0];
- constantDesc = (ExprNodeConstantDesc) extracted[1];
- }
-
- Set<String> allowed = columnToUDFs.get(columnDesc.getColumn());
- if (allowed == null) {
- return expr;
- }
-
- String udfName = genericUDF.getUdfName();
- if (!allowed.contains(genericUDF.getUdfName())) {
- return expr;
- }
-
- String[] fields = null;
- if (extracted.length > 2) {
- ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) extracted[2];
- if (!isValidField(fieldDesc)) {
- return expr;
- }
- fields = ExprNodeDescUtils.extractFields(fieldDesc);
- }
-
- // We also need to update the expr so that the index query can be
- // generated.
- // Note that Hive does not support UDFToDouble etc. in the query text.
- List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
- list.add(expr1);
- list.add(expr2);
- expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
-
- searchConditions.add(new IndexSearchCondition(columnDesc, udfName, constantDesc, expr,
- fields));
-
- // we converted the expression to a search condition, so
- // remove it from the residual predicate
- return fields == null ? null : expr;
- }
-
- private boolean isValidField(ExprNodeFieldDesc field) {
- return fieldValidator == null || fieldValidator.validate(field);
- }
-
- /**
- * Translates search conditions back to ExprNodeDesc form (as a left-deep
- * conjunction).
- *
- * @param searchConditions (typically produced by analyzePredicate)
- * @return ExprNodeGenericFuncDesc form of search conditions
- */
- public ExprNodeGenericFuncDesc translateSearchConditions(List<IndexSearchCondition>
- searchConditions) {
-
- ExprNodeGenericFuncDesc expr = null;
-
- for (IndexSearchCondition searchCondition : searchConditions) {
- if (expr == null) {
- expr = searchCondition.getComparisonExpr();
- continue;
- }
-
- List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
- children.add(expr);
- children.add(searchCondition.getComparisonExpr());
- expr = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry
- .getGenericUDFForAnd(), children);
- }
-
- return expr;
- }
-
- public void setAcceptsFields(boolean acceptsFields) {
- this.acceptsFields = acceptsFields;
- }
-
- public static interface FieldValidator {
- boolean validate(ExprNodeFieldDesc exprNodeDesc);
- }
-
- public static IndexPredicateAnalyzer createAnalyzer(boolean equalOnly) {
- IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual");
-
- if (equalOnly) {
- return analyzer;
- }
-
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic" +
- ".GenericUDFOPEqualOrGreaterThan");
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic" +
- ".GenericUDFOPEqualOrLessThan");
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan");
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan");
-
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual");
- // apply !=
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween");
- // apply (Not) Between
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn"); //
- // apply (Not) In
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn"); //
- // apply In
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull");
- // apply Null
- analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull");
- // apply Not Null
-
- return analyzer;
- }
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
deleted file mode 100644
index 0ab78ba..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/query/PhoenixQueryBuilder.java
+++ /dev/null
@@ -1,391 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.query;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
-import org.apache.phoenix.hive.util.ColumnMappingUtils;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-import org.apache.phoenix.util.StringUtil;
-
-import static org.apache.phoenix.hive.util.ColumnMappingUtils.getColumnMappingMap;
-
-/**
- * Query builder. Produces a query depending on the column list and conditions.
- */
-public class PhoenixQueryBuilder {
-
- private static final Log LOG = LogFactory.getLog(PhoenixQueryBuilder.class);
-
- private static final String QUERY_TEMPLATE = "select $HINT$ $COLUMN_LIST$ from $TABLE_NAME$";
-
- private static final PhoenixQueryBuilder QUERY_BUILDER = new PhoenixQueryBuilder();
-
- private PhoenixQueryBuilder() {
- if (LOG.isInfoEnabled()) {
- LOG.info("PhoenixQueryBuilder created");
- }
- }
-
- public static PhoenixQueryBuilder getInstance() {
- return QUERY_BUILDER;
- }
-
- private void addConditionColumnToReadColumn(List<String> readColumnList, List<String>
- conditionColumnList) {
- if (readColumnList.isEmpty()) {
- return;
- }
-
- for (String conditionColumn : conditionColumnList) {
- if (!readColumnList.contains(conditionColumn)) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Condition column " + conditionColumn + " does not exist in " +
- "read-columns.");
- }
-
- readColumnList.add(conditionColumn);
- }
- }
- }
-
- private static String findReplacement(JobConf jobConf, String column) {
- Map<String, String> columnMappingMap = getColumnMappingMap(
- jobConf.get(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING));
- if (columnMappingMap != null && columnMappingMap.containsKey(column)) {
- return columnMappingMap.get(column);
- } else {
- return column;
- }
- }
-
- private static List<String> replaceColumns(JobConf jobConf, List<String> columnList) {
- Map<String, String> columnMappingMap = getColumnMappingMap(
- jobConf.get(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING));
- if (columnMappingMap != null) {
- List<String> newList = new ArrayList<>();
- for (String column : columnList) {
- if (columnMappingMap.containsKey(column)) {
- newList.add(columnMappingMap.get(column));
- } else {
- newList.add(column);
- }
- }
- return newList;
- }
- return null;
- }
-
- private String makeQueryString(JobConf jobConf, String tableName, List<String>
- readColumnList, List<IndexSearchCondition> searchConditions, String queryTemplate,
- String hints) throws IOException {
- StringBuilder query = new StringBuilder();
- List<String> conditionColumnList = buildWhereClause(jobConf, query, searchConditions);
-
- if (conditionColumnList.size() > 0) {
- readColumnList = replaceColumns(jobConf, readColumnList);
- addConditionColumnToReadColumn(readColumnList, conditionColumnList);
- query.insert(0, queryTemplate.replace("$HINT$", hints).replace("$COLUMN_LIST$",
- getSelectColumns(jobConf, tableName, readColumnList)).replace("$TABLE_NAME$",
- tableName));
- } else {
- readColumnList = replaceColumns(jobConf, readColumnList);
- query.append(queryTemplate.replace("$HINT$", hints).replace("$COLUMN_LIST$",
- getSelectColumns(jobConf, tableName, readColumnList)).replace("$TABLE_NAME$",
- tableName));
- }
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Input query : " + query.toString());
- }
-
- return query.toString();
- }
-
- private String getSelectColumns(JobConf jobConf, String tableName, List<String>
- readColumnList) throws IOException {
- String selectColumns = String.join(PhoenixStorageHandlerConstants.COMMA,
- ColumnMappingUtils.quoteColumns(readColumnList));
- if (PhoenixStorageHandlerConstants.EMPTY_STRING.equals(selectColumns)) {
- selectColumns = "*";
- } else {
- if (PhoenixStorageHandlerUtil.isTransactionalTable(jobConf)) {
- List<String> pkColumnList = PhoenixUtil.getPrimaryKeyColumnList(jobConf, tableName);
- StringBuilder pkColumns = new StringBuilder();
-
- for (String pkColumn : pkColumnList) {
- if (!readColumnList.contains(pkColumn)) {
- pkColumns.append("\"").append(pkColumn).append("\"" + PhoenixStorageHandlerConstants.COMMA);
- }
- }
-
- selectColumns = pkColumns.toString() + selectColumns;
- }
- }
-
- return selectColumns;
- }
-
- public String buildQuery(JobConf jobConf, String tableName, List<String> readColumnList,
- List<IndexSearchCondition> searchConditions) throws IOException {
- String hints = getHint(jobConf, tableName);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Building query with columns : " + readColumnList + " table name : " +
- tableName + " search conditions : " + searchConditions + " hints : " + hints);
- }
-
- return makeQueryString(jobConf, tableName, new ArrayList<>(readColumnList),
- searchConditions, QUERY_TEMPLATE, hints);
- }
-
- private String getHint(JobConf jobConf, String tableName) {
- StringBuilder hints = new StringBuilder("/*+ ");
- if (!jobConf.getBoolean(PhoenixStorageHandlerConstants.HBASE_SCAN_CACHEBLOCKS, Boolean
- .FALSE)) {
- hints.append("NO_CACHE ");
- }
-
- String queryHint = jobConf.get(tableName + PhoenixStorageHandlerConstants
- .PHOENIX_TABLE_QUERY_HINT);
- if (queryHint != null) {
- hints.append(queryHint);
- }
- hints.append(" */");
-
- return hints.toString();
- }
-
- protected List<String> buildWhereClause(JobConf jobConf, StringBuilder sql,
- List<IndexSearchCondition> conditions)
- throws IOException {
- if (conditions == null || conditions.size() == 0) {
- return Collections.emptyList();
- }
-
- List<String> columns = new ArrayList<>();
- sql.append(" where ");
-
- Iterator<IndexSearchCondition> iter = conditions.iterator();
- appendExpression(jobConf, sql, iter.next(), columns);
- while (iter.hasNext()) {
- sql.append(" and ");
- appendExpression(jobConf, sql, iter.next(), columns);
- }
-
- return columns;
- }
-
- private void appendExpression(JobConf jobConf, StringBuilder sql, IndexSearchCondition condition,
- List<String> columns) {
- Expression expr = findExpression(condition);
- if (expr != null) {
- sql.append(expr.buildExpressionStringFrom(jobConf, condition));
- String column = condition.getColumnDesc().getColumn();
- String rColumn = findReplacement(jobConf, column);
- if (rColumn != null) {
- column = rColumn;
- }
-
- columns.add(column);
- }
- }
-
- private Expression findExpression(final IndexSearchCondition condition) {
- for (Expression exp : Expression.values()) {
- if (exp.isFor(condition)) {
- return exp;
- }
- }
- return null;
- }
-
- private static final StrJoiner JOINER_COMMA = new StrJoiner(", ");
- private static final StrJoiner JOINER_AND = new StrJoiner(" and ");
- private static final StrJoiner JOINER_SPACE = new StrJoiner(" ");
-
- private static class StrJoiner {
- private final String delimiter;
-
- StrJoiner(String delimiter) {
- this.delimiter = delimiter;
- }
-
- public String join(List<String> list) {
- return String.join(this.delimiter, list);
- }
- }
- private enum Expression {
- EQUAL("UDFOPEqual", "="),
- GREATER_THAN_OR_EQUAL_TO("UDFOPEqualOrGreaterThan", ">="),
- GREATER_THAN("UDFOPGreaterThan", ">"),
- LESS_THAN_OR_EQUAL_TO("UDFOPEqualOrLessThan", "<="),
- LESS_THAN("UDFOPLessThan", "<"),
- NOT_EQUAL("UDFOPNotEqual", "!="),
- BETWEEN("GenericUDFBetween", "between", JOINER_AND,true) {
- public boolean checkCondition(IndexSearchCondition condition) {
- return condition.getConstantDescs() != null;
- }
- },
- IN("GenericUDFIn", "in", JOINER_COMMA,true) {
- public boolean checkCondition(IndexSearchCondition condition) {
- return condition.getConstantDescs() != null;
- }
-
- public String createConstants(final String typeName, ExprNodeConstantDesc[] desc) {
- return "(" + super.createConstants(typeName, desc) + ")";
- }
- },
- IS_NULL("GenericUDFOPNull", "is null") {
- public boolean checkCondition(IndexSearchCondition condition) {
- return true;
- }
- },
- IS_NOT_NULL("GenericUDFOPNotNull", "is not null") {
- public boolean checkCondition(IndexSearchCondition condition) {
- return true;
- }
- };
-
- private final String hiveCompOp;
- private final String sqlCompOp;
- private final StrJoiner joiner;
- private final boolean supportNotOperator;
-
- Expression(String hiveCompOp, String sqlCompOp) {
- this(hiveCompOp, sqlCompOp, null, null);
- }
-
- // The trailing String parameter is unused; it only distinguishes this
- // overload from the (String, String, StrJoiner, boolean) constructor.
- Expression(String hiveCompOp, String sqlCompOp, StrJoiner joiner, String ignored) {
- this(hiveCompOp, sqlCompOp, joiner, false);
- }
-
- Expression(String hiveCompOp, String sqlCompOp, StrJoiner joiner, boolean supportNotOp) {
- this.hiveCompOp = hiveCompOp;
- this.sqlCompOp = sqlCompOp;
- this.joiner = joiner;
- this.supportNotOperator = supportNotOp;
- }
-
- public boolean checkCondition(IndexSearchCondition condition) {
- return condition.getConstantDesc().getValue() != null;
- }
-
- public boolean isFor(IndexSearchCondition condition) {
- return condition.getComparisonOp().endsWith(hiveCompOp) && checkCondition(condition);
- }
-
- public String buildExpressionStringFrom(JobConf jobConf, IndexSearchCondition condition) {
- final String type = condition.getColumnDesc().getTypeString();
- String column = condition.getColumnDesc().getColumn();
- String rColumn = findReplacement(jobConf, column);
- if (rColumn != null) {
- column = rColumn;
- }
- return String.join(" ",
- "\"" + column + "\"",
- getSqlCompOpString(condition),
- joiner != null ? createConstants(type, condition.getConstantDescs()) :
- createConstant(type, condition.getConstantDesc()));
- }
-
- public String getSqlCompOpString(IndexSearchCondition condition) {
- return supportNotOperator ?
- (condition.isNot() ? "not " : "") + sqlCompOp : sqlCompOp;
- }
-
- public String createConstant(String typeName, ExprNodeConstantDesc constantDesc) {
- if (constantDesc == null) {
- return StringUtil.EMPTY_STRING;
- }
-
- return createConstantString(typeName, String.valueOf(constantDesc.getValue()));
- }
-
- public String createConstants(final String typeName, ExprNodeConstantDesc[] constantDesc) {
- if (constantDesc == null) {
- return StringUtil.EMPTY_STRING;
- }
- List<String> constants = new ArrayList<>();
- for (ExprNodeConstantDesc s:constantDesc) {
- constants.add(createConstantString(typeName, String.valueOf(s.getValue())));
- }
- return joiner.join(constants);
- }
-
- private static class ConstantStringWrapper {
- private List<String> types;
- private String prefix;
- private String postfix;
-
- ConstantStringWrapper(String type, String prefix, String postfix) {
- this(new ArrayList<>(Arrays.asList(type)), prefix, postfix);
- }
-
- ConstantStringWrapper(List<String> types, String prefix, String postfix) {
- this.types = types;
- this.prefix = prefix;
- this.postfix = postfix;
- }
-
- public String apply(final String typeName, String value) {
- boolean hasMatch = false;
- for (String type:types){
- if (typeName.startsWith(type)) {
- hasMatch = true;
- break;
- }
- }
- return hasMatch ? prefix + value + postfix : value;
- }
- }
-
- private static final String SINGLE_QUOTATION = "'";
- private static final List<ConstantStringWrapper> WRAPPERS = new ArrayList<>(Arrays.asList(
- new ConstantStringWrapper(new ArrayList<>(Arrays.asList(
- serdeConstants.STRING_TYPE_NAME, serdeConstants.CHAR_TYPE_NAME,
- serdeConstants.VARCHAR_TYPE_NAME, serdeConstants.DATE_TYPE_NAME,
- serdeConstants.TIMESTAMP_TYPE_NAME)
- ), SINGLE_QUOTATION, SINGLE_QUOTATION),
- new ConstantStringWrapper(serdeConstants.DATE_TYPE_NAME, "to_date(", ")"),
- new ConstantStringWrapper(serdeConstants.TIMESTAMP_TYPE_NAME, "to_timestamp(", ")"))
- );
-
- private String createConstantString(String typeName, String value) {
- for (ConstantStringWrapper wrapper : WRAPPERS) {
- value = wrapper.apply(typeName, value);
- }
-
- return value;
- }
- }
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
deleted file mode 100644
index d4e3067..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/ColumnMappingUtils.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.phoenix.hive.util;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-
-import java.util.*;
-import java.util.stream.Collectors;
-
-/**
- * Util class for mapping between Hive and Phoenix column names
- */
-public class ColumnMappingUtils {
-
- private static final Log LOG = LogFactory.getLog(ColumnMappingUtils.class);
-
- public static Map<String, String> getColumnMappingMap(String columnMappings) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Column mappings : " + columnMappings);
- }
-
- if (columnMappings == null || columnMappings.length() == 0) {
- if (LOG.isInfoEnabled()) {
- LOG.info("phoenix.column.mapping not set. using field definition");
- }
-
- return Collections.emptyMap();
- }
-
- Map<String, String> columnMappingMap = new HashMap<>();
- for (String item : columnMappings.split(PhoenixStorageHandlerConstants.COMMA)) {
- String[] kv = item.trim().split(PhoenixStorageHandlerConstants.COLON);
- columnMappingMap.put(kv[0], kv[1].length() > 1 ? kv[1] : "");
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Column mapping map : " + columnMappingMap);
- }
-
- return columnMappingMap;
- }
-
- public static Map<String, String> getReverseColumnMapping(String columnMapping) {
- Map<String, String> reverse = new LinkedHashMap<>();
- Map<String, String> forward = getColumnMappingMap(columnMapping);
- for (Map.Entry<String, String> entry : forward.entrySet()) {
- reverse.put(entry.getValue(), entry.getKey());
- }
- return reverse;
- }
-
- public static List<String> quoteColumns(List<String> readColumnList) {
- List<String> newList = new LinkedList<>();
- for (String column : readColumnList) {
- newList.add("\"" + column + "\"");
- }
- return newList;
- }
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java
deleted file mode 100644
index 8d76ac0..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixConnectionUtil.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.util;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.QueryUtil;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Set of methods to obtain a Connection depending on the configuration.
- */
-public class PhoenixConnectionUtil {
-
- private static final Log LOG = LogFactory.getLog(PhoenixConnectionUtil.class);
-
- public static Connection getInputConnection(final Configuration conf, final Properties props)
- throws SQLException {
- String quorum = conf.get(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM);
- quorum = quorum == null ? props.getProperty(PhoenixStorageHandlerConstants
- .ZOOKEEPER_QUORUM, PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_QUORUM) :
- quorum;
-
- int zooKeeperClientPort = conf.getInt(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT, 0);
- zooKeeperClientPort = zooKeeperClientPort == 0 ?
- Integer.parseInt(props.getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT,
- String.valueOf(PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PORT))) :
- zooKeeperClientPort;
-
- String zNodeParent = conf.get(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT);
- zNodeParent = zNodeParent == null ? props.getProperty(PhoenixStorageHandlerConstants
- .ZOOKEEPER_PARENT, PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PARENT) :
- zNodeParent;
-
- return getConnection(quorum, zooKeeperClientPort, zNodeParent, PropertiesUtil
- .combineProperties(props, conf));
- }
-
- public static Connection getConnection(final Table table) throws SQLException {
- Map<String, String> tableParameterMap = table.getParameters();
-
- String zookeeperQuorum = tableParameterMap.get(PhoenixStorageHandlerConstants
- .ZOOKEEPER_QUORUM);
- zookeeperQuorum = zookeeperQuorum == null ? PhoenixStorageHandlerConstants
- .DEFAULT_ZOOKEEPER_QUORUM : zookeeperQuorum;
-
- String clientPortString = tableParameterMap.get(PhoenixStorageHandlerConstants
- .ZOOKEEPER_PORT);
- int clientPort = clientPortString == null ? PhoenixStorageHandlerConstants
- .DEFAULT_ZOOKEEPER_PORT : Integer.parseInt(clientPortString);
-
- String zNodeParent = tableParameterMap.get(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT);
- zNodeParent = zNodeParent == null ? PhoenixStorageHandlerConstants
- .DEFAULT_ZOOKEEPER_PARENT : zNodeParent;
-
- return DriverManager.getConnection(QueryUtil.getUrl(zookeeperQuorum, clientPort,
- zNodeParent));
- }
-
- private static Connection getConnection(final String quorum, final Integer clientPort, String
- zNodeParent, Properties props) throws SQLException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Connection attrs [quorum, port, znode] : " + quorum + ", " + clientPort +
- ", " +
- zNodeParent);
- }
-
- return DriverManager.getConnection(clientPort != null ? QueryUtil.getUrl(quorum,
- clientPort, zNodeParent) : QueryUtil.getUrl(quorum), props);
- }
-
- public static Configuration getConfiguration(JobConf jobConf) {
- Configuration conf = new Configuration(jobConf);
- String quorum = conf.get(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM);
- if (quorum != null) {
- conf.set(HConstants.ZOOKEEPER_QUORUM, quorum);
- }
- int zooKeeperClientPort = conf.getInt(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT, 0);
- if (zooKeeperClientPort != 0) {
- conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zooKeeperClientPort);
- }
- String zNodeParent = conf.get(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT);
- if (zNodeParent != null) {
- conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, zNodeParent);
- }
- return conf;
- }
-}
diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
deleted file mode 100644
index a72f78e..0000000
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.util;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.schema.MetaDataClient;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.TableNotFoundException;
-import org.apache.phoenix.util.ColumnInfo;
-import org.apache.phoenix.util.PhoenixRuntime;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Misc utils
- */
-public class PhoenixUtil {
-
- private static final Log LOG = LogFactory.getLog(PhoenixUtil.class);
-
- public static String getPhoenixType(String hiveTypeName) {
- if (hiveTypeName.startsWith("array")) {
- List<String> tokenList = new ArrayList<>(Arrays.asList(hiveTypeName.split("[<>]")));
- return getPhoenixType(tokenList.get(1)) + "[]";
- } else if (hiveTypeName.startsWith("int")) {
- return "integer";
- } else if (hiveTypeName.equals("string")) {
- return "varchar";
- } else {
- return hiveTypeName;
- }
- }
-
- public static boolean existTable(Connection conn, String tableName) throws SQLException {
- boolean exist = false;
- DatabaseMetaData dbMeta = conn.getMetaData();
-
- String[] schemaInfo = getTableSchema(tableName.toUpperCase());
- try (ResultSet rs = dbMeta.getTables(null, schemaInfo[0], schemaInfo[1], null)) {
- exist = rs.next();
-
- if (LOG.isDebugEnabled()) {
- if (exist) {
- LOG.debug(rs.getString("TABLE_NAME") + " table exist. ");
- } else {
- LOG.debug("table " + tableName + " doesn't exist.");
- }
- }
- }
-
- return exist;
- }
-
- public static List<String> getPrimaryKeyColumnList(Connection conn, String tableName) throws
- SQLException {
- Map<Short, String> primaryKeyColumnInfoMap = new HashMap<>();
- DatabaseMetaData dbMeta = conn.getMetaData();
-
- String[] schemaInfo = getTableSchema(tableName.toUpperCase());
- try (ResultSet rs = dbMeta.getPrimaryKeys(null, schemaInfo[0], schemaInfo[1])) {
- while (rs.next()) {
- primaryKeyColumnInfoMap.put(rs.getShort("KEY_SEQ"), rs.getString("COLUMN_NAME"));
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("PK-columns : " + primaryKeyColumnInfoMap);
- }
- }
-
- return new ArrayList<>(primaryKeyColumnInfoMap.values());
- }
-
- public static List<String> getPrimaryKeyColumnList(Configuration config, String tableName) {
- List<String> pkColumnNameList = null;
-
- try (Connection conn = PhoenixConnectionUtil.getInputConnection(config, new Properties())) {
- pkColumnNameList = getPrimaryKeyColumnList(conn, tableName);
- } catch (SQLException e) {
- throw new RuntimeException(e);
- }
-
- return pkColumnNameList;
- }
-
- public static void createTable(Connection conn, String createTableStatement) throws
- SQLException {
- conn.createStatement().execute(createTableStatement);
- }
-
- public static void dropTable(Connection conn, String tableName) throws SQLException {
- conn.createStatement().execute("drop table " + tableName);
- }
-
- public static List<ColumnInfo> getColumnInfoList(Connection conn, String tableName) throws
- SQLException {
- List<ColumnInfo> columnInfoList = null;
-
- try {
- columnInfoList = PhoenixRuntime.generateColumnInfo(conn, tableName, null);
- } catch (TableNotFoundException e) {
- // This exception can occur while the table is being created.
- columnInfoList = Collections.emptyList();
- }
-
- return columnInfoList;
- }
-
- public static String[] getTableSchema(String tableName) {
- String[] schemaInfo = new String[2];
- String[] tokens = tableName.split("\\.");
-
- if (tokens.length == 2) {
- schemaInfo = tokens;
- } else {
- schemaInfo[1] = tokens[0];
- }
-
- return schemaInfo;
- }
-
- public static boolean isDisabledWal(MetaDataClient metaDataClient, String tableName) throws
- SQLException {
- String[] schemaInfo = getTableSchema(tableName.toUpperCase());
- MetaDataMutationResult result = metaDataClient.updateCache(schemaInfo[0], schemaInfo[1]);
- PTable dataTable = result.getTable();
-
- return dataTable.isWALDisabled();
- }
-
- public static void alterTableForWalDisable(Connection conn, String tableName, boolean
- disableMode) throws SQLException {
- conn.createStatement().execute("alter table " + tableName + " set disable_wal=" +
- disableMode);
- }
-
- public static void flush(Connection conn, String tableName) throws SQLException {
- try (HBaseAdmin admin = ((PhoenixConnection) conn).getQueryServices().getAdmin()) {
- admin.flush(TableName.valueOf(tableName));
- } catch (IOException e) {
- throw new SQLException(e);
- }
- }
-
- public static String constructDeleteStatement(Connection conn, String tableName) throws
- SQLException {
- StringBuilder deleteQuery = new StringBuilder("delete from ").append(tableName).append(" " +
- "where ");
-
- List<String> primaryKeyColumnList = getPrimaryKeyColumnList(conn, tableName);
- for (int i = 0, limit = primaryKeyColumnList.size(); i < limit; i++) {
- String pkColumn = primaryKeyColumnList.get(i);
- deleteQuery.append(pkColumn).append(PhoenixStorageHandlerConstants.EQUAL).append
- (PhoenixStorageHandlerConstants.QUESTION);
-
- if ((i + 1) != primaryKeyColumnList.size()) {
- deleteQuery.append(" and ");
- }
- }
-
- return deleteQuery.toString();
- }
-
- public static void closeResource(Statement stmt) throws SQLException {
- if (stmt != null && !stmt.isClosed()) {
- stmt.close();
- }
- }
-
- public static void closeResource(Connection conn) throws SQLException {
- if (conn != null && !conn.isClosed()) {
- conn.close();
- }
- }
-}
diff --git a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
deleted file mode 100644
index cfdf995..0000000
--- a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.query;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.junit.Assert.assertEquals;
-
-public class PhoenixQueryBuilderTest {
- private static final PhoenixQueryBuilder BUILDER = PhoenixQueryBuilder.getInstance();
- private static final String TABLE_NAME = "TEST_TABLE";
-
- private IndexSearchCondition mockedIndexSearchCondition(String comparisonOp,
- Object constantValue,
- Object[] constantValues,
- String columnName,
- String typeString,
- boolean isNot) {
- IndexSearchCondition condition = mock(IndexSearchCondition.class);
- when(condition.getComparisonOp()).thenReturn(comparisonOp);
-
- if (constantValue != null) {
- ExprNodeConstantDesc constantDesc = mock(ExprNodeConstantDesc.class);
- when(constantDesc.getValue()).thenReturn(constantValue);
- when(condition.getConstantDesc()).thenReturn(constantDesc);
- }
-
- ExprNodeColumnDesc columnDesc = mock(ExprNodeColumnDesc.class);
- when(columnDesc.getColumn()).thenReturn(columnName);
- when(columnDesc.getTypeString()).thenReturn(typeString);
- when(condition.getColumnDesc()).thenReturn(columnDesc);
-
- if (ArrayUtils.isNotEmpty(constantValues)) {
- ExprNodeConstantDesc[] constantDescs = new ExprNodeConstantDesc[constantValues.length];
- for (int i = 0; i < constantDescs.length; i++) {
- constantDescs[i] = mock(ExprNodeConstantDesc.class);
- when(condition.getConstantDesc(i)).thenReturn(constantDescs[i]);
- when(constantDescs[i].getValue()).thenReturn(constantValues[i]);
- }
- when(condition.getConstantDescs()).thenReturn(constantDescs);
- }
-
- when(condition.isNot()).thenReturn(isNot);
-
- return condition;
- }
-
- @Test
- public void testBuildQueryWithCharColumns() throws IOException {
- final String COLUMN_CHAR = "Column_Char";
- final String COLUMN_VARCHAR = "Column_VChar";
- final String expectedQueryPrefix = "select /*+ NO_CACHE */ \"" + COLUMN_CHAR + "\",\"" + COLUMN_VARCHAR +
- "\" from TEST_TABLE where ";
-
- JobConf jobConf = new JobConf();
- List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_CHAR, COLUMN_VARCHAR));
- List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFOPEqual", "CHAR_VALUE", null, COLUMN_CHAR, "char(10)", false),
- mockedIndexSearchCondition("GenericUDFOPEqual", "CHAR_VALUE2", null, COLUMN_VARCHAR, "varchar(10)", false)
- ));
-
- assertEquals(expectedQueryPrefix + "\"Column_Char\" = 'CHAR_VALUE' and \"Column_VChar\" = 'CHAR_VALUE2'",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
-
- searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFIn", null,
- new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", false))
- );
-
- assertEquals(expectedQueryPrefix + "\"Column_Char\" in ('CHAR1', 'CHAR2', 'CHAR3')",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
-
- searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFIn", null,
- new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, "char(10)", true))
- );
-
- assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 'CHAR2', 'CHAR3')",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
-
- searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFBetween", null,
- new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", false))
- );
-
- assertEquals(expectedQueryPrefix + "\"Column_Char\" between 'CHAR1' and 'CHAR2'",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
-
- searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFBetween", null,
- new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, "char(10)", true))
- );
-
- assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 'CHAR1' and 'CHAR2'",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
- }
-
- @Test
- public void testBuildBetweenQueryWithDateColumns() throws IOException {
- final String COLUMN_DATE = "Column_Date";
- final String tableName = "TEST_TABLE";
- final String expectedQueryPrefix = "select /*+ NO_CACHE */ \"" + COLUMN_DATE +
- "\" from " + tableName + " where ";
-
- JobConf jobConf = new JobConf();
- List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_DATE));
-
- List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFBetween", null,
- new Object[]{"1992-01-02", "1992-02-02"}, COLUMN_DATE, "date", false)
- ));
-
- assertEquals(expectedQueryPrefix +
- "\"" + COLUMN_DATE + "\" between to_date('1992-01-02') and to_date('1992-02-02')",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
-
- searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFBetween", null,
- new Object[]{"1992-01-02", "1992-02-02"}, COLUMN_DATE, "date", true)
- ));
-
- assertEquals(expectedQueryPrefix +
- "\"" + COLUMN_DATE + "\" not between to_date('1992-01-02') and to_date('1992-02-02')",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
- }
-
- @Test
- public void testBuildQueryWithNotNull() throws IOException {
- final String COLUMN_DATE = "Column_Date";
- final String tableName = "TEST_TABLE";
- final String expectedQueryPrefix = "select /*+ NO_CACHE */ \"" + COLUMN_DATE +
- "\" from " + tableName + " where ";
-
- JobConf jobConf = new JobConf();
- List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_DATE));
-
- List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFOPNotNull", null,
- null, COLUMN_DATE, "date", true))
- );
-
- assertEquals(expectedQueryPrefix +
- "\"" + COLUMN_DATE + "\" is not null ",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
- }
-
- @Test
- public void testBuildQueryWithBigintColumns() throws IOException {
- final String COLUMN_BIGINT = "Column_Bigint";
- final String tableName = "TEST_TABLE";
- final String expectedQueryPrefix = "select /*+ NO_CACHE */ \"" + COLUMN_BIGINT +
- "\" from " + tableName + " where ";
-
- JobConf jobConf = new JobConf();
- List<String> readColumnList = new ArrayList<>(Arrays.asList(COLUMN_BIGINT));
-
- List<IndexSearchCondition> searchConditions = new ArrayList<>(Arrays.asList(
- mockedIndexSearchCondition("GenericUDFOPEqual", 100L,
- null, COLUMN_BIGINT, "bigint", false))
- );
-
- assertEquals(expectedQueryPrefix + "\"" + COLUMN_BIGINT + "\" = 100",
- BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, searchConditions));
- }
-}
diff --git a/phoenix-hive3/pom.xml b/phoenix-hive3/pom.xml
deleted file mode 100644
index 9e25700..0000000
--- a/phoenix-hive3/pom.xml
+++ /dev/null
@@ -1,329 +0,0 @@
-<?xml version='1.0'?>
-<!--
-
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-connectors</artifactId>
- <version>6.0.0-SNAPSHOT</version>
- </parent>
- <artifactId>phoenix-hive3</artifactId>
- <name>Phoenix Hive Connector for Phoenix 5</name>
- <properties>
- <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
- <netty.version>4.1.47.Final</netty.version>
- <phoenix.version>${phoenix-five.version}</phoenix.version>
- <hbase.version>${hbase-two.version}</hbase.version>
- <hadoop.version>${hadoop-three.version}</hadoop.version>
- <avatica.version>1.12.0</avatica.version>
- <hive3.version>3.1.2</hive3.version>
- <curator.version>4.0.0</curator.version>
- <tez.version>0.9.1</tez.version>
- <jetty.version>9.3.8.v20160314</jetty.version>
- <jdk.version>1.8</jdk.version>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-core</artifactId>
- <exclusions>
- <exclusion>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-cli</artifactId>
- <version>${hive3.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-exec</artifactId>
- <version>${hive3.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-metastore</artifactId>
- <version>${hive3.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-standalone-metastore</artifactId>
- <type>test-jar</type>
- <version>${hive3.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>${slf4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>${log4j.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
- <exclusions>
- <exclusion>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty-all</artifactId>
- <version>${netty.version}</version>
- </dependency>
-
- <!-- Test dependencies -->
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix-core</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-testing-util</artifactId>
- <scope>test</scope>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-it</artifactId>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-common</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-minicluster</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.tez</groupId>
- <artifactId>tez-tests</artifactId>
- <scope>test</scope>
- <version>${tez.version}</version>
- <type>test-jar</type>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.tez</groupId>
- <artifactId>tez-dag</artifactId>
- <scope>test</scope>
- <version>${tez.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-util</artifactId>
- <scope>test</scope>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-http</artifactId>
- <scope>test</scope>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-server</artifactId>
- <scope>test</scope>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <version>${mockito-all.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>19.0</version>
- </dependency>
- <dependency>
- <groupId>org.apache.calcite.avatica</groupId>
- <artifactId>avatica</artifactId>
- <!-- Overriding the version of Avatica that PQS uses so that Hive will work -->
- <version>${avatica.version}</version>
- <scope>test</scope>
- <!-- And removing a bunch of dependencies that haven't been shaded in this older
- Avatica version which conflict with HDFS -->
- <exclusions>
- <exclusion>
- <groupId>org.hsqldb</groupId>
- <artifactId>hsqldb</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-databind</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-annotations</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-core</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- <executions>
- <execution>
- <id>HBaseManagedTimeTests</id>
- <configuration>
- <encoding>UTF-8</encoding>
- <forkCount>1</forkCount>
- <runOrder>alphabetical</runOrder>
- <reuseForks>false</reuseForks>
- <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=256m
- -Djava.security.egd=file:/dev/./urandom
- "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
- -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/
- -Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.
- </argLine>
- <redirectTestOutputToFile>${test.output.tofile}
- </redirectTestOutputToFile>
- <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
- <groups>org.apache.phoenix.end2end.HBaseManagedTimeTest</groups>
- <shutdown>kill</shutdown>
- <useSystemClassLoader>false</useSystemClassLoader>
- </configuration>
- <goals>
- <goal>integration-test</goal>
- <goal>verify</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>${maven-dependency-plugin.version}</version>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-jar-with-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- <configuration>
- <appendAssemblyId>false</appendAssemblyId>
- <finalName>phoenix-${project.version}-hive</finalName>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
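
For context on the failsafe configuration removed above: the `<groups>` element limits the `HBaseManagedTimeTests` execution to tests carrying the JUnit category it names. A minimal hypothetical test class showing how a test opts in (class name and test body are illustrative):

```java
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.phoenix.end2end.HBaseManagedTimeTest;

// Hypothetical integration test; only the @Category annotation matters here.
@Category(HBaseManagedTimeTest.class)
public class ExampleHivePhoenixIT {

    @Test
    public void testEndToEnd() {
        // Runs only in the HBaseManagedTimeTests failsafe execution,
        // because that execution filters on the category above.
    }
}
```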
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
deleted file mode 100644
index 1f26df1..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRecordUpdater.java
+++ /dev/null
@@ -1,341 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
-import org.apache.hadoop.hive.ql.io.RecordUpdater;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.phoenix.hive.PhoenixSerializer.DmlType;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.mapreduce.PhoenixResultWritable;
-import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-import org.apache.phoenix.schema.ConcurrentTableMutationException;
-import org.apache.phoenix.schema.MetaDataClient;
-import org.apache.phoenix.util.QueryUtil;
-
-public class PhoenixRecordUpdater implements RecordUpdater {
-
- private static final Log LOG = LogFactory.getLog(PhoenixRecordUpdater.class);
-
- private final Connection conn;
- private final PreparedStatement pstmt;
- private final long batchSize;
- private long numRecords = 0;
-
- private Configuration config;
- private String tableName;
- private MetaDataClient metaDataClient;
- private boolean restoreWalMode;
-
- private long rowCountDelta = 0;
-
- private PhoenixSerializer phoenixSerializer;
- private ObjectInspector objInspector;
- private PreparedStatement pstmtForDelete;
-
- public PhoenixRecordUpdater(Path path, AcidOutputFormat.Options options) throws IOException {
- this.config = options.getConfiguration();
- tableName = config.get(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
-
- Properties props = new Properties();
-
- try {
- // Disable WAL
- String walConfigName = tableName.toLowerCase() + PhoenixStorageHandlerConstants
- .DISABLE_WAL;
- boolean disableWal = config.getBoolean(walConfigName, false);
- if (disableWal) {
- if (LOG.isDebugEnabled()) {
- LOG.debug(walConfigName + " is true. batch.mode will be set true.");
- }
-
- props.setProperty(PhoenixStorageHandlerConstants.BATCH_MODE, "true");
- }
-
- this.conn = PhoenixConnectionUtil.getInputConnection(config, props);
-
- if (disableWal) {
- metaDataClient = new MetaDataClient((PhoenixConnection) conn);
-
- if (!PhoenixUtil.isDisabledWal(metaDataClient, tableName)) {
- // execute an ALTER TABLE statement if DISABLE_WAL is not already true.
- try {
- PhoenixUtil.alterTableForWalDisable(conn, tableName, true);
- } catch (ConcurrentTableMutationException e) {
- if (LOG.isWarnEnabled()) {
- LOG.warn("Concurrent modification of disableWAL");
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(tableName + "s wal disabled.");
- }
-
- // restore original value of disable_wal at the end.
- restoreWalMode = true;
- }
- }
-
- this.batchSize = PhoenixConfigurationUtil.getBatchSize(config);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Batch-size : " + batchSize);
- }
-
- String upsertQuery = QueryUtil.constructUpsertStatement(tableName, PhoenixUtil
- .getColumnInfoList(conn, tableName));
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Upsert-query : " + upsertQuery);
- }
- this.pstmt = this.conn.prepareStatement(upsertQuery);
- } catch (SQLException e) {
- throw new IOException(e);
- }
-
- this.objInspector = options.getInspector();
- try {
- phoenixSerializer = new PhoenixSerializer(config, options.getTableProperties());
- } catch (SerDeException e) {
- throw new IOException(e);
- }
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#insert(long, java.lang.Object)
- */
- @Override
- public void insert(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("Insert - currentTranscation : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil.toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.INSERT);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- write(pResultWritable);
-
- rowCountDelta++;
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#update(long, java.lang.Object)
- */
- @Override
- public void update(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("Update - currentTranscation : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil.toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.UPDATE);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- write(pResultWritable);
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#delete(long, java.lang.Object)
- */
- @Override
- public void delete(long currentTransaction, Object row) throws IOException {
- if (LOG.isTraceEnabled()) {
- LOG.trace("Delete - currentTranscation : " + currentTransaction + ", row : " +
- PhoenixStorageHandlerUtil.toString(row));
- }
-
- PhoenixResultWritable pResultWritable = (PhoenixResultWritable) phoenixSerializer
- .serialize(row, objInspector, DmlType.DELETE);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Data : " + pResultWritable.getValueList());
- }
-
- if (pstmtForDelete == null) {
- try {
- String deleteQuery = PhoenixUtil.constructDeleteStatement(conn, tableName);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Delete query : " + deleteQuery);
- }
-
- pstmtForDelete = conn.prepareStatement(deleteQuery);
- } catch (SQLException e) {
- throw new IOException(e);
- }
- }
-
- delete(pResultWritable);
-
- rowCountDelta--;
- }
-
- private void delete(PhoenixResultWritable pResultWritable) throws IOException {
- try {
- pResultWritable.delete(pstmtForDelete);
- numRecords++;
- pstmtForDelete.executeUpdate();
-
- if (numRecords % batchSize == 0) {
- LOG.debug("Commit called on a batch of size : " + batchSize);
- conn.commit();
- }
- } catch (SQLException e) {
- throw new IOException("Exception while deleting to table.", e);
- }
- }
-
- private void write(PhoenixResultWritable pResultWritable) throws IOException {
- try {
- pResultWritable.write(pstmt);
- numRecords++;
- pstmt.executeUpdate();
-
- if (numRecords % batchSize == 0) {
- LOG.debug("Commit called on a batch of size : " + batchSize);
- conn.commit();
- }
- } catch (SQLException e) {
- throw new IOException("Exception while writing to table.", e);
- }
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#flush()
- */
- @Override
- public void flush() throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Flush called");
- }
-
- try {
- conn.commit();
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Written row : " + numRecords);
- }
- } catch (SQLException e) {
- LOG.error("SQLException while performing the commit for the task.");
- throw new IOException(e);
- }
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#close(boolean)
- */
- @Override
- public void close(boolean abort) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("abort : " + abort);
- }
-
- try {
- conn.commit();
-
- if (LOG.isInfoEnabled()) {
- LOG.info("Written row : " + numRecords);
- }
- } catch (SQLException e) {
- LOG.error("SQLException while performing the commit for the task.");
- throw new IOException(e);
- } finally {
- try {
- if (restoreWalMode && PhoenixUtil.isDisabledWal(metaDataClient, tableName)) {
- try {
- PhoenixUtil.alterTableForWalDisable(conn, tableName, false);
- } catch (ConcurrentTableMutationException e) {
- if (LOG.isWarnEnabled()) {
- LOG.warn("Concurrent modification of disableWAL");
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug(tableName + "s wal enabled.");
- }
- }
-
- // flush when [table-name].auto.flush is true.
- String autoFlushConfigName = tableName.toLowerCase() +
- PhoenixStorageHandlerConstants.AUTO_FLUSH;
- boolean autoFlush = config.getBoolean(autoFlushConfigName, false);
- if (autoFlush) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("autoFlush is " + autoFlush);
- }
-
- PhoenixUtil.flush(conn, tableName);
- }
-
- PhoenixUtil.closeResource(pstmt);
- PhoenixUtil.closeResource(pstmtForDelete);
- PhoenixUtil.closeResource(conn);
- } catch (SQLException ex) {
- LOG.error("SQLException while closing the connection for the task.");
- throw new IOException(ex);
- }
- }
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.io.RecordUpdater#getStats()
- */
- @Override
- public SerDeStats getStats() {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getStats called");
- }
-
- SerDeStats stats = new SerDeStats();
- stats.setRowCount(rowCountDelta);
- // Don't worry about setting raw data size diff. There is no reasonable way to calculate
- // that without finding the row we are updating or deleting, which would be a mess.
- return stats;
- }
-
- @Override
- public long getBufferedRowCount() {
- return numRecords;
- }
-
-}
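
The class removed above funnels every insert and update through a prepared UPSERT and commits once per `batchSize` rows. A self-contained sketch of that batching pattern, assuming a local Phoenix JDBC endpoint and an illustrative `MY_TABLE` schema:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BatchedUpsertSketch {
    public static void main(String[] args) throws SQLException {
        final long batchSize = 1000; // PhoenixConfigurationUtil.getBatchSize in the real code
        long numRecords = 0;
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             PreparedStatement pstmt =
                     conn.prepareStatement("UPSERT INTO MY_TABLE (ID, NAME) VALUES (?, ?)")) {
            conn.setAutoCommit(false);
            for (int i = 0; i < 10_000; i++) {
                pstmt.setInt(1, i);
                pstmt.setString(2, "row-" + i);
                pstmt.executeUpdate();          // buffered client-side by Phoenix
                if (++numRecords % batchSize == 0) {
                    conn.commit();              // flush one batch of mutations
                }
            }
            conn.commit();                      // flush the tail of the last batch
        }
    }
}
```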
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRow.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
deleted file mode 100644
index cae8f6c..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRow.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.hadoop.hive.serde2.StructObject;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Implementation for Hive SerDe StructObject
- */
-public class PhoenixRow implements StructObject {
-
- private List<String> columnList;
- private Map<String, Object> resultRowMap;
-
- public PhoenixRow(List<String> columnList) {
- this.columnList = columnList;
- }
-
- public PhoenixRow setResultRowMap(Map<String, Object> resultRowMap) {
- this.resultRowMap = resultRowMap;
- return this;
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.serde2.StructObject#getField(int)
- */
- @Override
- public Object getField(int fieldID) {
- return resultRowMap.get(columnList.get(fieldID));
- }
-
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.serde2.StructObject#getFieldsAsList()
- */
- @Override
- public List<Object> getFieldsAsList() {
- return new ArrayList<>(resultRowMap.values());
- }
-
-
- @Override
- public String toString() {
- return resultRowMap.toString();
- }
-}
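
A hypothetical usage sketch for the `PhoenixRow` removed above (assuming the class is on the classpath): Hive asks for fields by position, and the row maps each position to a column name before the map lookup.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class PhoenixRowSketch {
    public static void main(String[] args) {
        Map<String, Object> resultRow = new HashMap<>();
        resultRow.put("ID", 42);
        resultRow.put("NAME", "phoenix");

        PhoenixRow row = new PhoenixRow(Arrays.asList("ID", "NAME"))
                .setResultRowMap(resultRow);
        System.out.println(row.getField(0)); // 42, resolved via the column list
        System.out.println(row.getField(1)); // phoenix
    }
}
```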
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java
deleted file mode 100644
index a963fba..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixRowKey.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.hadoop.hive.ql.io.RecordIdentifier;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Map;
-
-/**
- * Hive's RecordIdentifier implementation.
- */
-
-public class PhoenixRowKey extends RecordIdentifier {
-
- private PrimaryKeyData rowKeyMap = PrimaryKeyData.EMPTY;
-
- public PhoenixRowKey() {
-
- }
-
- public void setRowKeyMap(Map<String, Object> rowKeyMap) {
- this.rowKeyMap = new PrimaryKeyData(rowKeyMap);
- }
-
- @Override
- public void write(DataOutput dataOutput) throws IOException {
- super.write(dataOutput);
-
- rowKeyMap.serialize((OutputStream) dataOutput);
- }
-
- @Override
- public void readFields(DataInput dataInput) throws IOException {
- super.readFields(dataInput);
-
- try {
- rowKeyMap = PrimaryKeyData.deserialize((InputStream) dataInput);
- } catch (ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
- }
-}
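
A round-trip sketch for the `PhoenixRowKey` removed above. Note that `write` casts its `DataOutput` to `OutputStream`, so the key only works with streams that implement both, such as `DataOutputStream`; the sketch (with illustrative key values) respects that constraint.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class PhoenixRowKeySketch {
    public static void main(String[] args) throws IOException {
        Map<String, Object> pk = new LinkedHashMap<>();
        pk.put("ID", 42L);

        PhoenixRowKey out = new PhoenixRowKey();
        out.setRowKeyMap(pk);

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        out.write(new DataOutputStream(buffer)); // DataOutputStream is also an OutputStream

        PhoenixRowKey in = new PhoenixRowKey();
        in.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
    }
}
```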
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
deleted file mode 100644
index 9b5083d..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerDe.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.Writable;
-import org.apache.phoenix.hive.PhoenixSerializer.DmlType;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.mapreduce.PhoenixResultWritable;
-import org.apache.phoenix.hive.objectinspector.PhoenixObjectInspectorFactory;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-
-/**
- * SerDe implementation for Phoenix Hive Storage
- *
- */
-public class PhoenixSerDe extends AbstractSerDe {
-
- public static final Log LOG = LogFactory.getLog(PhoenixSerDe.class);
-
- private PhoenixSerializer serializer;
- private ObjectInspector objectInspector;
-
- private LazySerDeParameters serdeParams;
- private PhoenixRow row;
-
- private Properties tableProperties;
-
- /**
- * @throws SerDeException
- */
- public PhoenixSerDe() throws SerDeException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PhoenixSerDe created");
- }
- }
-
- @Override
- public void initialize(Configuration conf, Properties tbl) throws SerDeException {
- tableProperties = tbl;
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("SerDe initialize : " + tbl.getProperty("name"));
- }
-
- serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
- objectInspector = createLazyPhoenixInspector(conf, tbl);
-
- String inOutWork = tbl.getProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK);
- if (inOutWork == null) {
- return;
- }
-
- serializer = new PhoenixSerializer(conf, tbl);
- row = new PhoenixRow(serdeParams.getColumnNames());
- }
-
- @Override
- public Object deserialize(Writable result) throws SerDeException {
- if (!(result instanceof PhoenixResultWritable)) {
- throw new SerDeException(result.getClass().getName() + ": expects " +
- "PhoenixResultWritable!");
- }
-
- return row.setResultRowMap(((PhoenixResultWritable) result).getResultMap());
- }
-
- @Override
- public Class<? extends Writable> getSerializedClass() {
- return PhoenixResultWritable.class;
- }
-
- @Override
- public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
- try {
- return serializer.serialize(obj, objInspector, DmlType.NONE);
- } catch (Exception e) {
- throw new SerDeException(e);
- }
- }
-
- @Override
- public SerDeStats getSerDeStats() {
- // no support for statistics
- return null;
- }
-
- public Properties getTableProperties() {
- return tableProperties;
- }
-
- public LazySerDeParameters getSerdeParams() {
- return serdeParams;
- }
-
- @Override
- public ObjectInspector getObjectInspector() throws SerDeException {
- return objectInspector;
- }
-
- private ObjectInspector createLazyPhoenixInspector(Configuration conf, Properties tbl) throws
- SerDeException {
- List<String> columnNameList = Arrays.asList(tbl.getProperty(serdeConstants.LIST_COLUMNS)
- .split(PhoenixStorageHandlerConstants.COMMA));
- List<TypeInfo> columnTypeList = TypeInfoUtils.getTypeInfosFromTypeString(tbl.getProperty
- (serdeConstants.LIST_COLUMN_TYPES));
-
- List<ObjectInspector> columnObjectInspectors = new ArrayList<>(columnTypeList.size());
-
- for (TypeInfo typeInfo : columnTypeList) {
- columnObjectInspectors.add(PhoenixObjectInspectorFactory.createObjectInspector
- (typeInfo, serdeParams));
- }
-
- return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(columnNameList,
- columnObjectInspectors, null, serdeParams.getSeparators()[0], serdeParams,
- ObjectInspectorOptions.JAVA);
- }
-}
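
The removed SerDe builds one object inspector per column from the standard `LIST_COLUMNS` / `LIST_COLUMN_TYPES` table properties. A minimal sketch of that derivation using Hive's real `TypeInfoUtils`; the column and type strings are illustrative:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class SerDeColumnsSketch {
    public static void main(String[] args) {
        // LIST_COLUMNS is comma-separated, LIST_COLUMN_TYPES is colon-separated.
        List<String> columnNames = Arrays.asList("id,name,salary".split(","));
        List<TypeInfo> columnTypes =
                TypeInfoUtils.getTypeInfosFromTypeString("int:string:double");
        // One inspector per column is then created, as createLazyPhoenixInspector does above.
        System.out.println(columnNames + " -> " + columnTypes);
    }
}
```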
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
deleted file mode 100644
index 9f3d35c..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PhoenixSerializer.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.mapreduce.PhoenixResultWritable;
-import org.apache.phoenix.hive.util.PhoenixConnectionUtil;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-import org.apache.phoenix.util.ColumnInfo;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Serializer used in PhoenixSerDe and PhoenixRecordUpdater to produce Writable.
- */
-public class PhoenixSerializer {
-
- private static final Log LOG = LogFactory.getLog(PhoenixSerializer.class);
-
- public enum DmlType {
- NONE,
- SELECT,
- INSERT,
- UPDATE,
- DELETE
- }
-
- private int columnCount = 0;
- private PhoenixResultWritable pResultWritable;
-
- public PhoenixSerializer(Configuration config, Properties tbl) throws SerDeException {
- String mapping = tbl.getProperty(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING,
- null);
- if(mapping != null) {
- config.set(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING, mapping);
- }
-
- // Populate the table properties into config, because these values are used in
- // the initialization of PhoenixResultWritable if the table is transactional
- String tableName = tbl.getProperty(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
- config.set(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME, tableName);
-
- config.set(PhoenixStorageHandlerConstants.PHOENIX_ROWKEYS,
- tbl.getProperty(PhoenixStorageHandlerConstants.PHOENIX_ROWKEYS));
-
- String quorum = config.get(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM);
- if (quorum == null) {
- config.set(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM,
- tbl.getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM,
- PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_QUORUM));
- }
-
- int zooKeeperClientPort =
- config.getInt(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT, 0);
- if (zooKeeperClientPort == 0) {
- config.setInt(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT,
- Integer.parseInt(tbl.getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_PORT,
- String.valueOf(PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PORT))));
- }
-
- String zNodeParent = config.get(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT);
- if (zNodeParent == null) {
- config.set(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT,
- tbl.getProperty(PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT,
- PhoenixStorageHandlerConstants.DEFAULT_ZOOKEEPER_PARENT));
- }
-
- try (Connection conn = PhoenixConnectionUtil.getInputConnection(config, tbl)) {
- List<ColumnInfo> columnMetadata = PhoenixUtil.getColumnInfoList(conn, tableName);
-
- columnCount = columnMetadata.size();
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Column-meta : " + columnMetadata);
- }
-
- pResultWritable = new PhoenixResultWritable(config, columnMetadata);
- } catch (SQLException | IOException e) {
- throw new SerDeException(e);
- }
- }
-
- public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType) {
- pResultWritable.clear();
-
- final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
- final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " +
- values);
- }
-
- int fieldCount = columnCount;
- if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
- fieldCount++;
- }
-
- for (int i = 0; i < fieldCount; i++) {
- if (fieldList.size() <= i) {
- break;
- }
-
- StructField structField = fieldList.get(i);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("structField[" + i + "] : " + structField);
- }
-
- if (structField != null) {
- Object fieldValue = structInspector.getStructFieldData(values, structField);
- ObjectInspector fieldOI = structField.getFieldObjectInspector();
-
- String fieldName = structField.getFieldName();
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " +
- fieldOI);
- }
-
- Object value = null;
- switch (fieldOI.getCategory()) {
- case PRIMITIVE:
- value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject
- (fieldValue);
-
- if (LOG.isTraceEnabled()) {
- LOG.trace("Field " + fieldName + "[" + i + "] : " + value + "(" + value
- .getClass() + ")");
- }
-
- if (value instanceof HiveDecimal) {
- value = ((HiveDecimal) value).bigDecimalValue();
- } else if (value instanceof HiveChar) {
- value = ((HiveChar) value).getValue().trim();
- } else if (value instanceof Date) {
- value = java.sql.Date.valueOf(value.toString());
- } else if (value instanceof Timestamp) {
- value = java.sql.Timestamp.valueOf(value.toString());
- }
-
- pResultWritable.add(value);
- break;
- case LIST:
- // No support for arrays in INSERT statements yet
- break;
- case STRUCT:
- if (dmlType == DmlType.DELETE) {
- // For update/delete, the first value is a struct<transactionid:bigint,
- // bucketid:int,rowid:bigint,primaryKey:binary>.
- List<Object> fieldValueList = ((StandardStructObjectInspector)
- fieldOI).getStructFieldsDataAsList(fieldValue);
-
- // convert to map from binary of primary key.
- @SuppressWarnings("unchecked")
- Map<String, Object> primaryKeyMap = (Map<String, Object>)
- PhoenixStorageHandlerUtil.toMap(((BytesWritable)
- fieldValueList.get(3)).getBytes());
- for (Object pkValue : primaryKeyMap.values()) {
- pResultWritable.add(pkValue);
- }
- }
-
- break;
- default:
- // Fail fast on unsupported categories such as MAP or UNION.
- throw new RuntimeException(new SerDeException(
- "Phoenix unsupported column type: " + fieldOI.getCategory()));
- }
- }
- }
-
- return pResultWritable;
- }
-}
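
The primitive branch of `serialize` above normalizes Hive wrapper types to JDBC-friendly values before binding. The same conversions, extracted into a standalone helper (a sketch, not the shipped API):

```java
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.Timestamp;

public class HiveValueConversionSketch {
    static Object toJdbcValue(Object value) {
        if (value instanceof HiveDecimal) {
            return ((HiveDecimal) value).bigDecimalValue();    // -> java.math.BigDecimal
        } else if (value instanceof HiveChar) {
            return ((HiveChar) value).getValue().trim();       // -> String
        } else if (value instanceof Date) {
            return java.sql.Date.valueOf(value.toString());    // -> java.sql.Date
        } else if (value instanceof Timestamp) {
            return java.sql.Timestamp.valueOf(value.toString());
        }
        return value; // already a JDBC-friendly type
    }

    public static void main(String[] args) {
        System.out.println(toJdbcValue(HiveDecimal.create("12.34"))); // BigDecimal 12.34
        System.out.println(toJdbcValue(Date.valueOf("2020-01-01"))); // java.sql.Date
    }
}
```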
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java
deleted file mode 100644
index 7773997..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/PrimaryKeyData.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InvalidClassException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.ObjectStreamClass;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * Wrapper around the primary key data for Hive.
- */
-public class PrimaryKeyData implements Serializable {
- public static final PrimaryKeyData EMPTY = new PrimaryKeyData(Collections.<String,Object> emptyMap());
- private static final long serialVersionUID = 1L;
-
- // Based on https://www.ibm.com/developerworks/library/se-lookahead/. Prevents unexpected
- // deserialization of other objects of an unexpected class.
- private static class LookAheadObjectInputStream extends ObjectInputStream {
- public LookAheadObjectInputStream(InputStream in) throws IOException {
- super(in);
- }
-
- @Override
- protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException {
- if (!desc.getName().equals(PrimaryKeyData.class.getName()) &&
- !desc.getName().startsWith("java.lang.") &&
- !desc.getName().startsWith("java.util.") &&
- !desc.getName().startsWith("java.sql.")) {
- throw new InvalidClassException(desc.getName(), "Expected an instance of PrimaryKeyData");
- }
- return super.resolveClass(desc);
- }
- }
-
- private final HashMap<String,Object> data;
-
- public PrimaryKeyData(Map<String,Object> data) {
- if (data instanceof HashMap) {
- this.data = (HashMap<String,Object>) data;
- } else {
- this.data = new HashMap<>(Objects.requireNonNull(data));
- }
- }
-
- public HashMap<String,Object> getData() {
- return data;
- }
-
- public void serialize(OutputStream output) throws IOException {
- try (ObjectOutputStream oos = new ObjectOutputStream(output)) {
- oos.writeObject(this);
- oos.flush();
- }
- }
-
- public static PrimaryKeyData deserialize(InputStream input) throws IOException, ClassNotFoundException {
- try (LookAheadObjectInputStream ois = new LookAheadObjectInputStream(input)) {
- Object obj = ois.readObject();
- if (obj instanceof PrimaryKeyData) {
- return (PrimaryKeyData) obj;
- }
- throw new InvalidClassException(obj == null ? "null" : obj.getClass().getName(), "Disallowed serialized class");
- }
- }
-}
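
A small round-trip demo of the look-ahead guard in the `PrimaryKeyData` class removed above (assuming the class is on the classpath): the overridden `resolveClass` rejects any serialized class outside `PrimaryKeyData` and the whitelisted `java.*` packages before it can be instantiated.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;

public class PrimaryKeyDataSketch {
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        PrimaryKeyData original =
                new PrimaryKeyData(Collections.<String, Object>singletonMap("ID", 1L));

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.serialize(buffer);

        // Deserialization succeeds because only whitelisted classes are on the stream.
        PrimaryKeyData copy =
                PrimaryKeyData.deserialize(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println(copy.getData()); // {ID=1}
    }
}
```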
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java
deleted file mode 100644
index ed47176..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixOutputFormat.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
-import org.apache.hadoop.hive.ql.io.RecordUpdater;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormat;
-import org.apache.hadoop.mapred.RecordWriter;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.hadoop.util.Progressable;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.Properties;
-
-/**
- * Custom OutputFormat to feed into Hive. Describes the output-specification for a Map-Reduce job.
- */
-public class PhoenixOutputFormat<T extends DBWritable> implements OutputFormat<NullWritable, T>,
- AcidOutputFormat<NullWritable, T> {
-
- private static final Log LOG = LogFactory.getLog(PhoenixOutputFormat.class);
-
- public PhoenixOutputFormat() {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PhoenixOutputFormat created");
- }
- }
-
- @Override
- public RecordWriter<NullWritable, T> getRecordWriter(FileSystem ignored, JobConf jobConf,
- String name, Progressable progress)
- throws IOException {
- return createRecordWriter(jobConf, new Properties());
- }
-
- @Override
- public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
-
- }
-
- @Override
- public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter
- (JobConf jobConf, Path finalOutPath, Class<? extends Writable> valueClass, boolean
- isCompressed, Properties tableProperties, Progressable progress) throws
- IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Get RecordWriter for finalOutPath : " + finalOutPath + ", valueClass" +
- " : " +
- valueClass
- .getName() + ", isCompressed : " + isCompressed + ", tableProperties " +
- ": " + tableProperties + ", progress : " + progress);
- }
-
- return createRecordWriter(jobConf, new Properties());
- }
-
- @Override
- public RecordUpdater getRecordUpdater(Path path, org.apache.hadoop.hive.ql.io
- .AcidOutputFormat.Options options) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Get RecordWriter for path : " + path + ", options : " +
- PhoenixStorageHandlerUtil
- .getOptionsValue(options));
- }
- return new PhoenixRecordWriter<T>(path, options);
- }
-
- @Override
- public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getRawRecordWriter(Path path,
- org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options options) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Get RawRecordWriter for path : " + path + ", options : " +
- PhoenixStorageHandlerUtil.getOptionsValue(options));
- }
-
- return new PhoenixRecordWriter<T>(path, options);
- }
-
- private PhoenixRecordWriter<T> createRecordWriter(Configuration config, Properties properties) {
- try {
- return new PhoenixRecordWriter<T>(config, properties);
- } catch (SQLException e) {
- LOG.error("Error during PhoenixRecordWriter instantiation :" + e.getMessage());
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
deleted file mode 100644
index f322767..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixResultWritable.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.mapreduce;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.lib.db.DBWritable;
-import org.apache.phoenix.hive.PhoenixRowKey;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.util.ColumnMappingUtils;
-import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
-import org.apache.phoenix.hive.util.PhoenixUtil;
-import org.apache.phoenix.util.ColumnInfo;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Writable implementation exchanged between the Phoenix MapReduce layer and the Hive SerDe
- *
- */
-public class PhoenixResultWritable implements Writable, DBWritable, Configurable {
-
- private static final Log LOG = LogFactory.getLog(PhoenixResultWritable.class);
-
- private List<ColumnInfo> columnMetadataList;
- private List<Object> valueList; // for output
- private Map<String, Object> rowMap = new HashMap<>(); // for input
- private Map<String, String> columnMap;
-
- private int columnCount = -1;
-
- private Configuration config;
- private boolean isTransactional;
- private Map<String, Object> rowKeyMap = new LinkedHashMap<>();
- private List<String> primaryKeyColumnList;
-
- public PhoenixResultWritable() {
- }
-
- public PhoenixResultWritable(Configuration config) throws IOException {
- setConf(config);
- }
-
- public PhoenixResultWritable(Configuration config, List<ColumnInfo> columnMetadataList)
- throws IOException {
- this(config);
- this.columnMetadataList = columnMetadataList;
- valueList = new ArrayList<>(columnMetadataList.size());
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- // for write
- public void clear() {
- valueList.clear();
- }
-
- // for write
- public void add(Object value) {
- valueList.add(value);
- }
-
- @Override
- public void write(PreparedStatement statement) throws SQLException {
- ColumnInfo columnInfo = null;
- Object value = null;
-
- try {
- for (int i = 0, limit = columnMetadataList.size(); i < limit; i++) {
- columnInfo = columnMetadataList.get(i);
-
- if (valueList.size() > i) {
- value = valueList.get(i);
- } else {
- value = null;
- }
-
- if (value == null) {
- statement.setNull(i + 1, columnInfo.getSqlType());
- } else {
- statement.setObject(i + 1, value, columnInfo.getSqlType());
- }
- }
- } catch (SQLException | RuntimeException e) {
- LOG.error("[column-info, value] : " + columnInfo + ", " + value);
- throw e;
- }
- }
-
- public void delete(PreparedStatement statement) throws SQLException {
- ColumnInfo columnInfo = null;
- Object value = null;
-
- try {
- for (int i = 0, limit = primaryKeyColumnList.size(); i < limit; i++) {
- columnInfo = columnMetadataList.get(i);
-
- if (valueList.size() > i) {
- value = valueList.get(i);
- } else {
- value = null;
- }
-
- if (value == null) {
- statement.setNull(i + 1, columnInfo.getSqlType());
- } else {
- statement.setObject(i + 1, value, columnInfo.getSqlType());
- }
- }
- } catch (SQLException | RuntimeException e) {
- LOG.error("[column-info, value] : " + columnInfo + ", " + value);
- throw e;
- }
- }
-
- @Override
- public void readFields(ResultSet resultSet) throws SQLException {
- ResultSetMetaData rsmd = resultSet.getMetaData();
- if (columnCount == -1) {
- this.columnCount = rsmd.getColumnCount();
- }
- rowMap.clear();
-
- for (int i = 0; i < columnCount; i++) {
- Object value = resultSet.getObject(i + 1);
- String columnName = rsmd.getColumnName(i + 1);
- String mapName = columnMap.get(columnName);
- if(mapName != null) {
- columnName = mapName;
- }
- rowMap.put(columnName, value);
- }
-
- // Adding row__id column.
- if (isTransactional) {
- rowKeyMap.clear();
-
- for (String pkColumn : primaryKeyColumnList) {
- rowKeyMap.put(pkColumn, rowMap.get(pkColumn));
- }
- }
- }
-
- public void readPrimaryKey(PhoenixRowKey rowKey) {
- rowKey.setRowKeyMap(rowKeyMap);
- }
-
- public List<ColumnInfo> getColumnMetadataList() {
- return columnMetadataList;
- }
-
- public void setColumnMetadataList(List<ColumnInfo> columnMetadataList) {
- this.columnMetadataList = columnMetadataList;
- }
-
- public Map<String, Object> getResultMap() {
- return rowMap;
- }
-
- public List<Object> getValueList() {
- return valueList;
- }
-
- @Override
- public void setConf(Configuration conf) {
- config = conf;
- this.columnMap = ColumnMappingUtils.getReverseColumnMapping(config.get(PhoenixStorageHandlerConstants.PHOENIX_COLUMN_MAPPING,""));
-
- isTransactional = PhoenixStorageHandlerUtil.isTransactionalTable(config);
-
- if (isTransactional) {
- primaryKeyColumnList = PhoenixUtil.getPrimaryKeyColumnList(config, config.get
- (PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME));
- }
- }
-
- @Override
- public Configuration getConf() {
- return config;
- }
-}
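
The essence of `write(PreparedStatement)` above is the null-aware parameter binding loop. Extracted as a standalone sketch (connection URL, statement, and types are illustrative):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.List;

public class ParameterBindingSketch {
    public static void main(String[] args) throws SQLException {
        List<Object> values = Arrays.<Object>asList(42, null);
        int[] sqlTypes = {Types.INTEGER, Types.VARCHAR};
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             PreparedStatement stmt =
                     conn.prepareStatement("UPSERT INTO MY_TABLE (ID, NAME) VALUES (?, ?)")) {
            for (int i = 0; i < values.size(); i++) {
                Object value = values.get(i);
                if (value == null) {
                    stmt.setNull(i + 1, sqlTypes[i]);   // preserve the SQL type for nulls
                } else {
                    stmt.setObject(i + 1, value, sqlTypes[i]);
                }
            }
            stmt.executeUpdate();
            conn.commit();
        }
    }
}
```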
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
deleted file mode 100644
index 1de1cc7..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/AbstractPhoenixObjectInspector.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.io.Writable;
-
-/**
- * AbstractPhoenixObjectInspector for a LazyPrimitive object
- */
-public abstract class AbstractPhoenixObjectInspector<T extends Writable>
- extends AbstractPrimitiveLazyObjectInspector<T> {
-
- private final Log log;
-
- public AbstractPhoenixObjectInspector() {
- super();
-
- log = LogFactory.getLog(getClass());
- }
-
- protected AbstractPhoenixObjectInspector(PrimitiveTypeInfo typeInfo) {
- super(typeInfo);
-
- log = LogFactory.getLog(getClass());
- }
-
- @Override
- public Object getPrimitiveJavaObject(Object o) {
- return o;
- }
-
- public void logExceptionMessage(Object value, String dataType) {
- if (log.isDebugEnabled()) {
- log.debug("Data not in the " + dataType + " data type range so converted to null. " +
- "Given data is :"
- + value.toString(), new Exception("For debugging purposes"));
- }
- }
-}
\ No newline at end of file
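
Every concrete inspector in this module follows the same recipe on top of the abstract class removed above: pass the Hive type info to the super constructor, hand the raw Phoenix value back from `get`, and wrap it in a Writable on demand. A hypothetical minimal subclass (not one of the shipped inspectors) illustrating the pattern:

```java
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class PhoenixIntObjectInspectorSketch
        extends AbstractPhoenixObjectInspector<IntWritable>
        implements IntObjectInspector {

    public PhoenixIntObjectInspectorSketch() {
        super(TypeInfoFactory.intTypeInfo); // tell Hive which primitive this inspects
    }

    @Override
    public Object copyObject(Object o) {
        return o; // Integer is immutable, so no defensive copy is needed
    }

    @Override
    public int get(Object o) {
        return o == null ? 0 : (Integer) o;
    }

    @Override
    public IntWritable getPrimitiveWritableObject(Object o) {
        return o == null ? null : new IntWritable((Integer) o);
    }
}
```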
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java
deleted file mode 100644
index 2c642d2..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBinaryObjectInspector.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.BytesWritable;
-
-/**
- * ObjectInspector for Binary type
- */
-
-public class PhoenixBinaryObjectInspector extends AbstractPhoenixObjectInspector<BytesWritable>
- implements BinaryObjectInspector {
-
- public PhoenixBinaryObjectInspector() {
- super(TypeInfoFactory.binaryTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- byte[] clone = null;
-
- if (o != null) {
- byte[] source = (byte[]) o;
- clone = new byte[source.length];
- System.arraycopy(source, 0, clone, 0, source.length);
- }
-
- return clone;
- }
-
- @Override
- public byte[] getPrimitiveJavaObject(Object o) {
- return (byte[]) o;
- }
-
- @Override
- public BytesWritable getPrimitiveWritableObject(Object o) {
- return new BytesWritable((byte[]) o);
- }
-
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java
deleted file mode 100644
index a767ca0..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixBooleanObjectInspector.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.BooleanWritable;
-
-public class PhoenixBooleanObjectInspector extends AbstractPhoenixObjectInspector<BooleanWritable>
- implements BooleanObjectInspector {
-
- public PhoenixBooleanObjectInspector() {
- super(TypeInfoFactory.booleanTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : Boolean.valueOf((Boolean) o);
- }
-
- @Override
- public BooleanWritable getPrimitiveWritableObject(Object o) {
- return new BooleanWritable(get(o));
- }
-
- @Override
- public boolean get(Object o) {
- Boolean value = null;
-
- if (o != null) {
- try {
- value = (Boolean) o;
- } catch (Exception e) {
- logExceptionMessage(o, "BOOLEAN");
- }
- }
-
- // Guard against auto-unboxing a null when the value is missing or invalid.
- return value != null && value;
- }
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java
deleted file mode 100644
index 17222a2..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixCharObjectInspector.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-/**
- * ObjectInspector for char type
- */
-public class PhoenixCharObjectInspector extends AbstractPhoenixObjectInspector<HiveCharWritable>
- implements HiveCharObjectInspector {
-
- public PhoenixCharObjectInspector() {
- this(TypeInfoFactory.charTypeInfo);
- }
-
- public PhoenixCharObjectInspector(PrimitiveTypeInfo type) {
- super(type);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o;
- }
-
- @Override
- public HiveCharWritable getPrimitiveWritableObject(Object o) {
- return new HiveCharWritable(getPrimitiveJavaObject(o));
- }
-
- @Override
- public HiveChar getPrimitiveJavaObject(Object o) {
- if (o == null) {
- return null;
- }
- String value = (String) o;
- return new HiveChar(value, value.length());
- }
-
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java
deleted file mode 100644
index bf1badc..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixFloatObjectInspector.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.FloatWritable;
-
-/**
- * ObjectInspector for float type
- */
-
-public class PhoenixFloatObjectInspector extends AbstractPhoenixObjectInspector<FloatWritable>
- implements FloatObjectInspector {
-
- public PhoenixFloatObjectInspector() {
- super(TypeInfoFactory.floatTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : Float.valueOf((Float) o);
- }
-
- @Override
- public FloatWritable getPrimitiveWritableObject(Object o) {
- return new FloatWritable(get(o));
- }
-
- @Override
- public float get(Object o) {
- Float value = null;
-
- if (o != null) {
- try {
- value = ((Float) o).floatValue();
- } catch (Exception e) {
- logExceptionMessage(o, "LONG");
- }
- }
-
- return value;
- }
-
-}
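
Note: the deleted get(Object) returns a boxed Float that is still null when the input is null or the cast fails, so the implicit unboxing at the return statement throws NullPointerException. A hedged, self-contained sketch of a null-safe counterpart; FloatGetSketch is an invented name, not the connector's API:

public final class FloatGetSketch {

    // Hypothetical null-safe variant of the deleted get(Object).
    public static float get(Object o) {
        if (o instanceof Float) {
            return (Float) o;
        }
        // A sentinel keeps the primitive contract; callers that must
        // distinguish "missing" from 0.0f should test for null first.
        return 0.0f;
    }

    public static void main(String[] args) {
        System.out.println(get(1.5f)); // 1.5
        System.out.println(get(null)); // 0.0 instead of an NPE
    }
}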
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
deleted file mode 100644
index 07cee37..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixListObjectInspector.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.phoenix.schema.types.PhoenixArray;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * ObjectInspector for list objects.
- */
-public class PhoenixListObjectInspector implements ListObjectInspector {
-
- private ObjectInspector listElementObjectInspector;
- private byte separator;
- private LazyObjectInspectorParameters lazyParams;
-
- public PhoenixListObjectInspector(ObjectInspector listElementObjectInspector,
- byte separator, LazyObjectInspectorParameters lazyParams) {
- this.listElementObjectInspector = listElementObjectInspector;
- this.separator = separator;
- this.lazyParams = lazyParams;
- }
-
- @Override
- public String getTypeName() {
- return org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "<" +
- listElementObjectInspector.getTypeName() + ">";
- }
-
- @Override
- public Category getCategory() {
- return Category.LIST;
- }
-
- @Override
- public ObjectInspector getListElementObjectInspector() {
- return listElementObjectInspector;
- }
-
- @Override
- public Object getListElement(Object data, int index) {
- if (data == null) {
- return null;
- }
-
- PhoenixArray array = (PhoenixArray) data;
-
- return array.getElement(index);
- }
-
- @Override
- public int getListLength(Object data) {
- if (data == null) {
- return -1;
- }
-
- PhoenixArray array = (PhoenixArray) data;
- return array.getDimensions();
- }
-
- @Override
- public List<?> getList(Object data) {
- if (data == null) {
- return null;
- }
-
- PhoenixArray array = (PhoenixArray) data;
- int valueLength = array.getDimensions();
- List<Object> valueList = new ArrayList<>(valueLength);
-
- for (int i = 0; i < valueLength; i++) {
- valueList.add(array.getElement(i));
- }
-
- return valueList;
- }
-
- public byte getSeparator() {
- return separator;
- }
-
- public LazyObjectInspectorParameters getLazyParams() {
- return lazyParams;
- }
-}
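
getList() materializes a PhoenixArray into a java.util.List by iterating getElement(), and getListLength() reports getDimensions(), which for Phoenix's one-dimensional arrays is the element count. A hedged round-trip sketch; the PhoenixArray(PDataType, Object[]) constructor is the one Phoenix's own tests use, and ListInspectorSketch is an invented name:

import org.apache.phoenix.hive.objectinspector.PhoenixListObjectInspector;
import org.apache.phoenix.hive.objectinspector.PhoenixStringObjectInspector;
import org.apache.phoenix.schema.types.PVarchar;
import org.apache.phoenix.schema.types.PhoenixArray;

public class ListInspectorSketch {
    public static void main(String[] args) {
        PhoenixArray array =
                new PhoenixArray(PVarchar.INSTANCE, new Object[] {"a", "b", "c"});
        // lazyParams may be null here: none of the methods below read it.
        PhoenixListObjectInspector oi = new PhoenixListObjectInspector(
                new PhoenixStringObjectInspector(false, (byte) '\\'),
                (byte) ',', null);
        System.out.println(oi.getTypeName());        // array<string>
        System.out.println(oi.getListLength(array)); // 3
        System.out.println(oi.getList(array));       // [a, b, c]
    }
}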
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java
deleted file mode 100644
index 554f2a4..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixLongObjectInspector.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.LongWritable;
-
-public class PhoenixLongObjectInspector extends AbstractPhoenixObjectInspector<LongWritable>
- implements LongObjectInspector {
-
- public PhoenixLongObjectInspector() {
- super(TypeInfoFactory.longTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new Long((Long) o);
- }
-
- @Override
- public LongWritable getPrimitiveWritableObject(Object o) {
- return new LongWritable(get(o));
- }
-
- @Override
- public long get(Object o) {
- Long value = null;
-
- if (o != null) {
- try {
- value = ((Long) o).longValue();
- } catch (Exception e) {
- logExceptionMessage(o, "LONG");
- }
- }
-
- return value;
- }
-
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java
deleted file mode 100644
index 3a19ea7..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixObjectInspectorFactory.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
-import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Factory for object inspectors. Matches hive type to the corresponding Phoenix object inspector.
- */
-
-public class PhoenixObjectInspectorFactory {
-
- private static final Log LOG = LogFactory.getLog(PhoenixObjectInspectorFactory.class);
-
- private PhoenixObjectInspectorFactory() {
-
- }
-
- public static LazySimpleStructObjectInspector createStructObjectInspector(TypeInfo type,
- LazySerDeParameters
- serdeParams) {
- StructTypeInfo structTypeInfo = (StructTypeInfo) type;
- List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
- List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
- List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>
- (fieldTypeInfos.size());
-
- for (int i = 0; i < fieldTypeInfos.size(); i++) {
- fieldObjectInspectors.add(createObjectInspector(fieldTypeInfos.get(i), serdeParams));
- }
-
- return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(
- fieldNames, fieldObjectInspectors, null,
- serdeParams.getSeparators()[1],
- serdeParams, ObjectInspectorOptions.JAVA);
- }
-
- public static ObjectInspector createObjectInspector(TypeInfo type, LazySerDeParameters
- serdeParams) {
- ObjectInspector oi = null;
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Type : " + type);
- }
-
- switch (type.getCategory()) {
- case PRIMITIVE:
- switch (((PrimitiveTypeInfo) type).getPrimitiveCategory()) {
- case BOOLEAN:
- oi = new PhoenixBooleanObjectInspector();
- break;
- case BYTE:
- oi = new PhoenixByteObjectInspector();
- break;
- case SHORT:
- oi = new PhoenixShortObjectInspector();
- break;
- case INT:
- oi = new PhoenixIntObjectInspector();
- break;
- case LONG:
- oi = new PhoenixLongObjectInspector();
- break;
- case FLOAT:
- oi = new PhoenixFloatObjectInspector();
- break;
- case DOUBLE:
- oi = new PhoenixDoubleObjectInspector();
- break;
- case VARCHAR:
- // same string
- case STRING:
- oi = new PhoenixStringObjectInspector(serdeParams.isEscaped(),
- serdeParams.getEscapeChar());
- break;
- case CHAR:
- oi = new PhoenixCharObjectInspector((PrimitiveTypeInfo)type);
- break;
- case DATE:
- oi = new PhoenixDateObjectInspector();
- break;
- case TIMESTAMP:
- oi = new PhoenixTimestampObjectInspector();
- break;
- case DECIMAL:
- oi = new PhoenixDecimalObjectInspector((PrimitiveTypeInfo) type);
- break;
- case BINARY:
- oi = new PhoenixBinaryObjectInspector();
- break;
- default:
- throw new RuntimeException("Hive internal error. not supported data type " +
- ": " + type);
- }
-
- break;
- case LIST:
- if (LOG.isDebugEnabled()) {
- LOG.debug("List type started");
- }
-
- ObjectInspector listElementObjectInspector = createObjectInspector((
- (ListTypeInfo) type).getListElementTypeInfo(), serdeParams);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("List type ended");
- }
-
- oi = new PhoenixListObjectInspector(listElementObjectInspector, serdeParams
- .getSeparators()[0], serdeParams);
-
- break;
- default:
- throw new RuntimeException("Hive internal error. not supported data type : " +
- type);
- }
-
- return oi;
- }
-}
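
The factory is a straight TypeInfo-to-inspector switch, recursing into LIST element types, with VARCHAR deliberately funneled through the string inspector. A hedged sketch of how a SerDe could drive it for an invented two-column layout; LazySerDeParameters(Configuration, Properties, String) and TypeInfoUtils.getTypeInfoFromTypeString are real Hive APIs, FactorySketch is an invented name:

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.phoenix.hive.objectinspector.PhoenixObjectInspectorFactory;

public class FactorySketch {
    public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
        LazySerDeParameters serdeParams =
                new LazySerDeParameters(new Configuration(), tbl, "FactorySketch");

        TypeInfo rowType = TypeInfoUtils
                .getTypeInfoFromTypeString("struct<id:int,name:string>");
        // INT resolves to PhoenixIntObjectInspector, STRING to
        // PhoenixStringObjectInspector, each wired into the struct OI.
        System.out.println(PhoenixObjectInspectorFactory
                .createStructObjectInspector(rowType, serdeParams)
                .getTypeName()); // struct<id:int,name:string>
    }
}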
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java
deleted file mode 100644
index 84529b0..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixShortObjectInspector.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-public class PhoenixShortObjectInspector extends AbstractPhoenixObjectInspector<ShortWritable>
- implements ShortObjectInspector {
-
- public PhoenixShortObjectInspector() {
- super(TypeInfoFactory.shortTypeInfo);
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new Short((Short) o);
- }
-
- @Override
- public ShortWritable getPrimitiveWritableObject(Object o) {
- return new ShortWritable(get(o));
- }
-
- @Override
- public short get(Object o) {
- Short value = null;
-
- if (o != null) {
- try {
- value = ((Short) o).shortValue();
- } catch (Exception e) {
- logExceptionMessage(o, "SHORT");
- }
- }
-
- return value;
- }
-
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java
deleted file mode 100644
index e409e1d..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/objectinspector/PhoenixStringObjectInspector.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.objectinspector;
-
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.Text;
-
-/**
- * ObjectInspector for string type
- */
-public class PhoenixStringObjectInspector extends AbstractPhoenixObjectInspector<Text>
- implements StringObjectInspector {
-
- private boolean escaped;
- private byte escapeChar;
-
- public PhoenixStringObjectInspector(boolean escaped, byte escapeChar) {
- super(TypeInfoFactory.stringTypeInfo);
- this.escaped = escaped;
- this.escapeChar = escapeChar;
- }
-
- @Override
- public Object copyObject(Object o) {
- return o == null ? null : new String((String) o);
- }
-
- @Override
- public String getPrimitiveJavaObject(Object o) {
- return (String) o;
- }
-
- @Override
- public Text getPrimitiveWritableObject(Object o) {
- Text value = null;
-
- if (o != null) {
- try {
- value = new Text((String) o);
- } catch (Exception e) {
- logExceptionMessage(o, "STRING");
- }
- }
-
- return value;
- }
-
- public boolean isEscaped() {
- return escaped;
- }
-
- public byte getEscapeChar() {
- return escapeChar;
- }
-
-}
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java
deleted file mode 100644
index 1e65819..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ppd/PhoenixPredicateDecomposer.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.ppd;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.phoenix.hive.ql.index.IndexPredicateAnalyzer;
-import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
-import org.apache.phoenix.hive.ql.index.PredicateAnalyzerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Supporting class that generates the DecomposedPredicate companion to PhoenixHiveStorageHandler
- * based on the search conditions.
- */
-public class PhoenixPredicateDecomposer {
-
- private static final Log LOG = LogFactory.getLog(PhoenixPredicateDecomposer.class);
-
- private List<String> columnNameList;
- private boolean calledPPD;
-
- private List<IndexSearchCondition> searchConditionList;
-
- public static PhoenixPredicateDecomposer create(List<String> columnNameList) {
- return new PhoenixPredicateDecomposer(columnNameList);
- }
-
- private PhoenixPredicateDecomposer(List<String> columnNameList) {
- this.columnNameList = columnNameList;
- }
-
- public DecomposedPredicate decomposePredicate(ExprNodeDesc predicate) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("predicate - " + predicate.toString());
- }
-
- IndexPredicateAnalyzer analyzer = PredicateAnalyzerFactory.createPredicateAnalyzer
- (columnNameList, getFieldValidator());
- DecomposedPredicate decomposed = new DecomposedPredicate();
-
- List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
- decomposed.residualPredicate = (ExprNodeGenericFuncDesc) analyzer.analyzePredicate
- (predicate, conditions);
- if (!conditions.isEmpty()) {
- decomposed.pushedPredicate = analyzer.translateSearchConditions(conditions);
- try {
- searchConditionList = conditions;
- calledPPD = true;
- } catch (Exception e) {
- LOG.warn("Failed to decompose predicates", e);
- return null;
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("decomposed predicate - residualPredicate: " + decomposed.residualPredicate +
- ", pushedPredicate: " + decomposed.pushedPredicate);
- }
-
- return decomposed;
- }
-
- public List<IndexSearchCondition> getSearchConditionList() {
- return searchConditionList;
- }
-
- public boolean isCalledPPD() {
- return calledPPD;
- }
-
- protected IndexPredicateAnalyzer.FieldValidator getFieldValidator() {
- return null;
- }
-}
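
decomposePredicate() hands the whole expression tree to an IndexPredicateAnalyzer restricted to the table's columns; whatever the analyzer accepts becomes the pushed predicate and the remainder stays residual. A hedged sketch on an invented key = 1 predicate; the Hive expression-building calls are real APIs, DecomposerSketch is an invented name:

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;

public class DecomposerSketch {
    public static void main(String[] args) throws Exception {
        // Build the Hive expression tree for: key = 1
        ExprNodeDesc key = new ExprNodeColumnDesc(
                TypeInfoFactory.intTypeInfo, "key", "t", false);
        ExprNodeDesc one =
                new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1);
        ExprNodeGenericFuncDesc eq = ExprNodeGenericFuncDesc.newInstance(
                FunctionRegistry.getFunctionInfo("=").getGenericUDF(),
                Arrays.asList(key, one));

        PhoenixPredicateDecomposer decomposer =
                PhoenixPredicateDecomposer.create(Arrays.asList("key"));
        DecomposedPredicate result = decomposer.decomposePredicate(eq);
        // "key = 1" is fully pushable, so the residual side should be null.
        System.out.println("pushed: " + result.pushedPredicate
                + ", residual: " + result.residualPredicate);
    }
}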
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
deleted file mode 100644
index 0b5355c..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/IndexSearchCondition.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.ql.index;
-
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-import java.util.Arrays;
-
-/**
- * IndexSearchCondition represents an individual search condition found by
- * {@link IndexPredicateAnalyzer}.
- *
- */
-public class IndexSearchCondition {
- private ExprNodeColumnDesc columnDesc;
- private String comparisonOp;
- private ExprNodeConstantDesc constantDesc;
- private ExprNodeGenericFuncDesc comparisonExpr;
-
- private String[] fields;
-
- // Support (Not) Between/(Not) In Operator
- private ExprNodeConstantDesc[] multiConstants;
- private boolean isNot;
-
- public IndexSearchCondition(ExprNodeColumnDesc columnDesc, String comparisonOp,
- ExprNodeConstantDesc[] multiConstants, ExprNodeGenericFuncDesc
- comparisonExpr, boolean isNot) {
- this(columnDesc, comparisonOp, multiConstants, comparisonExpr, null, isNot);
- }
-
- public IndexSearchCondition(ExprNodeColumnDesc columnDesc, String comparisonOp,
- ExprNodeConstantDesc[] multiConstants, ExprNodeGenericFuncDesc
- comparisonExpr, String[] fields, boolean isNot) {
- this.columnDesc = columnDesc;
- this.comparisonOp = comparisonOp;
- this.multiConstants = multiConstants;
- this.comparisonExpr = comparisonExpr;
- this.fields = fields;
- this.isNot = isNot;
- }
-
- public ExprNodeConstantDesc[] getConstantDescs() {
- return multiConstants;
- }
-
- public ExprNodeConstantDesc getConstantDesc(int index) {
- return multiConstants[index];
- }
-
- public boolean isNot() {
- return isNot;
- }
- //////////////////////////////////////////////////////////////////////////////
-
- public IndexSearchCondition(ExprNodeColumnDesc columnDesc, String comparisonOp,
- ExprNodeConstantDesc constantDesc, ExprNodeGenericFuncDesc
- comparisonExpr) {
- this(columnDesc, comparisonOp, constantDesc, comparisonExpr, null);
- }
-
- /**
- * Constructs a search condition, which takes the form
- * <p>
- * <pre>
- * column-ref comparison-op constant-value
- * </pre>
- * <p>
- * .
- *
- * @param columnDesc column being compared
- * @param comparisonOp comparison operator, e.g. "=" (taken from
- * GenericUDFBridge.getUdfName())
- * @param constantDesc constant value to search for
- * @param comparisonExpr the original comparison expression
- */
- public IndexSearchCondition(ExprNodeColumnDesc columnDesc, String comparisonOp,
- ExprNodeConstantDesc constantDesc, ExprNodeGenericFuncDesc
- comparisonExpr, String[] fields) {
-
- this.columnDesc = columnDesc;
- this.comparisonOp = comparisonOp;
- this.constantDesc = constantDesc;
- this.comparisonExpr = comparisonExpr;
- this.fields = fields;
- }
-
- public void setColumnDesc(ExprNodeColumnDesc columnDesc) {
- this.columnDesc = columnDesc;
- }
-
- public ExprNodeColumnDesc getColumnDesc() {
- return columnDesc;
- }
-
- public void setComparisonOp(String comparisonOp) {
- this.comparisonOp = comparisonOp;
- }
-
- public String getComparisonOp() {
- return comparisonOp;
- }
-
- public void setConstantDesc(ExprNodeConstantDesc constantDesc) {
- this.constantDesc = constantDesc;
- }
-
- public ExprNodeConstantDesc getConstantDesc() {
- return constantDesc;
- }
-
- public void setComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr) {
- this.comparisonExpr = comparisonExpr;
- }
-
- public ExprNodeGenericFuncDesc getComparisonExpr() {
- ExprNodeGenericFuncDesc ret = comparisonExpr;
- try {
- if (GenericUDFIn.class == comparisonExpr.getGenericUDF().getClass() && isNot) {
- ret = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
- FunctionRegistry.getFunctionInfo("not").getGenericUDF(),
- Arrays.asList(comparisonExpr));
- }
- } catch (SemanticException e) {
- throw new RuntimeException("hive operator -- never be thrown", e);
- }
- return ret;
- }
-
- public String[] getFields() {
- return fields;
- }
-
- @Override
- public String toString() {
- return comparisonExpr.getExprString();
- }
-}
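
The multi-constant constructor carries the operand list for (NOT) BETWEEN and (NOT) IN, and getComparisonExpr() re-wraps an IN as not(in(...)) when isNot is set. A hedged sketch of populating the BETWEEN form; the descriptors are invented examples, and the null comparisonExpr is safe because the accessors exercised here never read it:

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.phoenix.hive.ql.index.IndexSearchCondition;

public class SearchConditionSketch {
    public static void main(String[] args) {
        ExprNodeColumnDesc col = new ExprNodeColumnDesc(
                TypeInfoFactory.intTypeInfo, "key", "t", false);
        ExprNodeConstantDesc[] bounds = {
                new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1),
                new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 9)};
        IndexSearchCondition between = new IndexSearchCondition(
                col, "GenericUDFBetween", bounds, null, false);
        // getConstantDesc(i) indexes the BETWEEN bounds in order.
        System.out.println(between.getConstantDesc(0).getValue() + " .. "
                + between.getConstantDesc(1).getValue()); // 1 .. 9
    }
}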
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java
deleted file mode 100644
index b6903b9..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/ql/index/PredicateAnalyzerFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.ql.index;
-
-import org.apache.phoenix.hive.ql.index.IndexPredicateAnalyzer.FieldValidator;
-
-import java.util.List;
-
-public class PredicateAnalyzerFactory {
- public static IndexPredicateAnalyzer createPredicateAnalyzer(List<String> ppdColumnList,
-                                                                 FieldValidator fieldValidator) {
- // Create analyzer for conditions =, <, <=, >, >=
- IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(false);
-
- for (String columnName : ppdColumnList) {
- analyzer.allowColumnName(columnName);
- }
-
- analyzer.setAcceptsFields(true);
-        analyzer.setFieldValidator(fieldValidator);
-
- return analyzer;
- }
-
-}
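
A hedged usage sketch for the deleted factory: it allow-lists the pushdown-eligible columns and leaves field validation off when the validator is null, which is exactly what PhoenixPredicateDecomposer passes. AnalyzerFactorySketch is an invented name:

import java.util.Arrays;

import org.apache.phoenix.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.phoenix.hive.ql.index.PredicateAnalyzerFactory;

public class AnalyzerFactorySketch {
    public static void main(String[] args) {
        // Only "key" and "ts" may appear in pushed-down conditions.
        IndexPredicateAnalyzer analyzer = PredicateAnalyzerFactory
                .createPredicateAnalyzer(Arrays.asList("key", "ts"), null);
        System.out.println(analyzer != null); // true
    }
}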
diff --git a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java b/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
deleted file mode 100644
index 99caee8..0000000
--- a/phoenix-hive3/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.lang.reflect.Array;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.math.BigDecimal;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.ConcurrentHashMap;
-import javax.naming.NamingException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.net.DNS;
-import org.apache.phoenix.hive.PrimaryKeyData;
-import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
-import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
-import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-
-/**
- * Misc utils for PhoenixStorageHandler
- */
-
-public class PhoenixStorageHandlerUtil {
- private static final Log LOG = LogFactory.getLog(PhoenixStorageHandlerUtil.class);
- private static final AtomicReference<Method> GET_BUCKET_METHOD_REF = new AtomicReference<>();
- private static final AtomicReference<Method> GET_BUCKET_ID_METHOD_REF = new AtomicReference<>();
-
- public static String getTargetTableName(Table table) {
- Map<String, String> tableParameterMap = table.getParameters();
- String tableName = tableParameterMap.get(PhoenixStorageHandlerConstants
- .PHOENIX_TABLE_NAME);
- if (tableName == null) {
- tableName = table.getTableName();
- tableParameterMap.put(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME, tableName);
- }
-
- return tableName;
- }
-
-
- public static Object[] toTypedValues(JobConf jobConf, String typeName, String[] values) throws
- Exception {
- Object[] results = new Object[values.length];
- DateFormat df = null;
-
- for (int i = 0, limit = values.length; i < limit; i++) {
- if (serdeConstants.STRING_TYPE_NAME.equals(typeName) ||
- typeName.startsWith(serdeConstants.CHAR_TYPE_NAME) ||
- typeName.startsWith(serdeConstants.VARCHAR_TYPE_NAME)) {
- results[i] = values[i];
- } else if (serdeConstants.INT_TYPE_NAME.equals(typeName)) {
- results[i] = new Integer(values[i]);
- } else if (serdeConstants.BIGINT_TYPE_NAME.equals(typeName)) {
- results[i] = new Long(values[i]);
- } else if (serdeConstants.DOUBLE_TYPE_NAME.equals(typeName)) {
- results[i] = new Double(values[i]);
- } else if (serdeConstants.FLOAT_TYPE_NAME.equals(typeName)) {
- results[i] = new Float(values[i]);
- } else if (serdeConstants.SMALLINT_TYPE_NAME.equals(typeName)) {
- results[i] = new Short(values[i]);
- } else if (serdeConstants.TINYINT_TYPE_NAME.equals(typeName)) {
- results[i] = new Byte(values[i]);
- } else if (serdeConstants.DATE_TYPE_NAME.equals(typeName)) {
- String dateFormat = jobConf.get(PhoenixStorageHandlerConstants.HBASE_DATE_FORMAT,
- PhoenixStorageHandlerConstants.DEFAULT_DATE_FORMAT);
- df = new SimpleDateFormat(dateFormat);
- results[i] = new Long(df.parse(values[i]).getTime());
- } else if (serdeConstants.TIMESTAMP_TYPE_NAME.equals(typeName)) {
- String timestampFormat = jobConf.get(PhoenixStorageHandlerConstants
- .HBASE_TIMESTAMP_FORMAT, PhoenixStorageHandlerConstants
- .DEFAULT_TIMESTAMP_FORMAT);
- df = new SimpleDateFormat(timestampFormat);
- results[i] = new Long(df.parse(values[i]).getTime());
- } else if (typeName.contains(serdeConstants.DECIMAL_TYPE_NAME)) {
- results[i] = new BigDecimal(values[i]);
- }
- }
-
- return results;
- }
-
- public static String[] getConstantValues(IndexSearchCondition condition, String comparisonOp) {
- String[] constantValues = null;
-
- if (comparisonOp.endsWith("UDFOPEqual") || comparisonOp.endsWith("UDFOPNotEqual")) {
- constantValues = new String[]{String.valueOf(condition.getConstantDesc().getValue())};
- } else if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) { // key >= 1
- constantValues = new String[]{String.valueOf(condition.getConstantDesc().getValue())};
- } else if (comparisonOp.endsWith("UDFOPGreaterThan")) { // key > 1
- constantValues = new String[]{String.valueOf(condition.getConstantDesc().getValue())};
- } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) { // key <= 1
- constantValues = new String[]{String.valueOf(condition.getConstantDesc().getValue())};
- } else if (comparisonOp.endsWith("UDFOPLessThan")) { // key < 1
- constantValues = new String[]{String.valueOf(condition.getConstantDesc().getValue())};
- } else if (comparisonOp.endsWith("GenericUDFBetween")) {
- constantValues = new String[]{String.valueOf(condition.getConstantDesc(0).getValue()),
- String.valueOf(condition.getConstantDesc(1).getValue())};
- } else if (comparisonOp.endsWith("GenericUDFIn")) {
- ExprNodeConstantDesc[] constantDescs = condition.getConstantDescs();
- constantValues = new String[constantDescs.length];
- for (int i = 0, limit = constantDescs.length; i < limit; i++) {
- constantValues[i] = String.valueOf(condition.getConstantDesc(i).getValue());
- }
- }
-
- return constantValues;
- }
-
- public static String getRegionLocation(HRegionLocation location, Log log) throws IOException {
- InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort());
- if (isa.isUnresolved()) {
- log.warn("Failed resolve " + isa);
- }
- InetAddress regionAddress = isa.getAddress();
- String regionLocation = null;
- try {
- regionLocation = reverseDNS(regionAddress);
- } catch (NamingException e) {
- log.warn("Cannot resolve the host name for " + regionAddress + " because of " + e);
- regionLocation = location.getHostname();
- }
-
- return regionLocation;
- }
-
- // Copy from org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.reverseDNS
- private static final Map<InetAddress, String> reverseDNSCacheMap = new ConcurrentHashMap<>();
-
- private static String reverseDNS(InetAddress ipAddress) throws NamingException,
- UnknownHostException {
- String hostName = reverseDNSCacheMap.get(ipAddress);
-
- if (hostName == null) {
- String ipAddressString = null;
- try {
- ipAddressString = DNS.reverseDns(ipAddress, null);
- } catch (Exception e) {
- // We can use InetAddress in case the jndi failed to pull up the reverse DNS entry
- // from the name service. Also, in case of ipv6, we need to use the InetAddress
- // since resolving reverse DNS using jndi doesn't work well with ipv6 addresses.
- ipAddressString = InetAddress.getByName(ipAddress.getHostAddress()).getHostName();
- }
-
- if (ipAddressString == null) {
- throw new UnknownHostException("No host found for " + ipAddress);
- }
-
- hostName = Strings.domainNamePointerToHostName(ipAddressString);
- reverseDNSCacheMap.put(ipAddress, hostName);
- }
-
- return hostName;
- }
-
- public static String getTableKeyOfSession(JobConf jobConf, String tableName) {
-
- String sessionId = jobConf.get(PhoenixConfigurationUtil.SESSION_ID);
- return new StringBuilder("[").append(sessionId).append("]-").append(tableName).toString();
- }
-
- public static Map<String, TypeInfo> createColumnTypeMap(JobConf jobConf) {
- Map<String, TypeInfo> columnTypeMap = new HashMap();
-
- String[] columnNames = jobConf.get(serdeConstants.LIST_COLUMNS).split
- (PhoenixStorageHandlerConstants.COMMA);
- List<TypeInfo> typeInfos =
- TypeInfoUtils.getTypeInfosFromTypeString(jobConf.get(serdeConstants.LIST_COLUMN_TYPES));
-
- for (int i = 0, limit = columnNames.length; i < limit; i++) {
- columnTypeMap.put(columnNames[i], typeInfos.get(i));
- }
-
- return columnTypeMap;
- }
-
- public static List<String> getReadColumnNames(Configuration conf) {
- String colNames = conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
- if (colNames != null && !colNames.isEmpty()) {
- return Arrays.asList(colNames.split(PhoenixStorageHandlerConstants.COMMA));
- }
- return Collections.EMPTY_LIST;
- }
-
- public static boolean isTransactionalTable(Properties tableProperties) {
- String tableIsTransactional = tableProperties.getProperty(hive_metastoreConstants
- .TABLE_IS_TRANSACTIONAL);
-
- return tableIsTransactional != null && tableIsTransactional.equalsIgnoreCase("true");
- }
-
- public static boolean isTransactionalTable(Configuration config) {
- String tableIsTransactional = config.get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL);
-
- return tableIsTransactional != null && tableIsTransactional.equalsIgnoreCase("true");
- }
-
- public static void printConfiguration(Configuration config) {
- if (Boolean.getBoolean("dev")) {
- for (Iterator<Entry<String, String>> iterator = config.iterator(); iterator.hasNext();
- ) {
- Entry<String, String> entry = iterator.next();
-
- System.out.println(entry.getKey() + "=" + entry.getValue());
- }
- }
- }
-
- public static String toString(Object obj) {
- String content = null;
-
- if (obj instanceof Array) {
- Object[] values = (Object[]) obj;
-
- content =
- String.join(PhoenixStorageHandlerConstants.COMMA, (String[]) values);
- } else {
- content = obj.toString();
- }
-
- return content;
- }
-
- public static Map<?, ?> toMap(byte[] serialized) {
- ByteArrayInputStream bais = new ByteArrayInputStream(serialized);
-
- try {
- return PrimaryKeyData.deserialize(bais).getData();
- } catch (ClassNotFoundException | IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static String getOptionsValue(Options options) {
- StringBuilder content = new StringBuilder();
-
- int bucket = getBucket(options);
- String inspectorInfo = options.getInspector().getCategory() + ":" + options.getInspector()
- .getTypeName();
- long maxTxnId = options.getMaximumWriteId();
- long minTxnId = options.getMinimumWriteId();
- int recordIdColumn = options.getRecordIdColumn();
-        boolean isCompressed = options.isCompressed();
- boolean isWritingBase = options.isWritingBase();
-
- content.append("bucket : ").append(bucket).append(", inspectorInfo : ").append
- (inspectorInfo).append(", minTxnId : ").append(minTxnId).append(", maxTxnId : ")
- .append(maxTxnId).append(", recordIdColumn : ").append(recordIdColumn);
- content.append(", isCompressed : ").append(isCompresses).append(", isWritingBase : ")
- .append(isWritingBase);
-
- return content.toString();
- }
-
- private static int getBucket(Options options) {
- Method getBucketMethod = GET_BUCKET_METHOD_REF.get();
- try {
- if (getBucketMethod == null) {
- getBucketMethod = Options.class.getMethod("getBucket");
- GET_BUCKET_METHOD_REF.set(getBucketMethod);
- }
- return (int) getBucketMethod.invoke(options);
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
- LOG.trace("Failed to invoke Options.getBucket()", e);
- }
- Method getBucketIdMethod = GET_BUCKET_ID_METHOD_REF.get();
- try {
- if (getBucketIdMethod == null) {
- getBucketIdMethod = Options.class.getMethod("getBucketId");
-                GET_BUCKET_ID_METHOD_REF.set(getBucketIdMethod);
- }
- return (int) getBucketIdMethod.invoke(options);
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
- throw new RuntimeException("Failed to invoke Options.getBucketId()", e);
- }
- }
-}
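
getBucket() bridges the Hive 2 Options.getBucket() and Hive 3 Options.getBucketId() signatures by reflection, caching the resolved Method in an AtomicReference so the lookup happens once. A self-contained, hedged re-illustration of that pattern; BucketReflectionSketch, bucketOf, and NewStyle are invented stand-ins:

import java.lang.reflect.Method;
import java.util.concurrent.atomic.AtomicReference;

public class BucketReflectionSketch {
    private static final AtomicReference<Method> CACHE = new AtomicReference<>();

    public static int bucketOf(Object options) throws Exception {
        Method m = CACHE.get();
        if (m == null) {
            try {
                m = options.getClass().getMethod("getBucket");   // older name
            } catch (NoSuchMethodException e) {
                m = options.getClass().getMethod("getBucketId"); // newer name
            }
            CACHE.set(m); // cache whichever lookup succeeded
        }
        return (int) m.invoke(options);
    }

    public static class NewStyle {
        public int getBucketId() { return 7; }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(bucketOf(new NewStyle())); // 7
    }
}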
diff --git a/phoenix-hive3/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java b/phoenix-hive3/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java
deleted file mode 100644
index 3b2634f..0000000
--- a/phoenix-hive3/src/test/java/org/apache/phoenix/hive/PrimaryKeyDataTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hive;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InvalidClassException;
-import java.io.ObjectOutputStream;
-import java.io.Serializable;
-import java.util.HashMap;
-
-import org.junit.Test;
-
-public class PrimaryKeyDataTest {
- private static class Disallowed implements Serializable {
- private static final long serialVersionUID = 1L;
- }
-
- private byte[] serialize(Object o) throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
- oos.writeObject(o);
- }
- return baos.toByteArray();
- }
-
- @Test
- public void testSerde() throws Exception {
- HashMap<String,Object> data = new HashMap<>();
- data.put("one", 1);
- data.put("two", "two");
- data.put("three", 3);
-
- PrimaryKeyData pkData = new PrimaryKeyData(data);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- pkData.serialize(baos);
-
- PrimaryKeyData pkCopy = PrimaryKeyData.deserialize(new ByteArrayInputStream(baos.toByteArray()));
- assertEquals(data, pkCopy.getData());
- }
-
- @Test
- public void testDisallowedDeserialization() throws Exception {
- byte[] serializedMap = serialize(new HashMap<String,Object>());
- byte[] serializedClass = serialize(new Disallowed());
- byte[] serializedString = serialize("asdf");
-
- try {
- PrimaryKeyData.deserialize(new ByteArrayInputStream(serializedMap));
- fail("Expected an InvalidClassException");
- } catch (InvalidClassException e) {}
- try {
- PrimaryKeyData.deserialize(new ByteArrayInputStream(serializedClass));
- fail("Expected an InvalidClassException");
- } catch (InvalidClassException e) {}
- try {
- PrimaryKeyData.deserialize(new ByteArrayInputStream(serializedString));
- fail("Expected an InvalidClassException");
- } catch (InvalidClassException e) {}
- }
-}
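
The test pins down an allow-list deserializer: a raw HashMap, an arbitrary Serializable, and a bare String must all be rejected with InvalidClassException. A hedged, self-contained sketch of that pattern, combining a resolveClass() allow-list with a post-read type check (needed because bare Strings never pass through resolveClass()); AllowListSketch and Payload are invented names, and PrimaryKeyData's real implementation may differ in detail:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.Serializable;

public class AllowListSketch {

    // Invented stand-in for PrimaryKeyData.
    public static class Payload implements Serializable {
        private static final long serialVersionUID = 1L;
        final String value;
        Payload(String value) { this.value = value; }
    }

    public static Payload readChecked(byte[] bytes)
            throws IOException, ClassNotFoundException {
        ObjectInputStream ois =
                new ObjectInputStream(new ByteArrayInputStream(bytes)) {
            @Override
            protected Class<?> resolveClass(ObjectStreamClass desc)
                    throws IOException, ClassNotFoundException {
                // Reject unexpected classes before they are instantiated.
                if (!Payload.class.getName().equals(desc.getName())) {
                    throw new InvalidClassException(desc.getName(), "not allowed");
                }
                return super.resolveClass(desc);
            }
        };
        Object read = ois.readObject();
        // Bare Strings are read without a resolveClass() call, so a
        // post-read type check is still required.
        if (!(read instanceof Payload)) {
            throw new InvalidClassException(String.valueOf(read), "unexpected payload");
        }
        return (Payload) read;
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(new Payload("pk"));
        }
        System.out.println(readChecked(baos.toByteArray()).value); // pk
    }
}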
diff --git a/phoenix-kafka-base/phoenix4-kafka/pom.xml b/phoenix-kafka-base/phoenix4-kafka/pom.xml
index f6a130c..a26f65f 100644
--- a/phoenix-kafka-base/phoenix4-kafka/pom.xml
+++ b/phoenix-kafka-base/phoenix4-kafka/pom.xml
@@ -32,4 +32,40 @@
<artifactId>phoenix4-kafka</artifactId>
<name>Phoenix Kafka Connector for Phoenix 4</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix4-flume</artifactId>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-site-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-eclipse-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
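
For reference, the build change above follows the pattern this patch applies across the flume, kafka, and pig modules: the parent moves each fully configured plugin under pluginManagement, so nothing runs by default, and every child that needs the behavior opts in by redeclaring just the plugin coordinates. A minimal illustrative fragment pair, using the build-helper plugin coordinates from this patch:

<!-- Parent pom: configuration pinned once, under pluginManagement. -->
<build>
  <pluginManagement>
    <plugins>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>build-helper-maven-plugin</artifactId>
        <version>3.0.0</version>
        <!-- add-source / add-test-source executions live here,
             inherited by any child that opts in -->
      </plugin>
    </plugins>
  </pluginManagement>
</build>

<!-- Child pom: redeclaring the coordinates binds the managed config. -->
<build>
  <plugins>
    <plugin>
      <groupId>org.codehaus.mojo</groupId>
      <artifactId>build-helper-maven-plugin</artifactId>
    </plugin>
  </plugins>
</build>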
diff --git a/phoenix-kafka-base/phoenix5-kafka/pom.xml b/phoenix-kafka-base/phoenix5-kafka/pom.xml
index a7afad5..8c53b87 100644
--- a/phoenix-kafka-base/phoenix5-kafka/pom.xml
+++ b/phoenix-kafka-base/phoenix5-kafka/pom.xml
@@ -75,6 +75,39 @@
<artifactId>jetty-webapp</artifactId>
<version>${jetty.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix5-flume</artifactId>
+ </dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-site-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-eclipse-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-kafka-base/pom.xml b/phoenix-kafka-base/pom.xml
index 5e5a13c..5abb25b 100644
--- a/phoenix-kafka-base/pom.xml
+++ b/phoenix-kafka-base/pom.xml
@@ -189,137 +189,142 @@
<artifactId>kafka-tools</artifactId>
<version>${kafka.version}</version>
</dependency>
- <dependency>
- <groupId>org.apache.phoenix</groupId>
- <artifactId>phoenix4-flume</artifactId>
- </dependency>
</dependencies>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix4-flume</artifactId>
+ <version>6.0.0-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix5-flume</artifactId>
+ <version>6.0.0-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+
<build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <id>add-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/main/java</source>
- </sources>
- </configuration>
- </execution>
- <execution>
- <id>add-test-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-test-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/it/java</source>
- </sources>
- </configuration>
- </execution>
- </executions>
- </plugin>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/main/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/it/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
- <plugin>
- <artifactId>maven-resources-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-resources</id>
- <phase>generate-resources</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.build.directory}/test-classes
- </outputDirectory>
- <overwrite>true</overwrite>
- <resources>
- <resource>
- <directory>${project.parent.basedir}/src/it/resources</directory>
- </resource>
- </resources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <!-- Add the ant-generated sources to the source path -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-site-plugin</artifactId>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven.doxia</groupId>
- <artifactId>doxia-module-markdown</artifactId>
- <version>1.3</version>
- </dependency>
- <dependency>
- <groupId>lt.velykis.maven.skins</groupId>
- <artifactId>reflow-velocity-tools</artifactId>
- <version>1.0.0</version>
- </dependency>
- <dependency>
- <groupId>org.apache.velocity</groupId>
- <artifactId>velocity</artifactId>
- <version>1.7</version>
- </dependency>
- </dependencies>
- </plugin>
+ <plugin>
+ <artifactId>maven-resources-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-resources</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/test-classes
+ </outputDirectory>
+ <overwrite>true</overwrite>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/src/it/resources</directory>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <!-- Add the ant-generated sources to the source path -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-site-plugin</artifactId>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven.doxia</groupId>
+ <artifactId>doxia-module-markdown</artifactId>
+ <version>1.3</version>
+ </dependency>
+ <dependency>
+ <groupId>lt.velykis.maven.skins</groupId>
+ <artifactId>reflow-velocity-tools</artifactId>
+ <version>1.0.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <version>1.7</version>
+ </dependency>
+ </dependencies>
+ </plugin>
- <!-- Setup eclipse -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-eclipse-plugin</artifactId>
- <configuration>
- <buildcommands>
- <buildcommand>org.jamon.project.templateBuilder</buildcommand>
- <buildcommand>org.eclipse.jdt.core.javabuilder</buildcommand>
- </buildcommands>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- </plugin>
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>${maven-dependency-plugin.version}</version>
- </plugin>
+ <!-- Setup eclipse -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-eclipse-plugin</artifactId>
+ <configuration>
+ <buildcommands>
+ <buildcommand>org.jamon.project.templateBuilder</buildcommand>
+ <buildcommand>org.eclipse.jdt.core.javabuilder</buildcommand>
+ </buildcommands>
+ </configuration>
+ </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <finalName>phoenix-kafka-${project.version}-minimal</finalName>
- <shadedArtifactAttached>false</shadedArtifactAttached>
- <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
- <shadeTestJar>false</shadeTestJar>
- <artifactSet>
- <includes>
- <include>org.apache.phoenix:phoenix4-kafka</include>
- <include>org.apache.kafka:kafka-clients</include>
- <include>org.apache.phoenix:phoenix4-flume</include>
- </includes>
- </artifactSet>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <finalName>phoenix-kafka-${project.version}-minimal</finalName>
+ <shadedArtifactAttached>false</shadedArtifactAttached>
+ <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+ <shadeTestJar>false</shadeTestJar>
+ <artifactSet>
+ <includes>
+ <include>org.apache.phoenix:phoenix4-kafka</include>
+ <include>org.apache.kafka:kafka-clients</include>
+ <include>org.apache.phoenix:phoenix4-flume</include>
+ </includes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
<reporting>
diff --git a/phoenix-pig-base/phoenix4-pig/pom.xml b/phoenix-pig-base/phoenix4-pig/pom.xml
index 98bd8ca..7176709 100644
--- a/phoenix-pig-base/phoenix4-pig/pom.xml
+++ b/phoenix-pig-base/phoenix4-pig/pom.xml
@@ -32,4 +32,21 @@
<artifactId>phoenix4-pig</artifactId>
<name>Phoenix Pig Connector for Phoenix 4</name>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-pig-base/phoenix5-pig/pom.xml b/phoenix-pig-base/phoenix5-pig/pom.xml
index 44fe79a..747503a 100644
--- a/phoenix-pig-base/phoenix5-pig/pom.xml
+++ b/phoenix-pig-base/phoenix5-pig/pom.xml
@@ -77,5 +77,22 @@
</dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-pig-base/pom.xml b/phoenix-pig-base/pom.xml
index ae518f0..c7a9941 100644
--- a/phoenix-pig-base/pom.xml
+++ b/phoenix-pig-base/pom.xml
@@ -157,309 +157,303 @@
</dependencies>
<build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <id>add-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/main/java</source>
- </sources>
- </configuration>
- </execution>
- <execution>
- <id>add-test-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-test-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/test/java</source>
- <source>${project.parent.basedir}/src/it/java</source>
- </sources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- </plugin>
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>${maven-dependency-plugin.version}</version>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <finalName>phoenix-${project.version}-pig</finalName>
- <shadedArtifactAttached>false</shadedArtifactAttached>
- <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
- <shadeTestJar>false</shadeTestJar>
- <transformers>
- <transformer
- implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
- <resource>README.md</resource>
- <file>${project.basedir}/../README.md</file>
- </transformer>
- <transformer
- implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
- <resource>LICENSE.txt</resource>
- <file>${project.basedir}/../LICENSE</file>
- </transformer>
- <transformer
- implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
- <resource>NOTICE</resource>
- <file>${project.basedir}/../NOTICE</file>
- </transformer>
- </transformers>
- <artifactSet>
- <includes>
- <include>*:*</include>
- </includes>
- <excludes>
- <exclude>org.apache.phoenix:phoenix-client</exclude>
- <exclude>org.apache.pig:pig</exclude>
- <exclude>joda-time:joda-time</exclude>
- <exclude>xom:xom</exclude>
- </excludes>
- </artifactSet>
- <filters>
- <filter>
- <artifact>*:*</artifact>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/main/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/test/java</source>
+ <source>${project.parent.basedir}/src/it/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <finalName>phoenix-${project.version}-pig</finalName>
+ <shadedArtifactAttached>false</shadedArtifactAttached>
+ <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+ <shadeTestJar>false</shadeTestJar>
+ <transformers>
+ <transformer
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+ <resource>README.md</resource>
+ <file>${project.basedir}/../README.md</file>
+ </transformer>
+ <transformer
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+ <resource>LICENSE.txt</resource>
+ <file>${project.basedir}/../LICENSE</file>
+ </transformer>
+ <transformer
+ implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+ <resource>NOTICE</resource>
+ <file>${project.basedir}/../NOTICE</file>
+ </transformer>
+ </transformers>
+ <artifactSet>
+ <includes>
+ <include>*:*</include>
+ </includes>
<excludes>
- <exclude>META-INF/*.SF</exclude>
- <exclude>META-INF/*.DSA</exclude>
- <exclude>META-INF/*.RSA</exclude>
- <exclude>META-INF/license/*</exclude>
- <exclude>LICENSE.*</exclude>
- <exclude>NOTICE.*</exclude>
+ <exclude>org.apache.phoenix:phoenix-client</exclude>
+ <exclude>org.apache.pig:pig</exclude>
+ <exclude>joda-time:joda-time</exclude>
+ <exclude>xom:xom</exclude>
</excludes>
- </filter>
- </filters>
- <relocations>
- <!-- COM relocation -->
- <relocation>
- <pattern>com.codahale</pattern>
- <shadedPattern>${shaded.package}.com.codahale</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.fasterxml</pattern>
- <shadedPattern>${shaded.package}.com.fasterxml</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.google.common</pattern>
- <shadedPattern>${shaded.package}.com.google.common</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.jamesmurty</pattern>
- <shadedPattern>${shaded.package}.com.jamesmurty</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.jcraft</pattern>
- <shadedPattern>${shaded.package}.com.jcraft</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.lmax</pattern>
- <shadedPattern>${shaded.package}.com.lmax</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.sun.jersey</pattern>
- <shadedPattern>${shaded.package}.com.sun.jersey</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.thoughtworks</pattern>
- <shadedPattern>${shaded.package}.com.thoughtworks</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.yammer</pattern>
- <shadedPattern>${shaded.package}.com.yammer</shadedPattern>
- </relocation>
- <!-- IO relocations -->
- <relocation>
- <pattern>io.netty</pattern>
- <shadedPattern>${shaded.package}.io.netty</shadedPattern>
- </relocation>
- <!-- ORG relocations -->
- <relocation>
- <pattern>org.antlr</pattern>
- <shadedPattern>${shaded.package}.org.antlr</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.aopalliance</pattern>
- <shadedPattern>${shaded.package}.org.aopalliance</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.codehaus</pattern>
- <shadedPattern>${shaded.package}.org.codehaus</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.fusesource</pattern>
- <shadedPattern>${shaded.package}.org.fusesource</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.hamcrest</pattern>
- <shadedPattern>${shaded.package}.org.hamcrest</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.hsqldb</pattern>
- <shadedPattern>${shaded.package}.org.hsqldb</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.iq80</pattern>
- <shadedPattern>${shaded.package}.org.iq80</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.jamon</pattern>
- <shadedPattern>${shaded.package}.org.jamon</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.jboss</pattern>
- <shadedPattern>${shaded.package}.org.jboss</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.jcodings</pattern>
- <shadedPattern>${shaded.package}.org.jcodings</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.jets3t</pattern>
- <shadedPattern>${shaded.package}.org.jets3t</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.joni</pattern>
- <shadedPattern>${shaded.package}.org.joni</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.junit</pattern>
- <shadedPattern>${shaded.package}.org.junit</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.kosmix</pattern>
- <shadedPattern>${shaded.package}.org.kosmix</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.mortbay</pattern>
- <shadedPattern>${shaded.package}.org.mortbay</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.objectweb</pattern>
- <shadedPattern>${shaded.package}.org.objectweb</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.stringtemplate</pattern>
- <shadedPattern>${shaded.package}.org.stringtemplate</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.tukaani</pattern>
- <shadedPattern>${shaded.package}.org.tukaani</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.znerd</pattern>
- <shadedPattern>${shaded.package}.org.znerd</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.avro</pattern>
- <shadedPattern>${shaded.package}.org.apache.avro</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.commons</pattern>
- <shadedPattern>${shaded.package}.org.apache.commons</shadedPattern>
- <excludes>
- <exclude>org.apache.commons.csv.**</exclude>
- <exclude>org.apache.commons.logging.**</exclude>
- </excludes>
- </relocation>
- <relocation>
- <pattern>org.apache.directory</pattern>
- <shadedPattern>${shaded.package}.org.apache.directory</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.http</pattern>
- <shadedPattern>${shaded.package}.org.apache.http</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.jasper</pattern>
- <shadedPattern>${shaded.package}.org.apache.jasper</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.jute</pattern>
- <shadedPattern>${shaded.package}.org.apache.jute</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.mina</pattern>
- <shadedPattern>${shaded.package}.org.apache.mina</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.oro</pattern>
- <shadedPattern>${shaded.package}.org.apache.oro</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.taglibs</pattern>
- <shadedPattern>${shaded.package}.org.apache.taglibs</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.thrift</pattern>
- <shadedPattern>${shaded.package}.org.apache.thrift</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.tools</pattern>
- <shadedPattern>${shaded.package}.org.apache.tools</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.twill</pattern>
- <shadedPattern>${shaded.package}.org.apache.twill</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.velocity</pattern>
- <shadedPattern>${shaded.package}.org.apache.velocity</shadedPattern>
- </relocation>
- <relocation>
- <pattern>org.apache.zookeeper</pattern>
- <shadedPattern>${shaded.package}.org.apache.zookeeper</shadedPattern>
- </relocation>
- <!-- NET relocations -->
- <relocation>
- <pattern>net</pattern>
- <shadedPattern>${shaded.package}.net</shadedPattern>
- </relocation>
- <!-- Misc relocations -->
- <relocation>
- <pattern>antlr</pattern>
- <shadedPattern>${shaded.package}.antlr</shadedPattern>
- </relocation>
- <relocation>
- <pattern>it.unimi</pattern>
- <shadedPattern>${shaded.package}.it.unimi</shadedPattern>
- </relocation>
- <relocation>
- <pattern>jline</pattern>
- <shadedPattern>${shaded.package}.jline</shadedPattern>
- </relocation>
- <relocation>
- <pattern>junit</pattern>
- <shadedPattern>${shaded.package}.junit</shadedPattern>
- </relocation>
- </relocations>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
+ </artifactSet>
+ <filters>
+ <filter>
+ <artifact>*:*</artifact>
+ <excludes>
+ <exclude>META-INF/*.SF</exclude>
+ <exclude>META-INF/*.DSA</exclude>
+ <exclude>META-INF/*.RSA</exclude>
+ <exclude>META-INF/license/*</exclude>
+ <exclude>LICENSE.*</exclude>
+ <exclude>NOTICE.*</exclude>
+ </excludes>
+ </filter>
+ </filters>
+ <relocations>
+ <!-- COM relocation -->
+ <relocation>
+ <pattern>com.codahale</pattern>
+ <shadedPattern>${shaded.package}.com.codahale</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.fasterxml</pattern>
+ <shadedPattern>${shaded.package}.com.fasterxml</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.google.common</pattern>
+ <shadedPattern>${shaded.package}.com.google.common</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.jamesmurty</pattern>
+ <shadedPattern>${shaded.package}.com.jamesmurty</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.jcraft</pattern>
+ <shadedPattern>${shaded.package}.com.jcraft</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.lmax</pattern>
+ <shadedPattern>${shaded.package}.com.lmax</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.sun.jersey</pattern>
+ <shadedPattern>${shaded.package}.com.sun.jersey</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.thoughtworks</pattern>
+ <shadedPattern>${shaded.package}.com.thoughtworks</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.yammer</pattern>
+ <shadedPattern>${shaded.package}.com.yammer</shadedPattern>
+ </relocation>
+ <!-- IO relocations -->
+ <relocation>
+ <pattern>io.netty</pattern>
+ <shadedPattern>${shaded.package}.io.netty</shadedPattern>
+ </relocation>
+ <!-- ORG relocations -->
+ <relocation>
+ <pattern>org.antlr</pattern>
+ <shadedPattern>${shaded.package}.org.antlr</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.aopalliance</pattern>
+ <shadedPattern>${shaded.package}.org.aopalliance</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.codehaus</pattern>
+ <shadedPattern>${shaded.package}.org.codehaus</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.fusesource</pattern>
+ <shadedPattern>${shaded.package}.org.fusesource</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.hamcrest</pattern>
+ <shadedPattern>${shaded.package}.org.hamcrest</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.hsqldb</pattern>
+ <shadedPattern>${shaded.package}.org.hsqldb</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.iq80</pattern>
+ <shadedPattern>${shaded.package}.org.iq80</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.jamon</pattern>
+ <shadedPattern>${shaded.package}.org.jamon</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.jboss</pattern>
+ <shadedPattern>${shaded.package}.org.jboss</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.jcodings</pattern>
+ <shadedPattern>${shaded.package}.org.jcodings</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.jets3t</pattern>
+ <shadedPattern>${shaded.package}.org.jets3t</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.joni</pattern>
+ <shadedPattern>${shaded.package}.org.joni</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.junit</pattern>
+ <shadedPattern>${shaded.package}.org.junit</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.kosmix</pattern>
+ <shadedPattern>${shaded.package}.org.kosmix</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.mortbay</pattern>
+ <shadedPattern>${shaded.package}.org.mortbay</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.objectweb</pattern>
+ <shadedPattern>${shaded.package}.org.objectweb</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.stringtemplate</pattern>
+ <shadedPattern>${shaded.package}.org.stringtemplate</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.tukaani</pattern>
+ <shadedPattern>${shaded.package}.org.tukaani</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.znerd</pattern>
+ <shadedPattern>${shaded.package}.org.znerd</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.avro</pattern>
+ <shadedPattern>${shaded.package}.org.apache.avro</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.commons</pattern>
+ <shadedPattern>${shaded.package}.org.apache.commons</shadedPattern>
+ <excludes>
+ <exclude>org.apache.commons.csv.**</exclude>
+ <exclude>org.apache.commons.logging.**</exclude>
+ </excludes>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.directory</pattern>
+ <shadedPattern>${shaded.package}.org.apache.directory</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.http</pattern>
+ <shadedPattern>${shaded.package}.org.apache.http</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.jasper</pattern>
+ <shadedPattern>${shaded.package}.org.apache.jasper</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.jute</pattern>
+ <shadedPattern>${shaded.package}.org.apache.jute</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.mina</pattern>
+ <shadedPattern>${shaded.package}.org.apache.mina</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.oro</pattern>
+ <shadedPattern>${shaded.package}.org.apache.oro</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.taglibs</pattern>
+ <shadedPattern>${shaded.package}.org.apache.taglibs</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.thrift</pattern>
+ <shadedPattern>${shaded.package}.org.apache.thrift</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.tools</pattern>
+ <shadedPattern>${shaded.package}.org.apache.tools</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.twill</pattern>
+ <shadedPattern>${shaded.package}.org.apache.twill</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.velocity</pattern>
+ <shadedPattern>${shaded.package}.org.apache.velocity</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>org.apache.zookeeper</pattern>
+ <shadedPattern>${shaded.package}.org.apache.zookeeper</shadedPattern>
+ </relocation>
+ <!-- NET relocations -->
+ <relocation>
+ <pattern>net</pattern>
+ <shadedPattern>${shaded.package}.net</shadedPattern>
+ </relocation>
+ <!-- Misc relocations -->
+ <relocation>
+ <pattern>antlr</pattern>
+ <shadedPattern>${shaded.package}.antlr</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>it.unimi</pattern>
+ <shadedPattern>${shaded.package}.it.unimi</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>jline</pattern>
+ <shadedPattern>${shaded.package}.jline</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>junit</pattern>
+ <shadedPattern>${shaded.package}.junit</shadedPattern>
+ </relocation>
+ </relocations>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
</project>
diff --git a/phoenix-spark-base/phoenix4-spark/pom.xml b/phoenix-spark-base/phoenix4-spark/pom.xml
index b1bd274..f5d27ae 100644
--- a/phoenix-spark-base/phoenix4-spark/pom.xml
+++ b/phoenix-spark-base/phoenix4-spark/pom.xml
@@ -39,4 +39,29 @@
</dependency>
</dependencies>
+ <build>
+ <plugins>
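+ <!-- Versions and executions are inherited from pluginManagement in the parent poms -->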
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.scalatest</groupId>
+ <artifactId>scalatest-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-spark-base/phoenix5-spark/pom.xml b/phoenix-spark-base/phoenix5-spark/pom.xml
index 3587c1c..308eb54 100644
--- a/phoenix-spark-base/phoenix5-spark/pom.xml
+++ b/phoenix-spark-base/phoenix5-spark/pom.xml
@@ -71,4 +71,29 @@
</dependencies>
</dependencyManagement>
+ <build>
+ <plugins>
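+ <!-- Versions and executions are inherited from pluginManagement in the parent poms -->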
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.scalatest</groupId>
+ <artifactId>scalatest-maven-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+
</project>
\ No newline at end of file
diff --git a/phoenix-spark-base/pom.xml b/phoenix-spark-base/pom.xml
index a92b445..9809a5d 100644
--- a/phoenix-spark-base/pom.xml
+++ b/phoenix-spark-base/pom.xml
@@ -485,176 +485,175 @@
</dependencies>
<build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <id>add-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/main/java</source>
- <source>${project.parent.basedir}/src/main/scala</source>
- </sources>
- </configuration>
- </execution>
- <execution>
- <id>add-test-source</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>add-test-source</goal>
- </goals>
- <configuration>
- <sources>
- <source>${project.parent.basedir}/src/test/java</source>
- <source>${project.parent.basedir}/src/it/java</source>
- <source>${project.parent.basedir}/src/test/scala</source>
- <source>${project.parent.basedir}/src/it/scala</source>
- </sources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-resources-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-resources</id>
- <phase>generate-resources</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.build.directory}/test-classes
- </outputDirectory>
- <overwrite>true</overwrite>
- <resources>
- <resource>
- <directory>${project.parent.basedir}/src/it/resources</directory>
- </resource>
- </resources>
- </configuration>
- </execution>
- <execution>
- <id>copy-resources2</id>
- <phase>generate-resources</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.build.directory}/classes
- </outputDirectory>
- <overwrite>true</overwrite>
- <resources>
- <resource>
- <directory>${project.parent.basedir}/src/main/resources</directory>
- </resource>
- </resources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.8</source>
- <target>1.8</target>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-failsafe-plugin</artifactId>
- </plugin>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.4.4</version>
- <configuration>
- <charset>${project.build.sourceEncoding}</charset>
- <jvmArgs>
- <jvmArg>-Xmx1024m</jvmArg>
- </jvmArgs>
- <scalaVersion>${scala.version}</scalaVersion>
- <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
- </configuration>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>add-source</goal>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
+ <pluginManagement>
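+ <!-- Shared configuration only; the phoenix4-spark and phoenix5-spark modules declare these plugins in their own build sections -->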
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/main/java</source>
+ <source>${project.parent.basedir}/src/main/scala</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.parent.basedir}/src/test/java</source>
+ <source>${project.parent.basedir}/src/it/java</source>
+ <source>${project.parent.basedir}/src/test/scala</source>
+ <source>${project.parent.basedir}/src/it/scala</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-resources-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-resources</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/test-classes
+ </outputDirectory>
+ <overwrite>true</overwrite>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/src/it/resources</directory>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>copy-resources2</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/classes
+ </outputDirectory>
+ <overwrite>true</overwrite>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/src/main/resources</directory>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ <version>3.4.4</version>
+ <configuration>
+ <charset>${project.build.sourceEncoding}</charset>
+ <jvmArgs>
+ <jvmArg>-Xmx1024m</jvmArg>
+ </jvmArgs>
+ <scalaVersion>${scala.version}</scalaVersion>
+ <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
+ </configuration>
+ <executions>
+ <execution>
+ <id>scala-compile-first</id>
+ <phase>process-resources</phase>
+ <goals>
+ <goal>add-source</goal>
+ <goal>compile</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>scala-test-compile</id>
+ <phase>process-test-resources</phase>
+ <goals>
+ <goal>testCompile</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
- <plugin>
- <groupId>org.scalatest</groupId>
- <artifactId>scalatest-maven-plugin</artifactId>
- <version>1.0</version>
- <configuration>
- <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
- <junitxml>.</junitxml>
- <filereports>WDF TestSuite.txt</filereports>
- <skipTests>true</skipTests>
- </configuration>
- <executions>
- <execution>
- <id>test</id>
- <phase>test</phase>
- <goals>
- <goal>test</goal>
- </goals>
- </execution>
- <execution>
- <id>integration-test</id>
- <phase>integration-test</phase>
- <goals>
- <goal>test</goal>
- </goals>
- <configuration>
- <!-- Need this false until we can switch to JUnit 4.13 due to
- https://github.com/junit-team/junit4/issues/1223
- -->
- <parallel>false</parallel>
- <tagsToExclude>Integration-Test</tagsToExclude>
- <argLine>-Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <executions>
- <execution>
- <id>empty-javadoc-jar</id>
- <phase>package</phase>
- <goals>
- <goal>jar</goal>
- </goals>
- <configuration>
- <classifier>javadoc</classifier>
- <classesDirectory>${basedir}/javadoc</classesDirectory>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
+ <plugin>
+ <groupId>org.scalatest</groupId>
+ <artifactId>scalatest-maven-plugin</artifactId>
+ <version>1.0</version>
+ <configuration>
+ <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+ <junitxml>.</junitxml>
+ <filereports>WDF TestSuite.txt</filereports>
+ <skipTests>true</skipTests>
+ </configuration>
+ <executions>
+ <execution>
+ <id>test</id>
+ <phase>test</phase>
+ <goals>
+ <goal>test</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>integration-test</id>
+ <phase>integration-test</phase>
+ <goals>
+ <goal>test</goal>
+ </goals>
+ <configuration>
+ <!-- Need this false until we can switch to JUnit 4.13 due to
+ https://github.com/junit-team/junit4/issues/1223
+ -->
+ <parallel>false</parallel>
+ <tagsToExclude>Integration-Test</tagsToExclude>
+ <argLine>-Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>empty-javadoc-jar</id>
+ <phase>package</phase>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ <configuration>
+ <classifier>javadoc</classifier>
+ <classesDirectory>${basedir}/javadoc</classesDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
</project>
diff --git a/phoenix4-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java b/phoenix4-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
index 4d35543..ad841f6 100644
--- a/phoenix4-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
+++ b/phoenix4-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.RegionSizeCalculator;
-import org.junit.Assert;
import java.io.IOException;
@@ -50,4 +49,12 @@
public static byte[] getTableName(byte[] tableNameBytes) {
return tableNameBytes;
}
+
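+ /** Always true: this is the Phoenix 4 compatibility implementation. */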
+ public static boolean isPhoenix4() {
+ return true;
+ }
+
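+ /** Always false: this is the Phoenix 4 compatibility implementation. */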
+ public static boolean isPhoenix5() {
+ return false;
+ }
}
diff --git a/phoenix5-compat/pom.xml b/phoenix5-compat/pom.xml
index 729e923..4e8737e 100644
--- a/phoenix5-compat/pom.xml
+++ b/phoenix5-compat/pom.xml
@@ -35,7 +35,7 @@
<properties>
<phoenix.version>5.1.0-SNAPSHOT</phoenix.version>
- <hbase.version>2.2.4</hbase.version>
+ <hbase.version>2.1.9</hbase.version>
</properties>
<build>
diff --git a/phoenix5-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java b/phoenix5-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
index 0fc7988..1a3fb51 100644
--- a/phoenix5-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
+++ b/phoenix5-compat/src/main/java/org/apache/phoenix/compat/CompatUtil.java
@@ -49,4 +49,12 @@
public static TableName getTableName(byte[] tableNameBytes) {
return TableName.valueOf(tableNameBytes);
}
+
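+ /** Always false: this is the Phoenix 5 compatibility implementation. */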
+ public static boolean isPhoenix4() {
+ return false;
+ }
+
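+ /** Always true: this is the Phoenix 5 compatibility implementation. */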
+ public static boolean isPhoenix5() {
+ return true;
+ }
}
diff --git a/pom.xml b/pom.xml
index 4de3c0c..528fff4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,8 +29,7 @@
<module>phoenix-pig-base</module>
<module>phoenix-kafka-base</module>
<module>phoenix-spark-base</module>
- <module>phoenix-hive</module>
- <module>phoenix-hive3</module>
+ <module>phoenix-hive-base</module>
</modules>
<repositories>
@@ -72,7 +71,9 @@
<jdk.version>1.7</jdk.version>
<!-- Dependency versions -->
- <hive.version>1.2.1</hive.version>
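+ <!-- hive.version defaults to the Hive 3 line; the Hive 2 modules build against hive2.version -->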
+ <hive3.version>3.1.2</hive3.version>
+ <hive2.version>2.3.7</hive2.version>
+ <hive.version>${hive3.version}</hive.version>
<pig.version>0.13.0</pig.version>
<log4j.version>1.2.17</log4j.version>
<disruptor.version>3.3.6</disruptor.version>
@@ -423,6 +424,9 @@
<goals>
<goal>test-jar</goal>
</goals>
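+ <!-- skipIfEmpty prevents creating empty test jars for modules without test classes -->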
+ <configuration>
+ <skipIfEmpty>true</skipIfEmpty>
+ </configuration>
</execution>
</executions>
</plugin>