Merge trunk into branch
git-svn-id: https://svn.apache.org/repos/asf/hive/branches/maven@1536501 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/.gitignore b/.gitignore
index c0e9b3c..a4c23af 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,7 +13,8 @@
*.iml
*.ipr
*.iws
-ql/derby.log
derby.log
+datanucleus.log
.arc
-ql/TempStatsStore
+TempStatsStore/
+target/
diff --git a/ant/pom.xml b/ant/pom.xml
new file mode 100644
index 0000000..6fdd375
--- /dev/null
+++ b/ant/pom.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-ant</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Ant Utilities</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.ant</groupId>
+ <artifactId>ant</artifactId>
+ <version>${ant.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <version>${velocity.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <sourceDirectory>${basedir}/src</sourceDirectory>
+ </build>
+
+</project>
diff --git a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
index 2ee0451..9703c97 100644
--- a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
+++ b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
@@ -452,7 +452,7 @@ static String joinPath(String...parts) {
public void init(String templateBaseDir, String buildDir) {
File generationDirectory = new File(templateBaseDir);
- String buildPath = joinPath(buildDir, "ql", "gen", "vector");
+ String buildPath = joinPath(buildDir, "generated-sources", "java");
File exprOutput = new File(joinPath(buildPath, "org", "apache", "hadoop",
"hive", "ql", "exec", "vector", "expressions", "gen"));
@@ -470,7 +470,7 @@ public void init(String templateBaseDir, String buildDir) {
File testCodeOutput =
new File(
- joinPath(buildDir, "ql", "test", "src", "org",
+ joinPath(buildDir, "generated-test-sources", "java", "org",
"apache", "hadoop", "hive", "ql", "exec", "vector",
"expressions", "gen"));
testCodeGen = new GenVectorTestCode(testCodeOutput.getAbsolutePath(),
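
Annotation: the two hunks above retarget the generator's output from the old Ant tree (build/ql/gen/vector and build/ql/test/src) to Maven's conventional target/generated-sources/java and target/generated-test-sources/java layouts. Only the signature of joinPath appears in the hunk header, so the following is a hypothetical sketch of such a varargs helper, not the actual Hive implementation:

    import java.io.File;

    // Hypothetical sketch of a joinPath(String...) helper like the one named
    // in the hunk header above; the real body is not shown in this diff.
    class PathJoiner {
      static String joinPath(String... parts) {
        StringBuilder sb = new StringBuilder();
        for (String part : parts) {
          if (sb.length() > 0) {
            sb.append(File.separator);
          }
          sb.append(part);
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        // e.g. prints target/generated-sources/java on Linux
        System.out.println(joinPath("target", "generated-sources", "java"));
      }
    }
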
diff --git a/beeline/pom.xml b/beeline/pom.xml
new file mode 100644
index 0000000..f34cfeb
--- /dev/null
+++ b/beeline/pom.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-beeline</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Beeline</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ <scope>runtime</scope>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>${jline.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ </build>
+
+</project>
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index da9977e..bc2f4d8 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -95,7 +95,7 @@
*/
public class BeeLine {
private static final ResourceBundle resourceBundle =
- ResourceBundle.getBundle(BeeLine.class.getName());
+ ResourceBundle.getBundle(BeeLine.class.getSimpleName());
private final BeeLineSignalHandler signalHandler = null;
private static final String separator = System.getProperty("line.separator");
private boolean exit = false;
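
Annotation: switching from getName() to getSimpleName() changes the bundle lookup key from the fully qualified "org.apache.hive.beeline.BeeLine" to plain "BeeLine", so the properties file resolves at the classpath root rather than inside the package directory. A minimal sketch of the difference, assuming BeeLine.properties sits at the root of the jar after the Maven resource move:

    import java.util.ResourceBundle;

    // Illustration only: the key passed to getBundle decides where the
    // .properties file must live on the classpath.
    class BundleLookupDemo {
      public static void main(String[] args) {
        // "org.apache.hive.beeline.BeeLine" -> org/apache/hive/beeline/BeeLine.properties
        // "BeeLine"                         -> BeeLine.properties at the classpath root
        ResourceBundle rb = ResourceBundle.getBundle("BeeLine");
        System.out.println(rb.getString("cmd-usage")); // "cmd-usage" is a made-up key
      }
    }
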
diff --git a/beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java b/beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java
index 338dc42..71c7756 100644
--- a/beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java
+++ b/beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hive.beeline.src.test;
+package org.apache.hive.beeline;
import java.io.BufferedWriter;
import java.io.File;
@@ -48,7 +48,7 @@ protected void setUp() throws Exception {
System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
"jdbc:derby:" + testMetastoreDB + ";create=true");
hiveConf = new HiveConf(this.getClass());
- schemaTool = new HiveSchemaTool(System.getProperty("hive.home"), hiveConf, "derby");
+ schemaTool = new HiveSchemaTool(System.getProperty("test.tmp.dir"), hiveConf, "derby");
System.setProperty("beeLine.system.exit", "true");
}
diff --git a/cli/pom.xml b/cli/pom.xml
new file mode 100644
index 0000000..3e2d102
--- /dev/null
+++ b/cli/pom.xml
@@ -0,0 +1,159 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-cli</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive CLI</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>${commons-cli.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>${jline.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ </build>
+
+</project>
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 3e1f491..a0e9505 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -156,23 +156,22 @@ public void testRun() throws Exception {
historyFile.delete();
}
HiveConf configuration = new HiveConf();
- CliSessionState ss = new CliSessionState(configuration);
- CliSessionState.start(ss);
- String[] args = {};
+ configuration.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true);
PrintStream oldOut = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
-
PrintStream oldErr = System.err;
ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
System.setErr(new PrintStream(dataErr));
-
+ CliSessionState ss = new CliSessionState(configuration);
+ CliSessionState.start(ss);
+ String[] args = {};
try {
new FakeCliDriver().run(args);
- assertTrue(dataOut.toString().contains("test message"));
- assertTrue(dataErr.toString().contains("Hive history file="));
- assertTrue(dataErr.toString().contains("File: fakeFile is not a file."));
+ assertTrue(dataOut.toString(), dataOut.toString().contains("test message"));
+ assertTrue(dataErr.toString(), dataErr.toString().contains("Hive history file="));
+ assertTrue(dataErr.toString(), dataErr.toString().contains("File: fakeFile is not a file."));
dataOut.reset();
dataErr.reset();
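
Annotation: besides moving the stream capture ahead of CliSessionState.start(ss) so the session banner is captured too, the hunk switches to JUnit's two-argument assertTrue(String message, boolean condition) overload, so a failed assertion reports the captured output instead of a bare AssertionError. In miniature (the sample string is invented):

    import static org.junit.Assert.assertTrue;

    // The first argument to assertTrue becomes the failure message, so a
    // mismatch shows what was actually printed.
    class AssertMessageDemo {
      public static void main(String[] args) {
        String captured = "Hive history file=/tmp/fake"; // invented sample output
        assertTrue(captured, captured.contains("Hive history file="));
      }
    }
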
diff --git a/common/pom.xml b/common/pom.xml
new file mode 100644
index 0000000..b82521a
--- /dev/null
+++ b/common/pom.xml
@@ -0,0 +1,164 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-common</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Common</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+    <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>${commons-cli.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>${log4j.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-compress</artifactId>
+ <version>${commons-compress.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <scriptSourceDirectory>${basedir}/src/scripts</scriptSourceDirectory>
+ <testResources>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ </testResource>
+ </testResources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-version-annotation</id>
+ <phase>generate-sources</phase>
+ <configuration>
+ <target>
+ <exec executable="bash" failonerror="true">
+ <arg value="${basedir}/src/scripts/saveVersion.sh"/>
+ <arg value="${project.version}"/>
+ <arg value="${hive.version.shortname}"/>
+ <arg value="${basedir}/src"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/common/src/java/conf/hive-log4j.properties b/common/src/java/conf/hive-log4j.properties
index 6a95ec0..41dffa9 100644
--- a/common/src/java/conf/hive-log4j.properties
+++ b/common/src/java/conf/hive-log4j.properties
@@ -17,7 +17,7 @@
# Define some default values that can be overridden by system properties
hive.log.threshold=ALL
hive.root.logger=INFO,DRFA
-hive.log.dir=/tmp/${user.name}
+hive.log.dir=${java.io.tmpdir}/${user.name}
hive.log.file=hive.log
# Define the root logger to the system property "hadoop.root.logger".
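
Annotation: log4j 1.2 expands ${...} placeholders in its configuration against Java system properties, so the change above makes the default log directory follow the JVM's temp directory instead of a hard-coded /tmp. A quick sketch of the values being substituted (exact paths are platform-dependent):

    // The same system properties log4j consults when expanding
    // ${java.io.tmpdir} and ${user.name} in hive-log4j.properties.
    class TmpDirDemo {
      public static void main(String[] args) {
        String tmp = System.getProperty("java.io.tmpdir"); // e.g. /tmp on Linux
        String user = System.getProperty("user.name");
        System.out.println(tmp + "/" + user + "/hive.log");
      }
    }
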
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 23b9921..8bab44f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.common;
import java.net.URL;
+import java.util.UUID;
import java.io.File;
import java.io.IOException;
import java.io.FileNotFoundException;
@@ -89,8 +90,11 @@ private static String initHiveLog4jCommon(ConfVars confVarName)
      // property specified file found in local file system
// use the specified file
if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
- System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
- HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+ String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
+ if(queryId == null || (queryId = queryId.trim()).isEmpty()) {
+ queryId = "unknown-" + System.currentTimeMillis();
+ }
+ System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
}
LogManager.resetConfiguration();
PropertyConfigurator.configure(log4jFileName);
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index abfde42..07e2a56 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -759,7 +759,7 @@ public static enum ConfVars {
// Number of async threads
HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 50),
// Number of seconds HiveServer2 shutdown will wait for async threads to terminate
- HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10),
+ HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10L),
// HiveServer2 auth configuration
@@ -974,12 +974,12 @@ public void verifyAndSet(String name, String value) throws IllegalArgumentExcept
}
public static int getIntVar(Configuration conf, ConfVars var) {
- assert (var.valClass == Integer.class);
+ assert (var.valClass == Integer.class) : var.varname;
return conf.getInt(var.varname, var.defaultIntVal);
}
public static void setIntVar(Configuration conf, ConfVars var, int val) {
- assert (var.valClass == Integer.class);
+ assert (var.valClass == Integer.class) : var.varname;
conf.setInt(var.varname, val);
}
@@ -992,7 +992,7 @@ public void setIntVar(ConfVars var, int val) {
}
public static long getLongVar(Configuration conf, ConfVars var) {
- assert (var.valClass == Long.class);
+ assert (var.valClass == Long.class) : var.varname;
return conf.getLong(var.varname, var.defaultLongVal);
}
@@ -1001,7 +1001,7 @@ public static long getLongVar(Configuration conf, ConfVars var, long defaultVal)
}
public static void setLongVar(Configuration conf, ConfVars var, long val) {
- assert (var.valClass == Long.class);
+ assert (var.valClass == Long.class) : var.varname;
conf.setLong(var.varname, val);
}
@@ -1014,7 +1014,7 @@ public void setLongVar(ConfVars var, long val) {
}
public static float getFloatVar(Configuration conf, ConfVars var) {
- assert (var.valClass == Float.class);
+ assert (var.valClass == Float.class) : var.varname;
return conf.getFloat(var.varname, var.defaultFloatVal);
}
@@ -1023,7 +1023,7 @@ public static float getFloatVar(Configuration conf, ConfVars var, float defaultV
}
public static void setFloatVar(Configuration conf, ConfVars var, float val) {
- assert (var.valClass == Float.class);
+ assert (var.valClass == Float.class) : var.varname;
ShimLoader.getHadoopShims().setFloatConf(conf, var.varname, val);
}
@@ -1036,7 +1036,7 @@ public void setFloatVar(ConfVars var, float val) {
}
public static boolean getBoolVar(Configuration conf, ConfVars var) {
- assert (var.valClass == Boolean.class);
+ assert (var.valClass == Boolean.class) : var.varname;
return conf.getBoolean(var.varname, var.defaultBoolVal);
}
@@ -1045,7 +1045,7 @@ public static boolean getBoolVar(Configuration conf, ConfVars var, boolean defau
}
public static void setBoolVar(Configuration conf, ConfVars var, boolean val) {
- assert (var.valClass == Boolean.class);
+ assert (var.valClass == Boolean.class) : var.varname;
conf.setBoolean(var.varname, val);
}
@@ -1058,7 +1058,7 @@ public void setBoolVar(ConfVars var, boolean val) {
}
public static String getVar(Configuration conf, ConfVars var) {
- assert (var.valClass == String.class);
+ assert (var.valClass == String.class) : var.varname;
return conf.get(var.varname, var.defaultVal);
}
@@ -1067,7 +1067,7 @@ public static String getVar(Configuration conf, ConfVars var, String defaultVal)
}
public static void setVar(Configuration conf, ConfVars var, String val) {
- assert (var.valClass == String.class);
+ assert (var.valClass == String.class) : var.varname;
conf.set(var.varname, val);
}
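
Annotation: each hunk above attaches a detail message to an existing assert using Java's assert condition : expression form, so a tripped type check names the offending configuration variable. A compact illustration (run with java -ea; the ConfVars enum below is a stand-in, not the real HiveConf.ConfVars):

    // Run with: java -ea AssertDetailDemo
    // ConfVars here is a stand-in enum, not org.apache.hadoop.hive.conf.HiveConf.ConfVars.
    class AssertDetailDemo {
      enum ConfVars { EXAMPLE_VAR }

      public static void main(String[] args) {
        Class<?> valClass = String.class;
        ConfVars var = ConfVars.EXAMPLE_VAR;
        // The expression after ':' becomes the AssertionError's message,
        // so this fails as "java.lang.AssertionError: EXAMPLE_VAR".
        assert (valClass == Integer.class) : var;
      }
    }
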
diff --git a/common/src/java/org/apache/hive/common/util/HiveTestUtils.java b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java
new file mode 100644
index 0000000..db34494
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.net.URL;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HiveTestUtils {
+
+ public static String getFileFromClasspath(String name) {
+ URL url = ClassLoader.getSystemResource(name);
+ if (url == null) {
+ throw new IllegalArgumentException("Could not find " + name);
+ }
+ return url.getPath();
+ }
+}
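
Annotation: the new helper resolves test resources through the classpath instead of the retired test.build.resources system property; later hunks (TestHiveConf, TestHiveLogging) call it exactly this way. A usage sketch, assuming hive-site.xml is on the test classpath via src/test/resources:

    import org.apache.hive.common.util.HiveTestUtils;

    // Resolve a resource that Maven copies onto the test classpath; throws
    // IllegalArgumentException if the name cannot be found.
    class HiveTestUtilsDemo {
      public static void main(String[] args) {
        String path = HiveTestUtils.getFileFromClasspath("hive-site.xml");
        System.out.println("hive-site.xml resolved to: " + path);
      }
    }
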
diff --git a/common/src/scripts/saveVersion.sh b/common/src/scripts/saveVersion.sh
index 5517496..252874f 100644
--- a/common/src/scripts/saveVersion.sh
+++ b/common/src/scripts/saveVersion.sh
@@ -33,7 +33,7 @@
cwd=`dirname $dir`
if [ "$revision" = "" ]; then
if git rev-parse HEAD 2>/dev/null > /dev/null ; then
- revision=`git log -1 --pretty=format:"%H" ../`
+ revision=`git log -1 --pretty=format:"%H"`
hostname=`hostname`
branch=`git branch | sed -n -e 's/^* //p'`
url="git://${hostname}${cwd}"
@@ -57,7 +57,13 @@
url="file://$cwd"
fi
-srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+if [ -x /sbin/md5 ]; then
+ md5="/sbin/md5"
+else
+ md5="md5sum"
+fi
+
+srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs $md5 | $md5 | cut -d ' ' -f 1`
mkdir -p $src_dir/gen/org/apache/hive/common
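
Annotation: the script change probes for BSD's /sbin/md5 before falling back to GNU md5sum, so the source checksum also works on macOS. For reference, the digest itself is plain MD5, the same algorithm the JDK exposes; a small Java equivalent of hashing one input:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    // MD5 of a fixed string, matching what `echo -n hello | md5sum` reports.
    class Md5Demo {
      public static void main(String[] args) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] digest = md5.digest("hello".getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
          hex.append(String.format("%02x", b));
        }
        System.out.println(hex); // 5d41402abc4b2a76b9719d911017c592
      }
    }
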
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 25cefef..a31238b 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Assert;
import org.junit.Test;
@@ -33,8 +34,7 @@
public class TestHiveConf {
@Test
public void testHiveSitePath() throws Exception {
- String expectedPath =
- new Path(System.getProperty("test.build.resources") + "/hive-site.xml").toUri().getPath();
+ String expectedPath = HiveTestUtils.getFileFromClasspath("hive-site.xml");
Assert.assertEquals(expectedPath, new HiveConf().getHiveSiteLocation().getPath());
}
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
index 1a5dd72..ebd122b 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -24,7 +24,7 @@
import junit.framework.TestCase;
import org.apache.hadoop.hive.common.LogUtils;
-import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+import org.apache.hive.common.util.HiveTestUtils;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
/**
@@ -43,53 +44,34 @@ public TestHiveLogging() {
process = null;
}
- private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) {
- System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname,
- System.getProperty("test.build.resources") + "/" + hiveLog4jTest);
- System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname,
- System.getProperty("test.build.resources") + "/" + hiveExecLog4jTest);
+ private void configLog(String hiveLog4jTest, String hiveExecLog4jTest)
+ throws Exception {
+ String expectedLog4jTestPath = HiveTestUtils.getFileFromClasspath(hiveLog4jTest);
+ String expectedLog4jExecPath = HiveTestUtils.getFileFromClasspath(hiveExecLog4jTest);
+ System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname, expectedLog4jTestPath);
+ System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname, expectedLog4jExecPath);
- String expectedLog4jPath = System.getProperty("test.build.resources")
- + "/" + hiveLog4jTest;
- String expectedLog4jExecPath = System.getProperty("test.build.resources")
- + "/" + hiveExecLog4jTest;
-
- try {
- LogUtils.initHiveLog4j();
- } catch (LogInitializationException e) {
- }
+ LogUtils.initHiveLog4j();
HiveConf conf = new HiveConf();
- assertEquals(expectedLog4jPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE));
+ assertEquals(expectedLog4jTestPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE));
assertEquals(expectedLog4jExecPath, conf.getVar(ConfVars.HIVE_EXEC_LOG4J_FILE));
}
- private void runCmd(String cmd) {
- try {
- process = runTime.exec(cmd);
- } catch (IOException e) {
- e.printStackTrace();
- }
- try {
- process.waitFor();
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
+ private void runCmd(String cmd) throws Exception {
+ process = runTime.exec(cmd);
+ process.waitFor();
}
- private void getCmdOutput(String logFile) {
+ private void getCmdOutput(String logFile) throws Exception {
boolean logCreated = false;
BufferedReader buf = new BufferedReader(
new InputStreamReader(process.getInputStream()));
String line = "";
- try {
- while((line = buf.readLine()) != null) {
- if (line.equals(logFile)) {
- logCreated = true;
- }
+ while((line = buf.readLine()) != null) {
+ if (line.equals(logFile)) {
+ logCreated = true;
}
- } catch (IOException e) {
- e.printStackTrace();
}
assertEquals(true, logCreated);
}
@@ -112,12 +94,12 @@ private void RunTest(String cleanCmd, String findCmd, String logFile,
}
public void testHiveLogging() throws Exception {
- // customized log4j config log file to be: /tmp/hiveLog4jTest.log
- String customLogPath = "/tmp/";
+    // customized log4j config log file to be: /tmp/${user.name}-TestHiveLogging/hiveLog4jTest.log
+ String customLogPath = "/tmp/" + System.getProperty("user.name") + "-TestHiveLogging/";
String customLogName = "hiveLog4jTest.log";
String customLogFile = customLogPath + customLogName;
String customCleanCmd = "rm -rf " + customLogFile;
- String customFindCmd = "find /tmp -name " + customLogName;
+ String customFindCmd = "find " + customLogPath + " -name " + customLogName;
RunTest(customCleanCmd, customFindCmd, customLogFile,
"hive-log4j-test.properties", "hive-exec-log4j-test.properties");
}
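
Annotation: the refactor above removes the try/catch blocks that printed and swallowed IOException and InterruptedException, declaring throws Exception instead, so a failed exec surfaces immediately as a test error with its original stack trace. The pattern in miniature:

    // Before: exceptions were caught and printed, and the test limped on to a
    // less informative assert. After: they propagate and fail the test directly.
    class PropagateDemo {
      private Process process;

      private void runCmd(String cmd) throws Exception {
        process = Runtime.getRuntime().exec(cmd);
        process.waitFor();
      }

      public static void main(String[] args) throws Exception {
        new PropagateDemo().runCmd("echo hello");
      }
    }
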
diff --git a/common/src/test/resources/hive-exec-log4j-test.properties b/common/src/test/resources/hive-exec-log4j-test.properties
index ece5875..29eceb2 100644
--- a/common/src/test/resources/hive-exec-log4j-test.properties
+++ b/common/src/test/resources/hive-exec-log4j-test.properties
@@ -1,6 +1,6 @@
# Define some default values that can be overridden by system properties
hive.root.logger=INFO,FA
-hive.log.dir=/tmp
+hive.log.dir=/tmp/${user.name}-TestHiveLogging
hive.log.file=hiveExecLog4jTest.log
# Define the root logger to the system property "hadoop.root.logger".
diff --git a/common/src/test/resources/hive-log4j-test.properties b/common/src/test/resources/hive-log4j-test.properties
index 2f08e9a..c6f7cc8 100644
--- a/common/src/test/resources/hive-log4j-test.properties
+++ b/common/src/test/resources/hive-log4j-test.properties
@@ -1,6 +1,6 @@
# Define some default values that can be overridden by system properties
hive.root.logger=WARN,DRFA
-hive.log.dir=/tmp
+hive.log.dir=/tmp/${user.name}-TestHiveLogging
hive.log.file=hiveLog4jTest.log
# Define the root logger to the system property "hadoop.root.logger".
diff --git a/contrib/pom.xml b/contrib/pom.xml
new file mode 100644
index 0000000..7e82804
--- /dev/null
+++ b/contrib/pom.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-contrib</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Contrib</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>${commons-codec.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ </build>
+
+</project>
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java b/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java
index d6c0b27..39562ea 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java
@@ -33,7 +33,7 @@ public class TestURLHook implements JDOConnectionURLHook {
public String getJdoConnectionUrl(Configuration conf) throws Exception {
if (originalUrl == null) {
originalUrl = conf.get(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "");
- return "jdbc:derby:;databaseName=../build/test/junit_metastore_db_blank;create=true";
+ return "jdbc:derby:;databaseName=target/tmp/junit_metastore_db_blank;create=true";
} else {
return originalUrl;
}
diff --git a/contrib/src/test/queries/clientnegative/case_with_row_sequence.q b/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
index c8f5f16..b51dc6e 100644
--- a/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
+++ b/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
@@ -1,6 +1,6 @@
drop temporary function row_sequence;
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
create temporary function row_sequence as
'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';
diff --git a/contrib/src/test/queries/clientnegative/invalid_row_sequence.q b/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
index 8fc5faf..f7d7e15 100644
--- a/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
+++ b/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
@@ -2,7 +2,7 @@
drop temporary function row_sequence;
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
create temporary function row_sequence as
'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';
diff --git a/contrib/src/test/queries/clientnegative/serde_regex.q b/contrib/src/test/queries/clientnegative/serde_regex.q
index 0995710..7ad3142 100644
--- a/contrib/src/test/queries/clientnegative/serde_regex.q
+++ b/contrib/src/test/queries/clientnegative/serde_regex.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
USE default;
diff --git a/contrib/src/test/queries/clientnegative/udtf_explode2.q b/contrib/src/test/queries/clientnegative/udtf_explode2.q
index 7800748..29217eb 100644
--- a/contrib/src/test/queries/clientnegative/udtf_explode2.q
+++ b/contrib/src/test/queries/clientnegative/udtf_explode2.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
diff --git a/contrib/src/test/queries/clientnegative/url_hook.q b/contrib/src/test/queries/clientnegative/url_hook.q
index d9ffd28..c346432 100644
--- a/contrib/src/test/queries/clientnegative/url_hook.q
+++ b/contrib/src/test/queries/clientnegative/url_hook.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
set hive.metastore.force.reload.conf=true;
SHOW TABLES 'src';
set hive.metastore.ds.connection.url.hook=org.apache.hadoop.hive.contrib.metastore.hooks.TestURLHook;
diff --git a/contrib/src/test/queries/clientpositive/dboutput.q b/contrib/src/test/queries/clientpositive/dboutput.q
index c56ecd1..28f1710 100644
--- a/contrib/src/test/queries/clientpositive/dboutput.q
+++ b/contrib/src/test/queries/clientpositive/dboutput.q
@@ -1,4 +1,4 @@
-ADD JAR ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
@@ -7,7 +7,7 @@
set mapred.map.tasks=1;
set mapred.reduce.tasks=1;
-ADD JAR ${system:build.ivy.lib.dir}/default/derby-${system:derby.version}.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/derby/derby/${system:derby.version}/derby-${system:derby.version}.jar;
DESCRIBE FUNCTION dboutput;
diff --git a/contrib/src/test/queries/clientpositive/fileformat_base64.q b/contrib/src/test/queries/clientpositive/fileformat_base64.q
index adf889e..20bbfa8 100644
--- a/contrib/src/test/queries/clientpositive/fileformat_base64.q
+++ b/contrib/src/test/queries/clientpositive/fileformat_base64.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
DROP TABLE base64_test;
diff --git a/contrib/src/test/queries/clientpositive/java_mr_example.q b/contrib/src/test/queries/clientpositive/java_mr_example.q
index 6c8a220..06911dc 100644
--- a/contrib/src/test/queries/clientpositive/java_mr_example.q
+++ b/contrib/src/test/queries/clientpositive/java_mr_example.q
@@ -1,10 +1,10 @@
FROM (
FROM src
MAP value, key
- USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper'
+ USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper'
AS k, v
CLUSTER BY k) map_output
REDUCE k, v
- USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce'
+ USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce'
AS k, v
;
\ No newline at end of file
diff --git a/contrib/src/test/queries/clientpositive/lateral_view_explode2.q b/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
index b46c9b2..210946e 100644
--- a/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
+++ b/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
diff --git a/contrib/src/test/queries/clientpositive/serde_regex.q b/contrib/src/test/queries/clientpositive/serde_regex.q
index 78567c1..5c2d2ca 100644
--- a/contrib/src/test/queries/clientpositive/serde_regex.q
+++ b/contrib/src/test/queries/clientpositive/serde_regex.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
EXPLAIN
CREATE TABLE serde_regex(
@@ -35,7 +35,7 @@
)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
diff --git a/contrib/src/test/queries/clientpositive/serde_s3.q b/contrib/src/test/queries/clientpositive/serde_s3.q
index a0bca81..69cd303 100644
--- a/contrib/src/test/queries/clientpositive/serde_s3.q
+++ b/contrib/src/test/queries/clientpositive/serde_s3.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
DROP TABLE s3log;
CREATE TABLE s3log
@@ -7,7 +7,7 @@
DESCRIBE s3log;
-LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log;
+LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log;
SELECT a.* FROM s3log a;
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes.q b/contrib/src/test/queries/clientpositive/serde_typedbytes.q
index 79000b4..d0765fd 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes2.q b/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
index a76b1d1..a709558 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
CREATE TABLE dest1(key SMALLINT, value STRING) STORED AS TEXTFILE;
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes3.q b/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
index a44d296..492c576 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes4.q b/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
index 685b53a..21d61ee 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes5.q b/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
index 99b2cfe..f658c46 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
@@ -8,7 +8,7 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
@@ -18,7 +18,7 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q b/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
index fffee8d..59b757b 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
DROP TABLE table1;
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_avg.q b/contrib/src/test/queries/clientpositive/udaf_example_avg.q
index 06a7d78..d8e63f2 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_avg.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_avg.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg';
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q b/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
index 1993314..869eac1 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat';
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_max.q b/contrib/src/test/queries/clientpositive/udaf_example_max.q
index 9cd8514..ff2d66b 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_max.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_max.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_max AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax';
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_max_n.q b/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
index 2bab538..4349647 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_max_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN';
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_min.q b/contrib/src/test/queries/clientpositive/udaf_example_min.q
index 96568a8..765f205 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_min.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_min.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_min AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin';
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_min_n.q b/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
index 32ed76e..436e3c1 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN';
EXPLAIN
diff --git a/contrib/src/test/queries/clientpositive/udf_example_add.q b/contrib/src/test/queries/clientpositive/udf_example_add.q
index 284e8f0..fb7b5c9 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_add.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_add.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd';
diff --git a/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q b/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
index 5565be3..2c36520 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_arraysum AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum';
CREATE TEMPORARY FUNCTION example_mapconcat AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat';
diff --git a/contrib/src/test/queries/clientpositive/udf_example_format.q b/contrib/src/test/queries/clientpositive/udf_example_format.q
index 589b1c8..38069dc2 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_format.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_format.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';
diff --git a/contrib/src/test/queries/clientpositive/udf_row_sequence.q b/contrib/src/test/queries/clientpositive/udf_row_sequence.q
index 7d62260..061f340 100644
--- a/contrib/src/test/queries/clientpositive/udf_row_sequence.q
+++ b/contrib/src/test/queries/clientpositive/udf_row_sequence.q
@@ -4,7 +4,7 @@
drop temporary function row_sequence;
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
create temporary function row_sequence as
'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';
diff --git a/contrib/src/test/queries/clientpositive/udtf_explode2.q b/contrib/src/test/queries/clientpositive/udtf_explode2.q
index 2173eec..2bd1359 100644
--- a/contrib/src/test/queries/clientpositive/udtf_explode2.q
+++ b/contrib/src/test/queries/clientpositive/udtf_explode2.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
diff --git a/contrib/src/test/queries/clientpositive/udtf_output_on_close.q b/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
index c5cdf0a..87aec5e 100644
--- a/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
+++ b/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2';
diff --git a/contrib/src/test/results/clientpositive/serde_regex.q.out b/contrib/src/test/results/clientpositive/serde_regex.q.out
index 3edf340..b918b5b 100644
--- a/contrib/src/test/results/clientpositive/serde_regex.q.out
+++ b/contrib/src/test/results/clientpositive/serde_regex.q.out
@@ -92,16 +92,16 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
diff --git a/contrib/src/test/results/clientpositive/serde_s3.q.out b/contrib/src/test/results/clientpositive/serde_s3.q.out
index d1099f7..9054571 100644
--- a/contrib/src/test/results/clientpositive/serde_s3.q.out
+++ b/contrib/src/test/results/clientpositive/serde_s3.q.out
@@ -32,10 +32,10 @@
turnaroundtime int from deserializer
referer string from deserializer
useragent string from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log
PREHOOK: type: LOAD
PREHOOK: Output: default@s3log
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log
POSTHOOK: type: LOAD
POSTHOOK: Output: default@s3log
PREHOOK: query: SELECT a.* FROM s3log a
diff --git a/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out b/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
index 090b4cc..d0d6091 100644
--- a/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
+++ b/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
@@ -12,7 +12,7 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
@@ -23,14 +23,14 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -57,7 +57,7 @@
type: string
outputColumnNames: _col0, _col1
Transform Operator
- command: python ../data/scripts/cat.py
+ command: python ../../data/scripts/cat.py
output info:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -139,7 +139,7 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
@@ -151,7 +151,7 @@
FROM src
SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
- USING 'python ../data/scripts/cat.py'
+ USING 'python ../../data/scripts/cat.py'
AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
) tmap
diff --git a/data/conf/hive-log4j-new.properties b/data/conf/hive-log4j-new.properties
new file mode 100644
index 0000000..d81e8230
--- /dev/null
+++ b/data/conf/hive-log4j-new.properties
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hive.root.logger=DEBUG,DRFA
+hive.log.dir=${test.tmp.dir}/log/
+hive.log.file=hive.log
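+# These defaults can be overridden per run with JVM system properties,
+# e.g. (illustrative invocation):
+#   mvn test -Dhive.root.logger=INFO,console -Dhive.log.dir=/tmp/hive-test-logs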
+
+# Define the root logger to the system property "hive.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=WARN
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
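+# For example, file-plus-console logging could be enabled with (sketch):
+#   hive.root.logger=DEBUG,DRFA,console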
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
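+# The category overrides above cap the chatty DataNucleus/JPOX persistence
+# layer and Hadoop Configuration deprecation warnings at ERROR so test logs
+# stay readable.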
+
diff --git a/data/conf/hive-site-new.xml b/data/conf/hive-site-new.xml
new file mode 100644
index 0000000..8aefb3b
--- /dev/null
+++ b/data/conf/hive-site-new.xml
@@ -0,0 +1,189 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration>
+
+<!-- Hive Configuration can either be stored in this file or in the hadoop configuration files -->
+<!-- that are implied by Hadoop setup variables. -->
+<!-- Aside from Hadoop setup variables - this file is provided as a convenience so that Hive -->
+<!-- users do not have to edit hadoop configuration files (that may be managed as a centralized -->
+<!-- resource). -->
+
+<!-- Hive Execution Parameters -->
+<property>
+ <name>hadoop.tmp.dir</name>
+ <value>${test.tmp.dir}/hadoop-tmp</value>
+ <description>A base for other temporary directories.</description>
+</property>
+
+<!--
+<property>
+ <name>hive.exec.reducers.max</name>
+ <value>1</value>
+ <description>maximum number of reducers</description>
+</property>
+-->
+
+<property>
+ <name>hive.exec.scratchdir</name>
+ <value>${test.tmp.dir}/scratchdir</value>
+ <description>Scratch space for Hive jobs</description>
+</property>
+
+<property>
+ <name>hive.exec.local.scratchdir</name>
+ <value>${test.tmp.dir}/localscratchdir/</value>
+ <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionURL</name>
+ <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
+</property>
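+<!--
+  The embedded Derby URLs above and below create throwaway databases under
+  ${test.tmp.dir} on first use; create=true is standard Derby JDBC syntax,
+  so each test run starts from a clean metastore.
+-->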
+
+<property>
+ <name>hive.stats.dbconnectionstring</name>
+ <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
+</property>
+
+
+<property>
+ <name>javax.jdo.option.ConnectionDriverName</name>
+ <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionUserName</name>
+ <value>APP</value>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionPassword</name>
+ <value>mine</value>
+</property>
+
+<property>
+ <!-- this should eventually be deprecated since the metastore should supply this -->
+ <name>hive.metastore.warehouse.dir</name>
+ <value>${test.warehouse.dir}</value>
+ <description></description>
+</property>
+
+<property>
+ <name>hive.metastore.metadb.dir</name>
+ <value>file://${test.tmp.dir}/metadb/</value>
+ <description>
+ Required by the metastore server, or if the uris argument below is not supplied
+ </description>
+</property>
+
+<property>
+ <name>test.log.dir</name>
+ <value>${test.tmp.dir}/log/</value>
+ <description></description>
+</property>
+
+<property>
+ <name>test.data.files</name>
+ <value>${hive.root}/data/files</value>
+ <description></description>
+</property>
+
+<property>
+ <name>hive.jar.path</name>
+ <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
+ <description></description>
+</property>
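+<!--
+  hive.jar.path now points at the hive-exec artifact inside the local Maven
+  repository instead of the old Ant build output; ${maven.local.repository}
+  and ${hive.version} are assumed to be injected by the test harness.
+-->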
+
+<property>
+ <name>hive.metastore.rawstore.impl</name>
+ <value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+ <description>Name of the class that implements the org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieve raw metadata objects such as tables and databases</description>
+</property>
+
+<property>
+ <name>hive.querylog.location</name>
+ <value>${test.tmp.dir}/tmp</value>
+ <description>Location of the structured hive logs</description>
+</property>
+
+<property>
+ <name>hive.exec.pre.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
+ <description>Pre Execute Hook for Tests</description>
+</property>
+
+<property>
+ <name>hive.exec.post.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
+ <description>Post Execute Hook for Tests</description>
+</property>
+
+<property>
+ <name>hive.task.progress</name>
+ <value>false</value>
+ <description>Track progress of a task</description>
+</property>
+
+<property>
+ <name>hive.support.concurrency</name>
+ <value>true</value>
+ <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
+</property>
+
+<property>
+ <name>fs.pfile.impl</name>
+ <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
+ <description>A proxy for local file system used for cross file system testing</description>
+</property>
+
+<property>
+ <name>hive.exec.mode.local.auto</name>
+ <value>false</value>
+ <description>
+ Let hive determine whether to run in local mode automatically
+ Disabling this for tests so that minimr is not affected
+ </description>
+</property>
+
+<property>
+ <name>hive.auto.convert.join</name>
+ <value>false</value>
+ <description>Whether Hive enables the optimization of converting a common join into a mapjoin based on the input file size</description>
+</property>
+
+<property>
+ <name>hive.ignore.mapjoin.hint</name>
+ <value>false</value>
+ <description>Whether Hive ignores the mapjoin hint</description>
+</property>
+
+<property>
+ <name>hive.input.format</name>
+ <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
+ <description>The default input format. If it is not specified, the system assigns one: HiveInputFormat for hadoop versions 17, 18 and 19, and CombineHiveInputFormat for hadoop 20. The user can always override it; if there is a bug in CombineHiveInputFormat, it can be set back to HiveInputFormat manually.</description>
+</property>
+
+<property>
+ <name>hive.default.rcfile.serde</name>
+ <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+ <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
+</configuration>
diff --git a/data/files/symlink1.txt b/data/files/symlink1.txt
index dc1a7c5..91d7346 100644
--- a/data/files/symlink1.txt
+++ b/data/files/symlink1.txt
@@ -1,2 +1,2 @@
-../data/files/T1.txt
-../data/files/T3.txt
+../../data/files/T1.txt
+../../data/files/T3.txt
diff --git a/data/files/symlink2.txt b/data/files/symlink2.txt
index 8436a30..487b05e 100644
--- a/data/files/symlink2.txt
+++ b/data/files/symlink2.txt
@@ -1 +1 @@
-../data/files/T2.txt
+../../data/files/T2.txt
diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml
new file mode 100644
index 0000000..4f9c722
--- /dev/null
+++ b/hbase-handler/pom.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hbase-handler</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HBase Handler</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <classifier>tests</classifier>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ </build>
+
+</project>
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
index e0918b0..d110245 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
@@ -81,7 +81,7 @@ private void setUpFixtures(HiveConf conf) throws Exception {
return;
}
zooKeeperPort = zkPort;
- String tmpdir = System.getProperty("user.dir")+"/../build/ql/tmp";
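+ // test.tmp.dir is expected to be supplied by the Maven test setup,
+ // replacing the old relative ../build/ql/tmp location.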
+ String tmpdir = System.getProperty("test.tmp.dir");
this.tearDown();
conf.set("hbase.master", "local");
diff --git a/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q b/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q
index 565cb35..b2979ab 100644
--- a/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q
+++ b/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q
@@ -16,11 +16,11 @@
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase_table_0;
DROP DATABASE IF EXISTS hbaseDB CASCADE;
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase_table_0;
diff --git a/hcatalog/core/.gitignore b/hcatalog/core/.gitignore
new file mode 100644
index 0000000..0a7a9c5
--- /dev/null
+++ b/hcatalog/core/.gitignore
@@ -0,0 +1 @@
+mapred
diff --git a/hcatalog/core/pom-new.xml b/hcatalog/core/pom-new.xml
new file mode 100644
index 0000000..576204f
--- /dev/null
+++ b/hcatalog/core/pom-new.xml
@@ -0,0 +1,177 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hcatalog-core</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog Core</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>${jackson.version}</version>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-tools</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-archives</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <classifier>h2</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
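+ <!-- The test-jar execution publishes this module's test classes so that
+ sibling modules (e.g. the pig adapter) can depend on them via
+ <classifier>tests</classifier>. -->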
+ </plugins>
+ </build>
+</project>
diff --git a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
index b041a14..512647c 100644
--- a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
@@ -585,13 +585,6 @@ private void discoverPartitions(JobContext context) throws IOException {
}
}
- // for (Entry<String,Map<String,String>> spec : partitionsDiscoveredByPath.entrySet()){
- // LOG.info("Partition "+ spec.getKey());
- // for (Entry<String,String> e : spec.getValue().entrySet()){
- // LOG.info(e.getKey() + "=>" +e.getValue());
- // }
- // }
-
this.partitionsDiscovered = true;
}
}
@@ -652,7 +645,6 @@ private void registerPartitions(JobContext context) throws IOException{
for(Partition ptn : partitionsToAdd){
ptnInfos.add(InternalUtil.createPtnKeyValueMap(new Table(tableInfo.getTable()), ptn));
}
-
//Publish the new partition(s)
if (dynamicPartitioningUsed && harProcessor.isEnabled() && (!partitionsToAdd.isEmpty())){
@@ -678,7 +670,7 @@ private void registerPartitions(JobContext context) throws IOException{
throw e;
}
- }else{
+ } else {
// no harProcessor, regular operation
updateTableSchema(client, table, jobInfo.getOutputSchema());
LOG.info("HAR not is not being used. The table {} has new partitions {}.", table.getTableName(), ptnInfos);
diff --git a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 2add2dc..1d622e0 100644
--- a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -108,6 +108,7 @@ public void run() {
HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
} catch (Throwable t) {
System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+ t.printStackTrace();
}
}
@@ -163,13 +164,14 @@ private static class ColumnHolder {
@BeforeClass
public static void setup() throws Exception {
- String testDir = System.getProperty("test.data.dir", "./");
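+ // Clear any mapred.job.tracker inherited from the test JVM so the
+ // MiniMRCluster started below can install its own value.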
+ System.clearProperty("mapred.job.tracker");
+ String testDir = System.getProperty("test.tmp.dir", "./");
testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/";
workDir = new File(new File(testDir).getCanonicalPath());
FileUtil.fullyDelete(workDir);
workDir.mkdirs();
- warehousedir = new Path(workDir + "/warehouse");
+ warehousedir = new Path(System.getProperty("test.warehouse.dir"));
// Run hive metastore server
t = new Thread(new RunMS());
@@ -186,9 +188,10 @@ public static void setup() throws Exception {
mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null,
new JobConf(conf));
mrConf = mrCluster.createJobConf();
- fs.mkdirs(warehousedir);
initializeSetup();
+
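+ // Resolve the FileSystem from the warehouse path itself rather than the
+ // MR cluster config, since test.warehouse.dir may live on the local FS.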
+ warehousedir.getFileSystem(hiveConf).mkdirs(warehousedir);
}
private static void initializeSetup() throws Exception {
@@ -251,14 +254,15 @@ private static void createTable(String tableName, String tablePerm) throws Excep
tbl.setPartitionKeys(ColumnHolder.partitionCols);
hmsc.createTable(tbl);
- FileSystem fs = FileSystem.get(mrConf);
- fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm));
+ Path path = new Path(warehousedir, tableName);
+ FileSystem fs = path.getFileSystem(hiveConf);
+ fs.setPermission(path, new FsPermission(tablePerm));
}
@AfterClass
public static void tearDown() throws IOException {
FileUtil.fullyDelete(workDir);
- FileSystem fs = FileSystem.get(mrConf);
+ FileSystem fs = warehousedir.getFileSystem(hiveConf);
if (fs.exists(warehousedir)) {
fs.delete(warehousedir, true);
}
@@ -367,14 +371,14 @@ private Path createInputFile() throws IOException {
* @throws Exception if any error occurs
*/
private List<String> getTableData(String table, String database) throws Exception {
- HiveConf conf = new HiveConf();
- conf.addResource("hive-site.xml");
ArrayList<String> results = new ArrayList<String>();
ArrayList<String> temp = new ArrayList<String>();
- Hive hive = Hive.get(conf);
+ Hive hive = Hive.get(hiveConf);
org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
FetchWork work;
- if (!tbl.getPartCols().isEmpty()) {
+ if (tbl.getPartCols().isEmpty()) {
+ work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
+ } else {
List<Partition> partitions = hive.getPartitions(tbl);
List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
List<String> partLocs = new ArrayList<String>();
@@ -384,12 +388,10 @@ private List<String> getTableData(String table, String database) throws Exceptio
}
work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));
work.setLimit(100);
- } else {
- work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
}
FetchTask task = new FetchTask();
task.setWork(work);
- task.initialize(conf, null, null);
+ task.initialize(hiveConf, null, null);
task.fetch(temp);
for (String str : temp) {
results.add(str.replace("\t", ","));
diff --git a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
index 9a8f3c0..957ced6 100644
--- a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -51,6 +51,7 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.util.Shell;
import org.apache.hcatalog.NoExitSecurityManager;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.data.DefaultHCatRecord;
@@ -74,10 +75,10 @@ public class TestHCatPartitionPublish {
private static HiveConf hcatConf;
private static HiveMetaStoreClient msc;
private static SecurityManager securityManager;
+ private static Configuration conf = new Configuration(true);
@BeforeClass
public static void setup() throws Exception {
- Configuration conf = new Configuration(true);
conf.set("yarn.scheduler.capacity.root.queues", "default");
conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
@@ -150,10 +151,13 @@ public void testPartitionPublish() throws Exception {
Assert.assertEquals(0, ptns.size());
Table table = msc.getTable(dbName, tableName);
Assert.assertTrue(table != null);
- // Also make sure that the directory has been deleted in the table
- // location.
- Assert.assertFalse(fs.exists(new Path(table.getSd().getLocation()
- + "/part1=p1value1/part0=p0value1")));
+ // On Windows, we cannot remove the output directory when a job fails. See
+ // FileOutputCommitterContainer.abortJob
+ if (!Shell.WINDOWS) {
+ Path path = new Path(table.getSd().getLocation()
+ + "/part1=p1value1/part0=p0value1");
+ Assert.assertFalse(path.getFileSystem(conf).exists(path));
+ }
}
void runMRCreateFail(
diff --git a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
index 8112f76..f6ddb88 100644
--- a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
@@ -88,7 +88,7 @@ public static void setup() throws IOException {
}
private static void createWorkDir() throws IOException {
- String testDir = System.getProperty("test.data.dir", "./");
+ String testDir = System.getProperty("test.tmp.dir", "./");
testDir = testDir + "/test_multiout_" + Math.abs(new Random().nextLong()) + "/";
workDir = new File(new File(testDir).getCanonicalPath());
FileUtil.fullyDelete(workDir);
diff --git a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
index 1aad829..6517826 100644
--- a/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
+++ b/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
@@ -51,7 +51,7 @@
*/
public class TestPassProperties {
private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
- "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
+ "/build/test/data/" + TestPassProperties.class.getCanonicalName();
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
@@ -96,7 +96,7 @@ public void testSequenceTableWriteReadMR() throws Exception {
conf.set("hive.metastore.uris", "thrift://no.such.machine:10888");
conf.set("hive.metastore.local", "false");
Job job = new Job(conf, "Write-hcat-seq-table");
- job.setJarByClass(TestSequenceFileReadWrite.class);
+ job.setJarByClass(TestPassProperties.class);
job.setMapperClass(Map.class);
job.setOutputKeyClass(NullWritable.class);
diff --git a/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java b/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
index f346782..d22e9a9 100644
--- a/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
@@ -68,7 +68,7 @@ public class TestRCFileMapReduceInputFormat extends TestCase {
static {
try {
fs = FileSystem.getLocal(conf);
- Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test_rcfile");
fs.delete(dir, true);
// the SerDe part is from TestLazySimpleSerDe
@@ -203,7 +203,7 @@ private void splitAfterSync() throws IOException, InterruptedException {
private void writeThenReadByRecordReader(int intervalRecordCount,
int writeCount, int splitNumber, long maxSplitSize, CompressionCodec codec)
throws IOException, InterruptedException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsmallfirstsplit");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
index 843240d..d9d8251 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
@@ -62,7 +62,9 @@ private void setupMiniDfsAndMrClusters() {
Configuration config = new Configuration();
// Builds and starts the mini dfs and mapreduce clusters
- System.setProperty("hadoop.log.dir", ".");
+ if(System.getProperty("hadoop.log.dir") == null) {
+ System.setProperty("hadoop.log.dir", "target/tmp/logs/");
+ }
m_dfs = new MiniDFSCluster(config, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 2ea5b9e..cf17ecf 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -105,6 +105,7 @@ public void run() {
HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
} catch (Throwable t) {
System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+ t.printStackTrace();
}
}
@@ -160,13 +161,14 @@ private static class ColumnHolder {
@BeforeClass
public static void setup() throws Exception {
- String testDir = System.getProperty("test.data.dir", "./");
+ System.clearProperty("mapred.job.tracker");
+ String testDir = System.getProperty("test.tmp.dir", "./");
testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/";
workDir = new File(new File(testDir).getCanonicalPath());
FileUtil.fullyDelete(workDir);
workDir.mkdirs();
- warehousedir = new Path(workDir + "/warehouse");
+ warehousedir = new Path(System.getProperty("test.warehouse.dir"));
// Run hive metastore server
t = new Thread(new RunMS());
@@ -183,9 +185,10 @@ public static void setup() throws Exception {
mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null,
new JobConf(conf));
mrConf = mrCluster.createJobConf();
- fs.mkdirs(warehousedir);
initializeSetup();
+
+ warehousedir.getFileSystem(conf).mkdirs(warehousedir);
}
private static void initializeSetup() throws Exception {
@@ -248,14 +251,15 @@ private static void createTable(String tableName, String tablePerm) throws Excep
tbl.setPartitionKeys(ColumnHolder.partitionCols);
hmsc.createTable(tbl);
- FileSystem fs = FileSystem.get(mrConf);
- fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm));
+ Path path = new Path(warehousedir, tableName);
+ FileSystem fs = path.getFileSystem(hiveConf);
+ fs.setPermission(path, new FsPermission(tablePerm));
}
@AfterClass
public static void tearDown() throws IOException {
FileUtil.fullyDelete(workDir);
- FileSystem fs = FileSystem.get(mrConf);
+ FileSystem fs = warehousedir.getFileSystem(hiveConf);
if (fs.exists(warehousedir)) {
fs.delete(warehousedir, true);
}
@@ -312,7 +316,8 @@ public void testOutputFormat() throws Throwable {
Assert.assertEquals("Comparing output of table " +
tableNames[0] + " is not correct", outputs.get(0), "a,a,1,ag");
Assert.assertEquals("Comparing output of table " +
- tableNames[1] + " is not correct", outputs.get(1), "a,1,ag");
+ tableNames[1] + " is not correct", outputs.get(1),
+ "a,1,ag");
Assert.assertEquals("Comparing output of table " +
tableNames[2] + " is not correct", outputs.get(2), "a,a,extra,1,ag");
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
index cebc840..f5a6138 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -74,15 +74,15 @@ public class TestHCatPartitionPublish {
private static HiveConf hcatConf;
private static HiveMetaStoreClient msc;
private static SecurityManager securityManager;
+ private static Configuration conf = new Configuration(true);
@BeforeClass
public static void setup() throws Exception {
- String testDir = System.getProperty("test.data.dir", "./");
+ String testDir = System.getProperty("test.tmp.dir", "./");
testDir = testDir + "/test_hcat_partitionpublish_" + Math.abs(new Random().nextLong()) + "/";
File workDir = new File(new File(testDir).getCanonicalPath());
FileUtil.fullyDelete(workDir);
workDir.mkdirs();
- Configuration conf = new Configuration(true);
conf.set("yarn.scheduler.capacity.root.queues", "default");
conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
@@ -158,8 +158,9 @@ public void testPartitionPublish() throws Exception {
// On Windows, we cannot remove the output directory when a job fails. See
// FileOutputCommitterContainer.abortJob
if (!Shell.WINDOWS) {
- Assert.assertFalse(fs.exists(new Path(table.getSd().getLocation()
- + "/part1=p1value1/part0=p0value1")));
+ Path path = new Path(table.getSd().getLocation()
+ + "/part1=p1value1/part0=p0value1");
+ Assert.assertFalse(path.getFileSystem(conf).exists(path));
}
}
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java
index aaab95d..d70e5bf 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java
@@ -85,7 +85,7 @@ public static void setup() throws IOException {
}
private static void createWorkDir() throws IOException {
- String testDir = System.getProperty("test.data.dir", "./");
+ String testDir = System.getProperty("test.tmp.dir", "./");
testDir = testDir + "/test_multiout_" + Math.abs(new Random().nextLong()) + "/";
workDir = new File(new File(testDir).getCanonicalPath());
FileUtil.fullyDelete(workDir);
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
index 5e2b699..81df987 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
@@ -48,7 +48,7 @@
public class TestPassProperties {
private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
- "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
+ "/build/test/data/" + TestPassProperties.class.getCanonicalName();
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
@@ -93,7 +93,7 @@ public void testSequenceTableWriteReadMR() throws Exception {
conf.set("hive.metastore.uris", "thrift://no.such.machine:10888");
conf.set("hive.metastore.local", "false");
Job job = new Job(conf, "Write-hcat-seq-table");
- job.setJarByClass(TestSequenceFileReadWrite.class);
+ job.setJarByClass(TestPassProperties.class);
job.setMapperClass(Map.class);
job.setOutputKeyClass(NullWritable.class);
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
index d677466..5f835be 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
@@ -68,7 +68,7 @@ public class TestRCFileMapReduceInputFormat extends TestCase {
static {
try {
fs = FileSystem.getLocal(conf);
- Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test_rcfile");
fs.delete(dir, true);
// the SerDe part is from TestLazySimpleSerDe
@@ -203,7 +203,7 @@ private void splitAfterSync() throws IOException, InterruptedException {
private void writeThenReadByRecordReader(int intervalRecordCount,
int writeCount, int splitNumber, long maxSplitSize, CompressionCodec codec)
throws IOException, InterruptedException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsmallfirstsplit");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
diff --git a/hcatalog/hcatalog-pig-adapter/pom-new.xml b/hcatalog/hcatalog-pig-adapter/pom-new.xml
new file mode 100644
index 0000000..2f2e559
--- /dev/null
+++ b/hcatalog/hcatalog-pig-adapter/pom-new.xml
@@ -0,0 +1,100 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hcatalog-pig-adapter</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog Pig Adapter</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <classifier>h2</classifier>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+</project>
diff --git a/hcatalog/pom-new.xml b/hcatalog/pom-new.xml
new file mode 100644
index 0000000..0436adb
--- /dev/null
+++ b/hcatalog/pom-new.xml
@@ -0,0 +1,97 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <packaging>pom</packaging>
+ <name>Hive HCatalog</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <modules>
+ <module>core</module>
+ <module>hcatalog-pig-adapter</module>
+ <module>server-extensions</module>
+ <module>webhcat/java-client</module>
+ <module>webhcat/svr</module>
+ <module>storage-handlers/hbase</module>
+ </modules>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <classifier>h2</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+
+</project>
diff --git a/hcatalog/server-extensions/pom-new.xml b/hcatalog/server-extensions/pom-new.xml
new file mode 100644
index 0000000..985e9b1
--- /dev/null
+++ b/hcatalog/server-extensions/pom-new.xml
@@ -0,0 +1,125 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hcatalog-server-extensions</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog Server Extensions</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>javax.jms</groupId>
+ <artifactId>jms</artifactId>
+ <version>${jms.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>${jackson.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.activemq</groupId>
+ <artifactId>activemq-core</artifactId>
+ <version>${activemq.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.activemq</groupId>
+ <artifactId>kahadb</artifactId>
+ <version>${activemq.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+</project>
diff --git a/hcatalog/storage-handlers/hbase/pom-new.xml b/hcatalog/storage-handlers/hbase/pom-new.xml
new file mode 100644
index 0000000..61bf160
--- /dev/null
+++ b/hcatalog/storage-handlers/hbase/pom-new.xml
@@ -0,0 +1,202 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hbase-storage-handler</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog HBase Storage Handler</name>
+
+ <properties>
+ <hive.path.to.root>../../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-hbase-handler</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <resources>
+ <resource>
+ <directory>${basedir}/src/resources</directory>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen-java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
index f2c81dc..83dbb32 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
@@ -36,14 +36,13 @@
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hive.conf.HiveConf;
import org.junit.AfterClass;
-import org.junit.BeforeClass;
/**
* Base class for HBase Tests which need a mini cluster instance
*/
public abstract class SkeletonHBaseTest {
- protected static String TEST_DIR = "/tmp/build/test/data/";
+ protected static String TEST_DIR = System.getProperty("test.tmp.dir", "target/tmp/");
protected final static String DEFAULT_CONTEXT_HANDLE = "default";
@@ -56,20 +55,15 @@ public abstract class SkeletonHBaseTest {
*/
protected static Configuration testConf = null;
- protected void createTable(String tableName, String[] families) {
- try {
- HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
- HTableDescriptor tableDesc = new HTableDescriptor(tableName);
- for (String family : families) {
- HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
- tableDesc.addFamily(columnDescriptor);
- }
- admin.createTable(tableDesc);
- } catch (Exception e) {
- e.printStackTrace();
- throw new IllegalStateException(e);
+ protected void createTable(String tableName, String[] families) throws IOException {
+ HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+ HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+ for (String family : families) {
+ HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
+ tableDesc.addFamily(columnDescriptor);
}
-
+ admin.createTable(tableDesc);
+ admin.close();
}
protected String newTableName(String prefix) {
@@ -78,21 +72,20 @@ protected String newTableName(String prefix) {
do {
name = prefix + "_" + Math.abs(new Random().nextLong());
} while (tableNames.contains(name) && --tries > 0);
- if (tableNames.contains(name))
+ if (tableNames.contains(name)) {
throw new IllegalStateException("Couldn't find a unique table name, tableNames size: " + tableNames.size());
+ }
tableNames.add(name);
return name;
}
-
-
+
/**
* startup an hbase cluster instance before a test suite runs
*/
- @BeforeClass
- public static void setup() {
- if (!contextMap.containsKey(getContextHandle()))
+ public static void setupSkeletonHBaseTest() {
+ if (!contextMap.containsKey(getContextHandle())) {
contextMap.put(getContextHandle(), new Context(getContextHandle()));
-
+ }
contextMap.get(getContextHandle()).start();
}
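
With the base-class @BeforeClass gone, each SkeletonHBaseTest subclass now starts the mini cluster itself, which lets test-specific static configuration run first (see TestRevisionManagerEndpoint below). A minimal sketch of the idiom the following hunks repeat, using a hypothetical subclass name:

    import org.junit.BeforeClass;
    import org.junit.Test;

    public class MyHBaseTest extends SkeletonHBaseTest {
      @BeforeClass
      public static void setup() throws Throwable {
        // test-specific testConf tweaks, if any, go here ...
        setupSkeletonHBaseTest();   // ... then start the shared mini cluster
      }

      @Test
      public void testSomething() throws Exception {
        // exercise the cluster via getHiveConf()/getHbaseConf() from the skeleton
      }
    }
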
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
index 32e8dd7..d97f499 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
@@ -66,6 +66,7 @@
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -89,6 +90,11 @@ public class TestHBaseBulkOutputFormat extends SkeletonHBaseTest {
private final HiveConf allConf;
private final HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHBaseBulkOutputFormat() {
allConf = getHiveConf();
allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
index 7ae762a..e359c64 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
@@ -63,6 +63,7 @@
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
@@ -83,6 +84,11 @@ public class TestHBaseDirectOutputFormat extends SkeletonHBaseTest {
private final HiveConf allConf;
private final HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHBaseDirectOutputFormat() {
allConf = getHiveConf();
allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
index 2c98b80..84cbfaf 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
@@ -41,6 +41,7 @@
import org.apache.hcatalog.hbase.snapshot.RevisionManager;
import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
@@ -49,6 +50,11 @@ public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
private static HCatDriver hcatDriver;
private static Warehouse wh;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
index b257fa9..e41000e 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
@@ -73,6 +73,7 @@
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.apache.hcatalog.mapreduce.PartInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
@@ -83,6 +84,11 @@ public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
private final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
private final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public TestHCatHBaseInputFormat() throws Exception {
hcatConf = getHiveConf();
hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
index ff558f5..1c8454a 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
@@ -41,12 +41,18 @@
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestSnapshots extends SkeletonHBaseTest {
private static HiveConf hcatConf;
private static HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
index ca046d5..6ca24c3 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
@@ -26,10 +26,16 @@
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.junit.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestIDGenerator extends SkeletonHBaseTest {
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
@Test
public void testIDGeneration() throws Exception {
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
index cafdcd1..dbba645 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
@@ -32,10 +32,16 @@
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestRevisionManager extends SkeletonHBaseTest {
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
@Test
public void testBasicZNodeCreation() throws IOException, KeeperException, InterruptedException {
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
index 3208fa6..e876719 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
@@ -31,24 +31,30 @@
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.junit.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestRevisionManagerEndpoint extends SkeletonHBaseTest {
- static {
+ @BeforeClass
+ public static void setup() throws Throwable {
// test case specific mini cluster settings
testConf = new Configuration(false);
testConf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
"org.apache.hcatalog.hbase.snapshot.RevisionManagerEndpoint",
"org.apache.hadoop.hbase.coprocessor.GenericEndpoint");
testConf.set(RMConstants.REVISION_MGR_ENDPOINT_IMPL_CLASS, MockRM.class.getName());
+ setupSkeletonHBaseTest();
}
-
+
/**
* Mock implementation to test the protocol/serialization
*/
public static class MockRM implements RevisionManager {
+ public MockRM() {
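+ // debug aid: prints the construction call stack to stderr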
+ Thread.dumpStack();
+ }
private static class Invocation {
Invocation(String methodName, Object ret, Object... args) {
this.methodName = methodName;
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
index 8122b14..e0ea30e 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
@@ -38,6 +38,7 @@
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
import org.junit.Test;
@@ -46,6 +47,11 @@ public class TestZNodeSetUp extends SkeletonHBaseTest {
private static HiveConf hcatConf;
private static HCatDriver hcatDriver;
+ @BeforeClass
+ public static void setup() throws Throwable {
+ setupSkeletonHBaseTest();
+ }
+
public void Initialize() throws Exception {
hcatConf = getHiveConf();
diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
index 0957416..6bd278e 100644
--- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
@@ -73,6 +73,7 @@ public void Initialize() throws Exception {
URI fsuri = getFileSystem().getUri();
Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
getTestDir());
+ hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
diff --git a/hcatalog/webhcat/java-client/pom-new.xml b/hcatalog/webhcat/java-client/pom-new.xml
new file mode 100644
index 0000000..e1c738f
--- /dev/null
+++ b/hcatalog/webhcat/java-client/pom-new.xml
@@ -0,0 +1,88 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-webhcat-java-client</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog Webhcat Java Client</name>
+
+ <properties>
+ <hive.path.to.root>../../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+</project>
diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
index b33a8c3..e755947 100644
--- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
+++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
@@ -52,6 +51,7 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.fail;
/**
* @deprecated Use/modify {@link org.apache.hive.hcatalog.api.TestHCatClient} instead
@@ -123,9 +123,9 @@ public void testBasicDDLCommands() throws Exception {
assertTrue(testDb.getComment() == null);
assertTrue(testDb.getProperties().size() == 0);
String warehouseDir = System
- .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
- assertTrue(testDb.getLocation().equals(
- "file:" + warehouseDir + "/" + db + ".db"));
+ .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+ String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+ assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -145,6 +145,7 @@ public void testBasicDDLCommands() throws Exception {
// will result in an exception.
try {
client.createTable(tableDesc);
+ fail("Expected exception");
} catch (HCatException e) {
assertTrue(e.getMessage().contains(
"AlreadyExistsException while creating table."));
@@ -159,8 +160,7 @@ public void testBasicDDLCommands() throws Exception {
TextInputFormat.class.getName()));
assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
IgnoreKeyTextOutputFormat.class.getName()));
- assertTrue(table2.getLocation().equalsIgnoreCase(
- "file:" + warehouseDir + "/" + db + ".db/" + tableTwo));
+ assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
client.close();
}
diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
index b18f8c8..4fe1328 100644
--- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
+++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
@@ -48,6 +47,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -120,13 +120,9 @@ public void testBasicDDLCommands() throws Exception {
assertTrue(testDb.getComment() == null);
assertTrue(testDb.getProperties().size() == 0);
String warehouseDir = System
- .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
- String expectedDir = warehouseDir.replaceAll("\\\\", "/");
- if (!expectedDir.startsWith("/")) {
- expectedDir = "/" + expectedDir;
- }
- assertTrue(testDb.getLocation().equals(
- "file:" + expectedDir + "/" + db + ".db"));
+ .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+ String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+ assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -146,6 +142,7 @@ public void testBasicDDLCommands() throws Exception {
// will result in an exception.
try {
client.createTable(tableDesc);
+ fail("Expected exception");
} catch (HCatException e) {
assertTrue(e.getMessage().contains(
"AlreadyExistsException while creating table."));
@@ -160,8 +157,7 @@ public void testBasicDDLCommands() throws Exception {
TextInputFormat.class.getName()));
assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
IgnoreKeyTextOutputFormat.class.getName()));
- assertTrue(table2.getLocation().equalsIgnoreCase(
- "file:" + expectedDir + "/" + db + ".db/" + tableTwo));
+ assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
client.close();
}
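
Both copies of this test now derive the expected database location from test.warehouse.dir, which the test harness is expected to supply as a URI, so the assertion first normalizes it. A minimal sketch of that normalization as a standalone helper (helper name hypothetical):

    // Backslashes (Windows paths) become forward slashes, and the
    // empty-authority form "pfile:///" collapses to "pfile:/", matching
    // the location string the metastore reports.
    static String expectedWarehouseDir() {
      String raw = System.getProperty("test.warehouse.dir", "/user/hive/warehouse");
      return raw.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
    }
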
diff --git a/hcatalog/webhcat/svr/pom-new.xml b/hcatalog/webhcat/svr/pom-new.xml
new file mode 100644
index 0000000..30c987f
--- /dev/null
+++ b/hcatalog/webhcat/svr/pom-new.xml
@@ -0,0 +1,184 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-webhcat</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HCatalog Webhcat</name>
+
+ <properties>
+ <hive.path.to.root>../../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-servlet</artifactId>
+ <version>${jersey.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey.contribs</groupId>
+ <artifactId>wadl-resourcedoc-doclet</artifactId>
+ <version>${wadl-resourcedoc-doclet.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-exec</artifactId>
+ <version>${commons-exec.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ <version>${jackson.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>${jackson.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty.aggregate</groupId>
+ <artifactId>jetty-all-server</artifactId>
+ <version>${jetty.webhcat.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jul-to-slf4j</artifactId>
+ <version>${slf4j.version}</version>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>${maven-javadoc-plugin.version}</version>
+ <executions>
+ <execution>
+ <id>resourcesdoc.xml</id>
+ <goals>
+ <goal>javadoc</goal>
+ </goals>
+ <phase>compile</phase>
+ <configuration>
+ <encoding>${project.build.sourceEncoding}</encoding>
+ <verbose>true</verbose>
+ <show>public</show>
+ <doclet>com.sun.jersey.wadl.resourcedoc.ResourceDoclet</doclet>
+ <docletArtifacts>
+ <docletArtifact>
+ <groupId>com.sun.jersey.contribs</groupId>
+ <artifactId>wadl-resourcedoc-doclet</artifactId>
+ <version>${wadl-resourcedoc-doclet.version}</version>
+ </docletArtifact>
+ <!--
+ Also specify jersey and xerces as doclet artifacts because the ResourceDoclet
+ uses classes they provide to generate the resourcedoc.
+ -->
+ <docletArtifact>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
+ </docletArtifact>
+ <docletArtifact>
+ <groupId>xerces</groupId>
+ <artifactId>xercesImpl</artifactId>
+ <version>${xerces.version}</version>
+ </docletArtifact>
+ </docletArtifacts>
+ <additionalparam>-output ${project.build.outputDirectory}/resourcedoc.xml</additionalparam>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
index 3ea10df..8d42ddc 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
@@ -107,12 +107,12 @@ private List<String> makeArgs(String exec, boolean format,
}
LOG.info("Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE)=" +
Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE));
- if(System.getProperty("hive.metastore.warehouse.dir") != null) {
+ if(System.getProperty("test.warehouse.dir") != null) {
/*when running in unit test mode, pass this property to HCat,
which will in turn pass it to Hive to make sure that Hive
tries to write to a directory that exists.*/
args.add("-D");
- args.add("hive.metastore.warehouse.dir=" + System.getProperty("hive.metastore.warehouse.dir"));
+ args.add("hive.metastore.warehouse.dir=" + System.getProperty("test.warehouse.dir"));
}
return args;
}
diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
index e6fb5ee..083c881 100644
--- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
+++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
@@ -156,7 +156,7 @@ public void dropTableIfExists() throws IOException {
public void createDataBase() throws IOException {
Map<String, Object> props = new HashMap<String, Object>();
props.put("comment", "Hello, there");
- props.put("location", "file://" + System.getProperty("hive.metastore.warehouse.dir"));
+ props.put("location", System.getProperty("test.warehouse.dir"));
Map<String, String> props2 = new HashMap<String, String>();
props2.put("prop", "val");
props.put("properties", props2);
diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java
index 1144d11..d24fe69 100644
--- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java
+++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java
@@ -29,7 +29,7 @@
public class TestJobIDParser {
@Test
public void testParsePig() throws IOException {
- String errFileName = "../../src/test/data/status/pig";
+ String errFileName = "src/test/data/status/pig";
PigJobIDParser pigJobIDParser = new PigJobIDParser(errFileName, new Configuration());
List<String> jobs = pigJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -37,7 +37,7 @@ public void testParsePig() throws IOException {
@Test
public void testParseHive() throws IOException {
- String errFileName = "../../src/test/data/status/hive";
+ String errFileName = "src/test/data/status/hive";
HiveJobIDParser hiveJobIDParser = new HiveJobIDParser(errFileName, new Configuration());
List<String> jobs = hiveJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -45,7 +45,7 @@ public void testParseHive() throws IOException {
@Test
public void testParseJar() throws IOException {
- String errFileName = "../../src/test/data/status/jar";
+ String errFileName = "src/test/data/status/jar";
JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
List<String> jobs = jarJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
@@ -53,7 +53,7 @@ public void testParseJar() throws IOException {
@Test
public void testParseStreaming() throws IOException {
- String errFileName = "../../src/test/data/status/streaming";
+ String errFileName = "src/test/data/status/streaming";
JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
List<String> jobs = jarJobIDParser.parseJobID();
Assert.assertEquals(jobs.size(), 1);
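
These fixture paths are now module-relative: under the Maven build each module's tests run with the module directory as the working directory, so the old ../../ prefix no longer applies. A quick way to check that assumption from a test (sketch):

    import java.io.File;
    import org.junit.Assert;

    // If surefire runs with the module basedir as the working directory,
    // the fixture resolves without any ../.. prefix.
    File fixture = new File("src/test/data/status/pig");
    Assert.assertTrue("fixture should resolve relative to the module", fixture.isFile());
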
diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
index c72a8b2..33e9d42 100644
--- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
+++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
@@ -35,7 +35,7 @@ public class TestTempletonUtils {
"2011-12-15 18:12:21,758 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - More information at: http://localhost:50030/jobdetails.jsp?jobid=job_201112140012_0047",
"2011-12-15 18:12:46,907 [main] INFO org.apache.pig.tools.pigstats.SimplePigStats - Script Statistics: "
};
- public static final String testDataDir = System.getProperty("test.data.dir");
+ public static final String testDataDir = System.getProperty("test.tmp.dir");
File tmpFile;
File usrFile;
diff --git a/hwi/pom.xml b/hwi/pom.xml
new file mode 100644
index 0000000..061fa39
--- /dev/null
+++ b/hwi/pom.xml
@@ -0,0 +1,134 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hwi</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive HWI</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>${commons-httpclient.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <configuration>
+ <warSourceDirectory>${basedir}/web</warSourceDirectory>
+ <packagingExcludes>WEB-INF/lib/*</packagingExcludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/itests/custom-serde/pom.xml b/itests/custom-serde/pom.xml
new file mode 100644
index 0000000..414b729
--- /dev/null
+++ b/itests/custom-serde/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-custom-serde</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - Custom Serde</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+</project>
diff --git a/itests/hcatalog-unit/pom.xml b/itests/hcatalog-unit/pom.xml
new file mode 100644
index 0000000..fb1b137
--- /dev/null
+++ b/itests/hcatalog-unit/pom.xml
@@ -0,0 +1,227 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-hcatalog-it-unit</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - HCatalog Unit Tests</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-core</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hbase-storage-handler</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.hcatalog</groupId>
+ <artifactId>hive-hcatalog-pig-adapter</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-tools</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <!-- test -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-archives</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pig.version}</version>
+ <classifier>h2</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
new file mode 100644
index 0000000..1a76733
--- /dev/null
+++ b/itests/hive-unit/pom.xml
@@ -0,0 +1,207 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-unit</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - Unit Tests</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-beeline</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it-util</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>setup-metastore-scripts</id>
+ <phase>process-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <mkdir dir="${test.tmp.dir}/scripts/metastore" />
+ <copy todir="${test.tmp.dir}/scripts/metastore">
+ <fileset dir="${basedir}/${hive.path.to.root}/metastore/scripts/"/>
+ </copy>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+
+</project>
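Note on usage: the hive-it-unit module above selects its Hadoop dependencies through the hadoop-1/hadoop-2 profiles. A minimal sketch of a local run, assuming the main Hive reactor has already been installed into the local repository:

    mvn clean install -DskipTests      # from the source root: install core artifacts
    cd itests
    mvn test -pl hive-unit -am -Phadoop-2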
diff --git a/itests/pom.xml b/itests/pom.xml
new file mode 100644
index 0000000..aa322bf
--- /dev/null
+++ b/itests/pom.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it</artifactId>
+ <packaging>pom</packaging>
+ <name>Hive Integration - Parent</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <modules>
+ <module>custom-serde</module>
+ <module>hive-unit</module>
+ <module>hcatalog-unit</module>
+ <module>util</module>
+ <module>test-serde</module>
+ <module>qtest</module>
+ </modules>
+
+</project>
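Note on usage: this parent pom makes the six integration-test modules build as a single reactor from the itests directory; a minimal sketch (profile ids as defined in the child poms):

    cd itests
    mvn clean install -Phadoop-1 -DskipTests   # compile every hive-it module
    mvn test -Phadoop-1                        # run the full suite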
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
new file mode 100644
index 0000000..7dc9099
--- /dev/null
+++ b/itests/qtest/pom.xml
@@ -0,0 +1,431 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-qfile</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - QFile Tests</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+
+ <qfile></qfile>
+ <qfile_regex></qfile_regex>
+ <run_disabled>false</run_disabled>
+ <clustermode></clustermode>
+ <execute.beeline.tests>false</execute.beeline.tests>
+    <minimr.query.files>list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q</minimr.query.files>
+ <minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q</minimr.query.negative.files>
+ <beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
+ </properties>
+
+ <dependencies>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-ant</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-contrib</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it-custom-serde</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it-util</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+      <!-- replaces -Dclustermode=minimr, which I am not sure was ever used -->
+ <id>minimr</id>
+ <properties>
+ <test.warehouse.scheme></test.warehouse.scheme>
+ </properties>
+ </profile>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <properties>
+ <active.hadoop.version>${hadoop-20S.version}</active.hadoop.version>
+ <test.dfs.mkdir>-mkdir</test.dfs.mkdir>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-tools</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <properties>
+ <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
+ <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-tests-sources</id>
+ <phase>generate-test-sources</phase>
+ <configuration>
+ <target>
+ <property name="test.classpath" refid="maven.test.classpath"/>
+ <taskdef resource="net/sf/antcontrib/antcontrib.properties"
+ classpathref="maven.plugin.classpath" />
+ <taskdef name="qtestgen" classname="org.apache.hadoop.hive.ant.QTestGenTask"
+ classpath="${test.classpath}" />
+ <mkdir dir="${project.build.directory}/qfile-results/clientpositive/" />
+ <mkdir dir="${project.build.directory}/qfile-results/clientnegative/" />
+ <mkdir dir="${project.build.directory}/qfile-results/positive/" />
+ <mkdir dir="${project.build.directory}/qfile-results/negative/" />
+ <mkdir dir="${project.build.directory}/qfile-results/hbase-handler/positive/" />
+ <mkdir dir="${project.build.directory}/qfile-results/hbase-handler/negative/" />
+ <mkdir dir="${project.build.directory}/qfile-results/hbase-handler/minimrpositive/" />
+
+ <mkdir dir="${project.build.directory}/qfile-results/contribpositive"/>
+ <mkdir dir="${project.build.directory}/qfile-results/contribnegative"/>
+ <mkdir dir="${project.build.directory}/qfile-results/contribclientpositive"/>
+ <mkdir dir="${project.build.directory}/qfile-results/contribclientnegative"/>
+
+
+ <!-- Parse -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/ql/parse/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestParse.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/positive/"
+ queryFile="${qfile}"
+ queryFileRegex="${qfile_regex}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/compiler/" className="TestParse"
+ logFile="${project.build.directory}/testparsegen.log"
+ hadoopVersion="${active.hadoop.version}"
+ logDirectory="${project.build.directory}/qfile-results/positive/"/>
+
+ <!-- Negative Parse -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/ql/parse/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestParseNegative.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/negative/"
+ queryFile="${qfile}"
+ queryFileRegex="${qfile_regex}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/compiler/errors/" className="TestParseNegative"
+ logFile="${project.build.directory}/testparseneggen.log"
+ hadoopVersion="${active.hadoop.version}"
+ logDirectory="${project.build.directory}/qfile-results/negative/"/>
+
+ <!-- Cli -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+ queryFile="${qfile}"
+ excludeQueryFile="${minimr.query.files}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="${clustermode}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestCliDriver"
+ logFile="${project.build.directory}/testclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+ hadoopVersion="${active.hadoop.version}"/>
+
+ <!-- Negative Cli -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestNegativeCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientnegative/"
+ queryFile="${qfile}"
+ excludeQueryFile="${minimr.query.negative.files}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="${clustermode}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientnegative/" className="TestNegativeCliDriver"
+ logFile="${project.build.directory}/testnegativeclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientnegative/"
+ hadoopVersion="${active.hadoop.version}"/>
+
+ <!-- Minimr -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+ queryFile="${qfile}"
+ includeQueryFile="${minimr.query.files}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="miniMR"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestMinimrCliDriver"
+ logFile="${project.build.directory}/testminimrclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientpositive/"
+                      hadoopVersion="${active.hadoop.version}"
+ />
+
+ <!-- Negative Minimr -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestNegativeCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientnegative/"
+ queryFile="${qfile}"
+ includeQueryFile="${minimr.query.negative.files}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="miniMR"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientnegative/" className="TestNegativeMinimrCliDriver"
+ logFile="${project.build.directory}/testnegativeminimrclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientnegative/"
+                      hadoopVersion="${active.hadoop.version}"
+ />
+
+ <!-- HBase Positive -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/hbase-handler/src/test/templates/" template="TestHBaseCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/queries/positive/"
+ queryFile="${qfile}"
+ runDisabled="${run_disabled}"
+ clusterMode="${clustermode}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/results/positive/" className="TestHBaseCliDriver"
+ logFile="${project.build.directory}/testhbaseclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/positive/"/>
+
+ <!-- HBase Minimr -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/hbase-handler/src/test/templates/" template="TestHBaseCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/queries/positive/"
+ queryFile="hbase_bulk.m"
+ runDisabled="${run_disabled}"
+ clusterMode="miniMR"
+ resultsDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/results/positive/" className="TestHBaseMinimrCliDriver"
+ logFile="${project.build.directory}/testhbaseminimrclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/minimrpositive/"/>
+
+ <!-- HBase Negative -->
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/hbase-handler/src/test/templates/" template="TestHBaseNegativeCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/queries/negative/"
+ queryFile="${qfile}"
+ runDisabled="${run_disabled}"
+ clusterMode="${clustermode}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/hbase-handler/src/test/results/negative/" className="TestHBaseNegativeCliDriver"
+ logFile="${project.build.directory}/testhbasenegativeclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/hbase-handler/negative"/>
+
+
+ <!-- Beeline -->
+ <if>
+ <equals arg1="${execute.beeline.tests}" arg2="true" />
+ <then>
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hive/beeline/util/"
+                      templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestBeeLineDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+ queryFile="${qfile}"
+ excludeQueryFile="${beeline.positive.exclude}"
+ queryFileRegex="${qfile_regex}"
+ clusterMode="${clustermode}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestBeeLineDriver"
+ logFile="${project.build.directory}/testbeelinedrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/beelinepositive/"
+                      hadoopVersion="${active.hadoop.version}" />
+ </then>
+ </if>
+
+
+
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/queries/clientpositive"
+ queryFile="${qfile}"
+ queryFileRegex="${qfile_regex}"
+ runDisabled="${run_disabled}"
+ clusterMode="${clustermode}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/results/clientpositive/" className="TestContribCliDriver"
+ logFile="${project.build.directory}/testcontribclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
+                      hadoopVersion="${active.hadoop.version}"
+ />
+
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestNegativeCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/queries/clientnegative"
+ queryFile="${qfile}"
+ queryFileRegex="${qfile_regex}"
+ runDisabled="${run_disabled}"
+ resultsDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/results/clientnegative/" className="TestContribNegativeCliDriver"
+ logFile="${project.build.directory}/testcontribnegclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/contribclientnegative"/>
+
+
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>1.8</version>
+ <executions>
+ <execution>
+ <id>add-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>target/generated-test-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
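Note on usage: the empty qfile/qfile_regex properties above are the selection knobs the antrun target passes to qtestgen, so individual .q files can be chosen on the command line. A minimal sketch from the itests directory; the -Dtest surefire filter is an assumption, since qtestgen only generates the driver classes:

    mvn test -pl qtest -am -Phadoop-1 -Dqfile=join1.q -Dtest=TestCliDriver
    mvn test -pl qtest -am -Phadoop-1 -Dqfile_regex='groupby.*' -Dtest=TestCliDriver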
diff --git a/itests/qtest/src/test/java/org/apache/hive/TestDummy.java b/itests/qtest/src/test/java/org/apache/hive/TestDummy.java
new file mode 100644
index 0000000..0f83345
--- /dev/null
+++ b/itests/qtest/src/test/java/org/apache/hive/TestDummy.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive;
+
+
+import org.junit.Test;
+
+
+public class TestDummy {
+
+ @Test
+ public void testDummy() throws Exception {
+
+ }
+}
diff --git a/itests/test-serde/pom.xml b/itests/test-serde/pom.xml
new file mode 100644
index 0000000..3f1f198
--- /dev/null
+++ b/itests/test-serde/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-test-serde</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - Test Serde</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+</project>
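Note on usage: hive-exec is declared optional above so it is supplied by the test harness rather than propagated to consumers of hive-it-test-serde; one way to confirm that from a shell, run from the itests directory:

    mvn -pl test-serde dependency:tree -Phadoop-1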
diff --git a/itests/util/pom.xml b/itests/util/pom.xml
new file mode 100644
index 0000000..b0d77b3
--- /dev/null
+++ b/itests/util/pom.xml
@@ -0,0 +1,130 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-util</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - Testing Utilities</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+    <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-cli</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-hbase-handler</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ </dependency>
+    <!-- inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <classifier>tests</classifier>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+
+</project>
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
new file mode 100644
index 0000000..57bd4a3
--- /dev/null
+++ b/jdbc/pom.xml
@@ -0,0 +1,107 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-jdbc</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive JDBC</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ </build>
+
+</project>
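Note on usage: the hive-jdbc artifact built here is what BeeLine and other clients load for jdbc:hive2 URLs; a quick shell smoke test against a running HiveServer2 (host and port are placeholders):

    beeline -u 'jdbc:hive2://localhost:10000' -e 'show tables;'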
diff --git a/maven-delete-ant.sh b/maven-delete-ant.sh
new file mode 100644
index 0000000..127d6ae
--- /dev/null
+++ b/maven-delete-ant.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+function delete() {
+ rm -rf "$@"
+}
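+# Remove the legacy Ant build files, the pre-Maven hcatalog poms, the Ivy
+# metadata, and the one-shot migration helper scripts themselves.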
+delete build.properties
+delete build.xml
+delete build-common.xml
+delete build-offline.xml
+delete ant/build.xml
+delete beeline/build.xml
+delete cli/build.xml
+delete common/build.xml
+delete contrib/build.xml
+delete hbase-handler/build.xml
+delete hcatalog/build.xml
+delete hcatalog/core/build.xml
+delete hcatalog/hcatalog-pig-adapter/build.xml
+delete hcatalog/server-extensions/build.xml
+delete hcatalog/src/test/e2e/hcatalog/build.xml
+delete hcatalog/src/test/e2e/hcatalog/tools/generate/java/build.xml
+delete hcatalog/src/test/e2e/hcatalog/udfs/java/build.xml
+delete hcatalog/src/test/e2e/templeton/build.xml
+delete hcatalog/storage-handlers/hbase/build.xml
+delete hcatalog/webhcat/java-client/build.xml
+delete hcatalog/webhcat/svr/build.xml
+delete hwi/build.xml
+delete jdbc/build.xml
+delete metastore/build.xml
+delete odbc/build.xml
+delete ql/build.xml
+delete serde/build.xml
+delete service/build.xml
+delete shims/build.xml
+delete testutils/build.xml
+delete hcatalog/core/pom-old.xml
+delete hcatalog/hcatalog-pig-adapter/pom-old.xml
+delete hcatalog/pom-old.xml
+delete hcatalog/server-extensions/pom-old.xml
+delete hcatalog/storage-handlers/hbase/pom-old.xml
+delete hcatalog/webhcat/java-client/pom-old.xml
+delete hcatalog/webhcat/svr/pom-old.xml
+delete hcatalog/build-support/ant/build-command.xml
+delete hcatalog/build-support/ant/deploy.xml
+delete hcatalog/build-support/ant/test.xml
+delete ivy
+delete maven-rollback.sh
+delete maven-rollforward.sh
+delete maven-delete-ant.sh
diff --git a/maven-rollback.sh b/maven-rollback.sh
new file mode 100644
index 0000000..dbeb9f7
--- /dev/null
+++ b/maven-rollback.sh
@@ -0,0 +1,171 @@
+# rollback file, generated with:
+set -e
+move_source() {
+ source=$1
+ target=$2
+ mkdir -p $(dirname $target)
+ mv $source $target
+}
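+# Move relocated test sources back out of itests/ and restore the pre-Maven
+# layout (hcatalog poms, shims source trees, conf file locations).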
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java serde/src/test/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java
+move_source itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
+move_source itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java metastore/src/test/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartition.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRawStoreTxn.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRawStoreTxn.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteUGIHiveMetaStoreIpAddress.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteUGIHiveMetaStoreIpAddress.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java ql/src/test/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java ql/src/test/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java ql/src/test/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java ql/src/test/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+move_source itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHiveServer2Concurrency.java service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
+
+move_source hcatalog/hcatalog-pig-adapter/pom.xml hcatalog/hcatalog-pig-adapter/pom-new.xml
+move_source hcatalog/pom.xml hcatalog/pom-new.xml
+move_source hcatalog/storage-handlers/hbase/pom.xml hcatalog/storage-handlers/hbase/pom-new.xml
+move_source hcatalog/server-extensions/pom.xml hcatalog/server-extensions/pom-new.xml
+move_source hcatalog/core/pom.xml hcatalog/core/pom-new.xml
+move_source hcatalog/webhcat/java-client/pom.xml hcatalog/webhcat/java-client/pom-new.xml
+move_source hcatalog/webhcat/svr/pom.xml hcatalog/webhcat/svr/pom-new.xml
+
+move_source hcatalog/hcatalog-pig-adapter/pom-old.xml hcatalog/hcatalog-pig-adapter/pom.xml
+move_source hcatalog/pom-old.xml hcatalog/pom.xml
+move_source hcatalog/storage-handlers/hbase/pom-old.xml hcatalog/storage-handlers/hbase/pom.xml
+move_source hcatalog/server-extensions/pom-old.xml hcatalog/server-extensions/pom.xml
+move_source hcatalog/core/pom-old.xml hcatalog/core/pom.xml
+move_source hcatalog/webhcat/java-client/pom-old.xml hcatalog/webhcat/java-client/pom.xml
+move_source hcatalog/webhcat/svr/pom-old.xml hcatalog/webhcat/svr/pom.xml
+
+move_source data/conf/hive-site.xml data/conf/hive-site-new.xml
+move_source data/conf/hive-site-old.xml data/conf/hive-site.xml
+move_source data/conf/hive-log4j.properties data/conf/hive-log4j-new.properties
+move_source data/conf/hive-log4j-old.properties data/conf/hive-log4j.properties
+
+move_source shims/0.20/src/main/java shims/src/0.20/java
+move_source shims/0.20S/src/main/java shims/src/0.20S/java
+move_source shims/0.23/src/main/java shims/src/0.23/java
+move_source shims/common/src/main/java shims/src/common/java
+move_source shims/common-secure/src/main/java shims/src/common-secure/java
+
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
+
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
+move_source itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
+
+move_source itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
+move_source itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java service/src/test/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/service/TestHiveServer.java service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/scripts/extracturl.java ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java
+
+move_source itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java
+
+move_source beeline/src/main/resources/sql-keywords.properties beeline/src/java/org/apache/hive/beeline/sql-keywords.properties
+move_source beeline/src/main/resources/BeeLine.properties beeline/src/java/org/apache/hive/beeline/BeeLine.properties
+
+move_source ql/src/main/resources/hive-exec-log4j.properties ql/src/java/conf/hive-exec-log4j.properties
+move_source common/src/main/resources/hive-log4j.properties common/src/java/conf/hive-log4j.properties
+
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java ql/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
+move_source ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
+move_source ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
+move_source ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java ql/src/test/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java
+
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckTableAccessHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckTableAccessHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyContentSummaryCacheHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyContentSummaryCacheHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifySessionStateLocalErrorsHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifySessionStateLocalErrorsHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifySessionStateStackTracesHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifySessionStateStackTracesHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/MapJoinCounterHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/MapJoinCounterHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckQueryPropertiesHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckQueryPropertiesHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyOutputTableLocationSchemeIsFileHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyOutputTableLocationSchemeIsFileHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyIsLocalModeHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyIsLocalModeHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyCachingPrintStreamHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyCachingPrintStreamHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyNumReducersHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyNumReducersHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsNotSubdirectoryOfTableHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsNotSubdirectoryOfTableHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyOverriddenConfigsHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyOverriddenConfigsHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsSubdirectoryOfTableHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsSubdirectoryOfTableHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/OptrStatGroupByHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/OptrStatGroupByHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyTableDirectoryIsEmptyHook.java ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyTableDirectoryIsEmptyHook.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/DummyStatsPublisher.java ql/src/test/org/apache/hadoop/hive/ql/stats/DummyStatsPublisher.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/DummyStatsAggregator.java ql/src/test/org/apache/hadoop/hive/ql/stats/DummyStatsAggregator.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/KeyVerifyingStatsAggregator.java ql/src/test/org/apache/hadoop/hive/ql/stats/KeyVerifyingStatsAggregator.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/io/udf/Rot13InputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13InputFormat.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
+
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
+move_source itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
+move_source itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+
+move_source itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+move_source itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+
+move_source itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java
+move_source itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
+move_source itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
+move_source itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
+
+move_source itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+
+move_source itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe3.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe4.java
+move_source itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe5.java
+
diff --git a/maven-rollforward.sh b/maven-rollforward.sh
new file mode 100644
index 0000000..e80a6c1
--- /dev/null
+++ b/maven-rollforward.sh
@@ -0,0 +1,169 @@
+set -e
+# Move one path to a new location, creating the destination directory first.
+# Arguments are quoted so the script stays correct for paths containing spaces.
+move_source() {
+  source="$1"
+  target="$2"
+  mkdir -p "$(dirname "$target")"
+  mv "$source" "$target"
+}
+move_source serde/src/test/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java
+move_source serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java
+move_source service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
+move_source service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartition.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnBothClientServer.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyServer.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestSetUGIOnOnlyClient.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestRawStoreTxn.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRawStoreTxn.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreListenersError.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteUGIHiveMetaStoreIpAddress.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteUGIHiveMetaStoreIpAddress.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java
+move_source metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+move_source service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java itests/hive-unit/src/test/java/org/apache/hive/service/server/TestHiveServer2Concurrency.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
+move_source hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+move_source hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
+# move existing pom.xml to pom-old.xml
+move_source hcatalog/hcatalog-pig-adapter/pom.xml hcatalog/hcatalog-pig-adapter/pom-old.xml
+move_source hcatalog/pom.xml hcatalog/pom-old.xml
+move_source hcatalog/storage-handlers/hbase/pom.xml hcatalog/storage-handlers/hbase/pom-old.xml
+move_source hcatalog/server-extensions/pom.xml hcatalog/server-extensions/pom-old.xml
+move_source hcatalog/core/pom.xml hcatalog/core/pom-old.xml
+move_source hcatalog/webhcat/java-client/pom.xml hcatalog/webhcat/java-client/pom-old.xml
+move_source hcatalog/webhcat/svr/pom.xml hcatalog/webhcat/svr/pom-old.xml
+# move pom-new.xml to pom.xml
+move_source hcatalog/hcatalog-pig-adapter/pom-new.xml hcatalog/hcatalog-pig-adapter/pom.xml
+move_source hcatalog/pom-new.xml hcatalog/pom.xml
+move_source hcatalog/storage-handlers/hbase/pom-new.xml hcatalog/storage-handlers/hbase/pom.xml
+move_source hcatalog/server-extensions/pom-new.xml hcatalog/server-extensions/pom.xml
+move_source hcatalog/core/pom-new.xml hcatalog/core/pom.xml
+move_source hcatalog/webhcat/java-client/pom-new.xml hcatalog/webhcat/java-client/pom.xml
+move_source hcatalog/webhcat/svr/pom-new.xml hcatalog/webhcat/svr/pom.xml
+
+move_source data/conf/hive-site.xml data/conf/hive-site-old.xml
+move_source data/conf/hive-site-new.xml data/conf/hive-site.xml
+move_source data/conf/hive-log4j.properties data/conf/hive-log4j-old.properties
+move_source data/conf/hive-log4j-new.properties data/conf/hive-log4j.properties
+
+# Eclipse doesn't like .. references in its path to src
+move_source shims/src/0.20/java shims/0.20/src/main/java
+move_source shims/src/0.20S/java shims/0.20S/src/main/java
+move_source shims/src/0.23/java shims/0.23/src/main/java
+move_source shims/src/common/java shims/common/src/main/java
+move_source shims/src/common-secure/java shims/common-secure/src/main/java
+# cyclic deps
+move_source shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
+move_source shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
+move_source shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
+
+move_source jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
+move_source jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+
+move_source service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
+move_source service/src/test/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java
+move_source service/src/test/org/apache/hadoop/hive/service/TestHiveServer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/service/TestHiveServer.java
+
+move_source ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java itests/util/src/main/java/org/apache/hadoop/hive/scripts/extracturl.java
+
+move_source beeline/src/test/org/apache/hive/beeline/src/test/TestSchemaTool.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
+
+move_source beeline/src/java/org/apache/hive/beeline/sql-keywords.properties beeline/src/main/resources/sql-keywords.properties
+move_source beeline/src/java/org/apache/hive/beeline/BeeLine.properties beeline/src/main/resources/BeeLine.properties
+
+move_source ql/src/java/conf/hive-exec-log4j.properties ql/src/main/resources/hive-exec-log4j.properties
+move_source common/src/java/conf/hive-log4j.properties common/src/main/resources/hive-log4j.properties
+
+move_source ql/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyAuthenticator.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckTableAccessHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckTableAccessHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyContentSummaryCacheHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyContentSummaryCacheHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifySessionStateLocalErrorsHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifySessionStateLocalErrorsHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifySessionStateStackTracesHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifySessionStateStackTracesHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/MapJoinCounterHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/MapJoinCounterHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckQueryPropertiesHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckQueryPropertiesHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyOutputTableLocationSchemeIsFileHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyOutputTableLocationSchemeIsFileHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyIsLocalModeHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyIsLocalModeHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyCachingPrintStreamHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyCachingPrintStreamHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyNumReducersHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyNumReducersHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsNotSubdirectoryOfTableHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsNotSubdirectoryOfTableHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyOverriddenConfigsHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyOverriddenConfigsHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsSubdirectoryOfTableHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyPartitionIsSubdirectoryOfTableHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/OptrStatGroupByHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/OptrStatGroupByHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyTableDirectoryIsEmptyHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyTableDirectoryIsEmptyHook.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/stats/DummyStatsPublisher.java itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/DummyStatsPublisher.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/stats/DummyStatsAggregator.java itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/DummyStatsAggregator.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/stats/KeyVerifyingStatsAggregator.java itests/util/src/main/java/org/apache/hadoop/hive/ql/stats/KeyVerifyingStatsAggregator.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13InputFormat.java itests/util/src/main/java/org/apache/hadoop/hive/ql/io/udf/Rot13InputFormat.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java itests/util/src/main/java/org/apache/hadoop/hive/ql/io/udf/Rot13OutputFormat.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
+
+
+move_source ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
+move_source ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+
+move_source hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+move_source hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+
+move_source hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java
+move_source hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
+move_source hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
+move_source hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
+
+move_source hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+
+move_source ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe3.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe4.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
+move_source ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe5.java itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
+
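A note on the script above: it is intended to be run once from the repository
root, for example as bash maven-rollforward.sh. Because of set -e it stops at
the first move that fails, so a partial run can be diagnosed at the exact
failing file rather than leaving the tree in an ambiguous state; re-running it
after a completed move will fail, since the source paths no longer exist.
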
diff --git a/metastore/pom.xml b/metastore/pom.xml
new file mode 100644
index 0000000..c550ff1
--- /dev/null
+++ b/metastore/pom.xml
@@ -0,0 +1,235 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-metastore</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Metastore</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.jolbox</groupId>
+ <artifactId>bonecp</artifactId>
+ <version>${bonecp.version}</version>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>${commons-cli.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <!-- XXX we probably shouldn't be shipping this but the tests depend on it -->
+ <dependency>
+ <groupId>org.apache.derby</groupId>
+ <artifactId>derby</artifactId>
+ <version>${derby.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-api-jdo</artifactId>
+ <version>${datanucleus-api-jdo.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-core</artifactId>
+ <version>${datanucleus-core.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-rdbms</artifactId>
+ <version>${datanucleus-rdbms.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.jdo</groupId>
+ <artifactId>jdo-api</artifactId>
+ <version>${jdo-api.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr-runtime</artifactId>
+ <version>${antlr.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libfb303</artifactId>
+ <version>${libfb303.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <resources>
+ <resource>
+ <directory>${basedir}/src/model</directory>
+ <includes>
+ <include>package.jdo</include>
+ </includes>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr3-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>antlr</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/model</source>
+ <source>src/gen/thrift/gen-javabean</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-maven-plugin</artifactId>
+ <configuration>
+ <api>JDO</api>
+ <verbose>true</verbose>
+ <metadataIncludes>**/*.jdo</metadataIncludes>
+ </configuration>
+ <executions>
+ <execution>
+ <phase>process-classes</phase>
+ <goals>
+ <goal>enhance</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
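A note on the datanucleus-maven-plugin execution above: JDO bytecode
enhancement can only run once the model classes have been compiled, which is
why it is bound to the process-classes phase, the phase immediately after
compile in Maven's default lifecycle. The metadataIncludes pattern picks up
the package.jdo descriptor that the resources section ships from src/model.
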
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 5bb21c7..bba311b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -334,9 +334,7 @@ private boolean init() throws MetaException {
} catch (Exception e) {
// log exception, but ignore inability to start
LOG.error("error in Metrics init: " + e.getClass().getName() + " "
- + e.getMessage());
- MetaStoreUtils.printStackTrace(e);
-
+ + e.getMessage(), e);
}
}
@@ -466,8 +464,7 @@ public String startFunction(String function, String extraLogInfo) {
Metrics.startScope(function);
} catch (IOException e) {
LOG.debug("Exception when starting metrics scope"
- + e.getClass().getName() + " " + e.getMessage());
- MetaStoreUtils.printStackTrace(e);
+ + e.getClass().getName() + " " + e.getMessage(), e);
}
return function;
}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 6d477eb..ffcdc02 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -76,21 +76,6 @@ public class MetaStoreUtils {
public static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
- /**
- * printStackTrace
- *
- * Helper function to print an exception stack trace to the log and not stderr
- *
- * @param e
- * the exception
- *
- */
- static public void printStackTrace(Exception e) {
- for (StackTraceElement s : e.getStackTrace()) {
- LOG.error(s);
- }
- }
-
public static Table createColumnsetSchema(String name, List<String> columns,
List<String> partCols, Configuration conf) throws MetaException {
@@ -175,14 +160,17 @@ static public void recursiveDelete(File f) throws IOException {
static public Deserializer getDeserializer(Configuration conf,
Properties schema) throws MetaException {
try {
- Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(
- schema.getProperty(serdeConstants.SERIALIZATION_LIB)).asSubclass(Deserializer.class), conf);
+ String clazzName = schema.getProperty(serdeConstants.SERIALIZATION_LIB);
+      if (clazzName == null) {
+ throw new IllegalStateException("Property " + serdeConstants.SERIALIZATION_LIB + " cannot be null");
+ }
+ Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(clazzName)
+ .asSubclass(Deserializer.class), conf);
deserializer.initialize(conf, schema);
return deserializer;
} catch (Exception e) {
LOG.error("error in initSerDe: " + e.getClass().getName() + " "
- + e.getMessage());
- MetaStoreUtils.printStackTrace(e);
+ + e.getMessage(), e);
throw new MetaException(e.getClass().getName() + " " + e.getMessage());
}
}
@@ -221,8 +209,7 @@ static public Deserializer getDeserializer(Configuration conf,
throw e;
} catch (Exception e) {
LOG.error("error in initSerDe: " + e.getClass().getName() + " "
- + e.getMessage());
- MetaStoreUtils.printStackTrace(e);
+ + e.getMessage(), e);
throw new MetaException(e.getClass().getName() + " " + e.getMessage());
}
}
@@ -258,8 +245,7 @@ static public Deserializer getDeserializer(Configuration conf,
throw e;
} catch (Exception e) {
LOG.error("error in initSerDe: " + e.getClass().getName() + " "
- + e.getMessage());
- MetaStoreUtils.printStackTrace(e);
+ + e.getMessage(), e);
throw new MetaException(e.getClass().getName() + " " + e.getMessage());
}
}
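
The MetaStoreUtils hunks above delete the hand-rolled printStackTrace helper
and switch every caller to the two-argument logging overload, which records
the message and the complete stack trace, including chained causes, in a
single call. A minimal sketch of the pattern, using commons-logging as the
surrounding code does; the class and method names are illustrative, not
Hive's:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

class DeserializerLoader {
  private static final Log LOG = LogFactory.getLog(DeserializerLoader.class);

  void load(String clazzName) {
    try {
      Class.forName(clazzName);
    } catch (Exception e) {
      // One call logs the message and the full trace, replacing the removed
      // loop over e.getStackTrace() that dropped the cause chain.
      LOG.error("error in initSerDe: " + e.getClass().getName() + " "
          + e.getMessage(), e);
    }
  }
}
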
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 010cbd4..93535d4 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -1732,7 +1732,7 @@ private List<MPartition> listMPartitions(String dbName, String tableName,
LOG.debug("Done executing query for listMPartitions");
pm.retrieveAll(mparts);
success = commitTransaction();
- LOG.debug("Done retrieving all objects for listMPartitions");
+ LOG.debug("Done retrieving all objects for listMPartitions " + mparts);
} finally {
if (!success) {
rollbackTransaction();
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
index 3e48a42..7576f39 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestMarkPartitionRemote.java
@@ -19,15 +19,21 @@
package org.apache.hadoop.hive.metastore;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-public class TestMarkPartitionRemote extends TestMarkPartition{
+public class TestMarkPartitionRemote extends TestMarkPartition {
private static class RunMS implements Runnable {
+ private final int port;
+
+ public RunMS(int port) {
+ this.port = port;
+ }
@Override
public void run() {
try {
- HiveMetaStore.main(new String[] { "29111" });
+ HiveMetaStore.main(new String[] { String.valueOf(port) });
} catch (Throwable e) {
e.printStackTrace(System.err);
assert false;
@@ -38,10 +44,11 @@ public void run() {
@Override
protected void setUp() throws Exception {
super.setUp();
- Thread t = new Thread(new RunMS());
+ int port = MetaStoreUtils.findFreePort();
+ Thread t = new Thread(new RunMS(port));
t.setDaemon(true);
t.start();
- hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:29111");
+ hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
Thread.sleep(30000);
}
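
The hunk above drops the hardcoded port 29111 in favour of
MetaStoreUtils.findFreePort(). That helper is not part of this patch, but the
conventional way to implement the idea is to bind a ServerSocket to port 0 and
read back whatever ephemeral port the OS assigned; a sketch under that
assumption:

import java.io.IOException;
import java.net.ServerSocket;

final class PortFinder {
  static int findFreePort() throws IOException {
    // Port 0 asks the OS for any currently free ephemeral port.
    ServerSocket socket = new ServerSocket(0);
    try {
      return socket.getLocalPort();
    } finally {
      socket.close();
    }
  }
}

There is a small window between closing the probe socket and the metastore
binding the port, which is presumably why the test also configures three
connection retries before giving up.
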
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
index 967c40d..6206c5c 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
@@ -35,7 +35,7 @@ public class TestMetastoreVersion extends TestCase {
protected HiveConf hiveConf;
private Driver driver;
- private String hiveHome;
+ private String metaStoreRoot;
private String testMetastoreDB;
Random randomNum = new Random();
@@ -54,7 +54,7 @@ protected void setUp() throws Exception {
File.separator + "test_metastore-" + randomNum.nextInt();
System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
"jdbc:derby:" + testMetastoreDB + ";create=true");
- hiveHome = System.getProperty("hive.home");
+ metaStoreRoot = System.getProperty("test.tmp.dir");
}
@Override
@@ -153,13 +153,13 @@ public void testVersionMisMatch () throws Exception {
// write the given version to metastore
private String getVersion(HiveConf conf) throws HiveMetaException {
- MetaStoreSchemaInfo schemInfo = new MetaStoreSchemaInfo(hiveHome, conf, "derby");
+ MetaStoreSchemaInfo schemInfo = new MetaStoreSchemaInfo(metaStoreRoot, conf, "derby");
return getMetaStoreVersion();
}
// write the given version to metastore
private void setVersion(HiveConf conf, String version) throws HiveMetaException {
- MetaStoreSchemaInfo schemInfo = new MetaStoreSchemaInfo(hiveHome, conf, "derby");
+ MetaStoreSchemaInfo schemInfo = new MetaStoreSchemaInfo(metaStoreRoot, conf, "derby");
setMetaStoreVersion(version, "setVersion test");
}
diff --git a/odbc/Makefile b/odbc/Makefile
index 2c55903..f562705 100644
--- a/odbc/Makefile
+++ b/odbc/Makefile
@@ -53,7 +53,7 @@
LIBTOOL = $(SHELL) /usr/bin/libtool
LINK = ln -sf
-BUILD_DIR = $(HIVE_ROOT)/build
+BUILD_DIR = $(BASE_DIR)/target
ODBC_BUILD_DIR = $(BUILD_DIR)/odbc
OBJ_SERVICE_BUILD_DIR = $(BUILD_DIR)/service/objs
OBJ_QL_BUILD_DIR = $(BUILD_DIR)/ql/objs
diff --git a/odbc/pom.xml b/odbc/pom.xml
new file mode 100644
index 0000000..0ddf12d
--- /dev/null
+++ b/odbc/pom.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-odbc</artifactId>
+ <packaging>pom</packaging>
+ <name>Hive ODBC</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ <make.cmd>make</make.cmd>
+ </properties>
+
+ <!-- odbc code depends on code generated in these modules -->
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+
+ <profiles>
+ <profile>
+ <id>odbc</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>compile-odbc</id>
+ <phase>compile</phase>
+ <configuration>
+ <target>
+ <condition property="word.size" value="64" else="32">
+ <contains string="${os.arch}" substring="64"/>
+ </condition>
+ <exec executable="${make.cmd}" failonerror="true">
+ <arg value="clean"/>
+ <env key="HIVE_ROOT" value="${basedir}/${hive.path.to.root}/"/>
+ <env key="BASE_DIR" value="${basedir}"/>
+ </exec>
+ <exec executable="${make.cmd}" failonerror="true">
+ <env key="HIVE_ROOT" value="${basedir}/${hive.path.to.root}/"/>
+ <env key="BASE_DIR" value="${basedir}"/>
+ <env key="WORD_SIZE" value="${word.size}"/>
+ <env key="THRIFT_HOME" value="${thrift.home}"/>
+ <env key="BOOST_HOME" value="${boost.home}"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>test-odbc</id>
+ <phase>test</phase>
+ <configuration>
+ <target>
+ <condition property="word.size" value="64" else="32">
+ <contains string="${os.arch}" substring="64"/>
+ </condition>
+ <exec executable="${make.cmd}" failonerror="true">
+ <arg value="test"/>
+ <env key="HIVE_ROOT" value="${basedir}/${hive.path.to.root}/"/>
+ <env key="BASE_DIR" value="${basedir}"/>
+ <env key="WORD_SIZE" value="${word.size}"/>
+ <env key="THRIFT_HOME" value="${thrift.home}"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>enforce-property</id>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <configuration>
+ <rules>
+ <requireProperty>
+ <property>boost.home</property>
+ </requireProperty>
+ <requireProperty>
+ <property>thrift.home</property>
+ </requireProperty>
+ </rules>
+ <fail>true</fail>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+</project>
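Because the enforcer execution in the odbc profile requires both boost.home
and thrift.home, a native build of this module would be invoked along the
lines of mvn compile -Podbc -Dthrift.home=/usr/local/thrift
-Dboost.home=/usr/local/boost (the paths are illustrative). Without the
profile activated, the antrun executions are skipped and the Makefile is
never run.
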
diff --git a/packaging/pom.xml b/packaging/pom.xml
new file mode 100644
index 0000000..973b351
--- /dev/null
+++ b/packaging/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-packaging</artifactId>
+ <packaging>pom</packaging>
+ <name>Hive Packaging</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <modules>
+ </modules>
+
+</project>
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..0f115c4
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,457 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <packaging>pom</packaging>
+
+ <name>Hive</name>
+ <url>http://hive.apache.org</url>
+ <prerequisites>
+ <maven>2.2.1</maven>
+ </prerequisites>
+
+ <modules>
+ <module>ant</module>
+ <module>beeline</module>
+ <module>cli</module>
+ <module>common</module>
+ <module>contrib</module>
+ <module>hbase-handler</module>
+ <module>hcatalog</module>
+ <module>hwi</module>
+ <module>jdbc</module>
+ <module>metastore</module>
+ <module>odbc</module>
+ <module>ql</module>
+ <module>serde</module>
+ <module>service</module>
+ <module>shims</module>
+ <module>testutils</module>
+ <module>packaging</module>
+ </modules>
+
+ <properties>
+ <hive.version.shortname>0.13.0</hive.version.shortname>
+
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <maven.repo.local>${user.home}/.m2/repository</maven.repo.local>
+ <hive.path.to.root>.</hive.path.to.root>
+ <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
+ <test.warehouse.dir>${project.build.directory}/warehouse</test.warehouse.dir>
+ <test.warehouse.scheme>pfile://</test.warehouse.scheme>
+
+ <!-- the versions of libraries that we use -->
+ <activemq.version>5.5.0</activemq.version>
+ <ant.version>1.9.1</ant.version>
+ <antlr.version>3.4</antlr.version>
+ <avro.version>1.7.1</avro.version>
+ <bonecp.version>0.7.1.RELEASE</bonecp.version>
+ <datanucleus-api-jdo.version>3.2.1</datanucleus-api-jdo.version>
+ <datanucleus-core.version>3.2.2</datanucleus-core.version>
+ <datanucleus-rdbms.version>3.2.1</datanucleus-rdbms.version>
+ <checkstyle.version>5.0</checkstyle.version>
+ <findbugs.version>1.3.9</findbugs.version>
+ <commons-cli.version>1.2</commons-cli.version>
+ <commons-codec.version>1.4</commons-codec.version>
+ <commons-collections.version>3.2.1</commons-collections.version>
+ <commons-compress.version>1.4.1</commons-compress.version>
+ <commons-configuration.version>1.6</commons-configuration.version>
+ <commons-exec.version>1.1</commons-exec.version>
+ <commons-httpclient.version>3.0.1</commons-httpclient.version>
+ <commons-io.version>2.4</commons-io.version>
+ <commons-lang.version>2.4</commons-lang.version>
+ <commons-logging.version>1.0.4</commons-logging.version>
+ <commons-pool.version>1.5.4</commons-pool.version>
+ <derby.version>10.4.2.0</derby.version>
+ <guava.version>11.0.2</guava.version>
+ <groovy.version>2.1.6</groovy.version>
+ <hadoop-20.version>0.20.2</hadoop-20.version>
+ <hadoop-20S.version>1.1.2</hadoop-20S.version>
+ <hadoop-23.version>2.0.5-alpha</hadoop-23.version>
+ <hbase.version>0.94.6.1</hbase.version>
+ <jackson.version>1.9.2</jackson.version>
+ <javaewah.version>0.3.2</javaewah.version>
+ <javolution.version>5.5.1</javolution.version>
+ <jersey.version>1.14</jersey.version>
+ <jetty.webhcat.version>7.6.0.v20120127</jetty.webhcat.version>
+ <jdo-api.version>3.0.1</jdo-api.version>
+ <jdom.version>1.1</jdom.version>
+ <jetty.version>6.1.26</jetty.version>
+ <jline.version>0.9.94</jline.version>
+ <jms.version>1.1</jms.version>
+ <json.version>20090211</json.version>
+ <junit.version>4.10</junit.version>
+ <kryo.version>2.22</kryo.version>
+ <libfb303.version>0.9.0</libfb303.version>
+ <libthrift.version>0.9.0</libthrift.version>
+ <log4j.version>1.2.16</log4j.version>
+ <maven-javadoc-plugin.version>2.4</maven-javadoc-plugin.version>
+ <mockito-all.version>1.8.2</mockito-all.version>
+ <mina.version>2.0.0-M5</mina.version>
+ <pig.version>0.10.1</pig.version>
+ <protobuf.version>2.5.0</protobuf.version>
+ <rat.version>0.8</rat.version>
+ <slf4j.version>1.6.1</slf4j.version>
+ <ST4.version>4.0.4</ST4.version>
+ <tempus-fugit.version>1.1</tempus-fugit.version>
+ <snappy.version>0.2</snappy.version>
+ <wadl-resourcedoc-doclet.version>1.4</wadl-resourcedoc-doclet.version>
+ <velocity.version>1.5</velocity.version>
+ <xerces.version>2.9.1</xerces.version>
+ <zookeeper.version>3.4.3</zookeeper.version>
+ </properties>
+
+ <repositories>
+ <repository>
+ <id>datanucleus</id>
+ <name>datanucleus maven repository</name>
+ <url>http://www.datanucleus.org/downloads/maven2</url>
+ <layout>default</layout>
+ <releases>
+ <enabled>true</enabled>
+ <checksumPolicy>warn</checksumPolicy>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>glassfish-repository</id>
+ <url>http://maven.glassfish.org/content/groups/glassfish</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>glassfish-repo-archive</id>
+ <url>http://maven.glassfish.org/content/groups/glassfish</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>sonatype-snapshot</id>
+ <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ </repositories>
+
+ <dependencies>
+ <!-- global dependencies -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4j.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>${slf4j.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr3-maven-plugin</artifactId>
+ <version>${antlr.version}</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>1.7</version>
+ <dependencies>
+ <dependency>
+ <groupId>ant-contrib</groupId>
+ <artifactId>ant-contrib</artifactId>
+ <version>1.0b3</version>
+ <exclusions>
+ <exclusion>
+ <groupId>ant</groupId>
+ <artifactId>ant</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.4</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <version>1.3.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-install-plugin</artifactId>
+ <version>2.4</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>2.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.16</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <version>2.4</version>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>1.8</version>
+ </plugin>
+ <plugin>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-maven-plugin</artifactId>
+ <version>3.3.0-release</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-core</artifactId>
+ <version>${datanucleus-core.version}</version>
+ </dependency>
+ </dependencies>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>define-classpath</id>
+ <phase>process-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <exportAntProperties>true</exportAntProperties>
+ <target>
+ <property name="maven.test.classpath" refid="maven.test.classpath"/>
+ </target>
+ </configuration>
+ </execution>
+ <execution>
+ <id>setup-test-dirs</id>
+ <phase>process-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <delete dir="${test.tmp.dir}" />
+ <delete dir="${test.warehouse.dir}" />
+ <mkdir dir="${test.tmp.dir}" />
+ <mkdir dir="${test.warehouse.dir}" />
+ <mkdir dir="${test.tmp.dir}/conf" />
+ <!-- copies hive-site.xml so it can be modified -->
+ <copy todir="${test.tmp.dir}/conf/">
+ <fileset dir="${basedir}/${hive.path.to.root}/data/conf/"/>
+ </copy>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-clean-plugin</artifactId>
+ <version>2.5</version>
+ <configuration>
+ <filesets>
+ <fileset>
+ <directory>./</directory>
+ <includes>
+ <include>datanucleus.log</include>
+ <include>derby.log</include>
+ </includes>
+ <followSymlinks>false</followSymlinks>
+ </fileset>
+ <fileset>
+ <directory>build</directory>
+ <followSymlinks>false</followSymlinks>
+ </fileset>
+ </filesets>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>**/TestSerDe.java</exclude>
+ <exclude>**/TestHiveMetaStore.java</exclude>
+ <exclude>**/ql/exec/vector/util/*.java</exclude>
+ <exclude>**/ql/exec/vector/udf/legacy/*.java</exclude>
+ <exclude>**/ql/exec/vector/udf/generic/*.java</exclude>
+ <exclude>**/TestHiveServer2Concurrency.java</exclude>
+ </excludes>
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <reuseForks>false</reuseForks>
+ <failIfNoTests>false</failIfNoTests>
+ <argLine></argLine>
+ <additionalClasspathElements>
+ <additionalClasspathElement>${test.tmp.dir}/conf</additionalClasspathElement>
+ <additionalClasspathElement>${basedir}/${hive.path.to.root}/conf</additionalClasspathElement>
+ </additionalClasspathElements>
+ <environmentVariables>
+ <TZ>US/Pacific</TZ>
+ <LANG>en_US.UTF-8</LANG>
+ <HADOOP_CLASSPATH>${test.tmp.dir}/conf:${basedir}/${hive.path.to.root}/conf</HADOOP_CLASSPATH>
+ <HIVE_HADOOP_TEST_CLASSPATH>${maven.test.classpath}</HIVE_HADOOP_TEST_CLASSPATH>
+ </environmentVariables>
+ <systemPropertyVariables>
+ <build.dir>${project.build.directory}</build.dir>
+ <!-- required by zk test ClientBase -->
+ <build.test.dir>${test.tmp.dir}</build.test.dir>
+ <!-- required by a few tests to find the derby jar -->
+ <derby.version>${derby.version}</derby.version>
+ <derby.stream.error.file>${test.tmp.dir}/derby.log</derby.stream.error.file>
+ <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
+ <!-- required by Hadoop's JobHistory -->
+ <hadoop.log.dir>${test.tmp.dir}</hadoop.log.dir>
+ <hive.root>${basedir}/${hive.path.to.root}/</hive.root>
+ <hive.version>${project.version}</hive.version>
+ <!-- required for hive-exec jar path and tests which reference a jar -->
+ <maven.local.repository>${maven.repo.local}</maven.local.repository>
+ <mapred.job.tracker>local</mapred.job.tracker>
+ <log4j.configuration>file://${test.tmp.dir}/conf/hive-log4j.properties</log4j.configuration>
+ <log4j.debug>true</log4j.debug>
+          <!-- don't dirty up /tmp -->
+ <java.io.tmpdir>${test.tmp.dir}</java.io.tmpdir>
+ <!-- Hadoop's minidfs class uses this -->
+ <test.build.data>${test.tmp.dir}</test.build.data>
+ <!-- required by QTestUtil -->
+ <test.data.files>${basedir}/${hive.path.to.root}/data/files</test.data.files>
+ <test.data.dir>${basedir}/${hive.path.to.root}/data/files</test.data.dir>
+ <test.tmp.dir>${test.tmp.dir}</test.tmp.dir>
+ <test.dfs.mkdir>${test.dfs.mkdir}</test.dfs.mkdir>
+ <test.output.overwrite>${test.output.overwrite}</test.output.overwrite>
+ <test.warehouse.dir>${test.warehouse.scheme}${test.warehouse.dir}</test.warehouse.dir>
+ <!-- EnforceReadOnlyTables hook and QTestUtil -->
+ <test.src.tables>src,src1,srcbucket,srcbucket2,src_json,src_thrift,src_sequencefile,srcpart,alltypesorc</test.src.tables>
+ </systemPropertyVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>thriftif</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-thrift-sources</id>
+ <phase>generate-sources</phase>
+ <configuration>
+ <target>
+ <taskdef name="for" classname="net.sf.antcontrib.logic.ForTask"
+ classpathref="maven.plugin.classpath" />
+ <property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode --gen cpp --gen php --gen py --gen rb"/>
+ <property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
+ <delete dir="${thrift.gen.dir}"/>
+ <mkdir dir="${thrift.gen.dir}"/>
+ <for param="thrift.file">
+ <path>
+ <fileset dir="." includes="if/*.thrift,if/test/*.thrift" />
+ </path>
+ <sequential>
+ <echo message="Generating Thrift code for @{thrift.file}"/>
+ <exec executable="${thrift.home}/bin/thrift" failonerror="true" dir=".">
+ <arg line="${thrift.args} -I ${basedir}/include -I ${basedir}/.. -o ${thrift.gen.dir} @{thrift.file} " />
+ </exec>
+ </sequential>
+ </for>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>enforce-property</id>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <configuration>
+ <rules>
+ <requireProperty>
+ <property>thrift.home</property>
+ </requireProperty>
+ </rules>
+ <fail>true</fail>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+</project>
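Two things in this pom are worth calling out: the surefire block wires the whole test environment through system properties (test.tmp.dir, test.src.tables, test.output.overwrite, and friends), and the thriftif profile regenerates the Thrift bindings, with the enforcer rule failing the build unless thrift.home is set. A hedged sketch of both invocations (the q-file and thrift.home values are illustrative):

    mvn test -Dtest=TestCliDriver -Dqfile=join1.q -Dtest.output.overwrite=true
    mvn generate-sources -Pthriftif -Dthrift.home=/usr/local/thrift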
diff --git a/ql/.gitignore b/ql/.gitignore
new file mode 100644
index 0000000..916e17c
--- /dev/null
+++ b/ql/.gitignore
@@ -0,0 +1 @@
+dependency-reduced-pom.xml
diff --git a/ql/pom.xml b/ql/pom.xml
new file mode 100644
index 0000000..a2ad854
--- /dev/null
+++ b/ql/pom.xml
@@ -0,0 +1,386 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-exec</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Query Language</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-ant</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.esotericsoftware.kryo</groupId>
+ <artifactId>kryo</artifactId>
+ <version>${kryo.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>${commons-codec.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ <version>${commons-collections.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ <version>${commons-configuration.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javolution</groupId>
+ <artifactId>javolution</artifactId>
+ <version>${javolution.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>${log4j.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr-runtime</artifactId>
+ <version>${antlr.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>ST4</artifactId>
+ <version>${ST4.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.avro</groupId>
+ <artifactId>avro-mapred</artifactId>
+ <version>${avro.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.ant</groupId>
+ <artifactId>ant</artifactId>
+ <version>${ant.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libfb303</artifactId>
+ <version>${libfb303.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.groovy</groupId>
+ <artifactId>groovy-all</artifactId>
+ <version>${groovy.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-core</artifactId>
+ <version>${datanucleus-core.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.googlecode.javaewah</groupId>
+ <artifactId>JavaEWAH</artifactId>
+ <version>${javaewah.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.iq80.snappy</groupId>
+ <artifactId>snappy</artifactId>
+ <version>${snappy.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>${json.version}</version>
+ </dependency>
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-testutils</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase</artifactId>
+ <version>${hbase.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.mina</groupId>
+ <artifactId>mina-core</artifactId>
+ <version>${mina.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>protobuf</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-protobuf-sources</id>
+ <phase>generate-sources</phase>
+ <configuration>
+ <target>
+ <property name="protobuf.src.dir" location="${basedir}/src/protobuf"/>
+ <property name="protobuf.build.dir" location="${basedir}/src/gen/protobuf/gen-java"/>
+ <echo>Building ORC Protobuf</echo>
+ <mkdir dir="${protobuf.build.dir}"/>
+ <exec executable="protoc" failonerror="true">
+ <arg value="--java_out=${protobuf.build.dir}"/>
+ <arg value="-I=${protobuf.src.dir}/org/apache/hadoop/hive/ql/io/orc"/>
+ <arg value="${protobuf.src.dir}/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr3-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>antlr</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <includes>
+ <include>**/HiveLexer.g</include>
+ <include>**/HiveParser.g</include>
+ </includes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-sources</id>
+ <phase>generate-sources</phase>
+ <configuration>
+ <target>
+ <property name="compile.classpath" refid="maven.compile.classpath"/>
+ <taskdef name="vectorcodegen" classname="org.apache.hadoop.hive.ant.GenVectorCode"
+ classpath="${compile.classpath}"/>
+ <mkdir dir="${project.build.directory}/generated-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/gen/"/>
+ <mkdir dir="${project.build.directory}/generated-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/"/>
+ <mkdir dir="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/gen/"/>
+ <vectorcodegen templateBaseDir="${basedir}/src/gen/vectorization/" buildDir="${project.build.directory}" />
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>build-exec-bundle</id>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <artifactSet>
+ <includes>
+ <!-- order is meant to be the same as the ant build -->
+ <include>org.apache.hive:hive-common</include>
+ <include>org.apache.hive:hive-exec</include>
+ <include>org.apache.hive:hive-serde</include>
+ <include>com.esotericsoftware.kryo:kryo</include>
+ <include>org.apache.thrift:libthrift</include>
+ <include>commons-lang:commons-lang</include>
+ <include>org.json:json</include>
+ <include>org.apache.avro:avro-mapred</include>
+ <include>org.apache.hive:hive-shims:*:uberjar</include>
+ <include>com.googlecode.javaewah:JavaEWAH</include>
+ <include>javolution:javolution</include>
+ <include>com.google.protobuf:protobuf-java</include>
+ <include>org.iq80.snappy:snappy</include>
+ <include>org.codehaus.jackson:jackson-core-asl</include>
+ <include>org.codehaus.jackson:jackson-mapper-asl</include>
+ <include>com.google.guava:guava</include>
+ </includes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen/protobuf/gen-java</source>
+ <source>src/gen/thrift/gen-javabean</source>
+ <source>${project.build.directory}/generated-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.build.directory}/generated-test-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
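hive-exec generates code three ways: the antlr plugin compiles HiveLexer.g/HiveParser.g, the antrun execution drives the GenVectorCode taskdef into target/generated-sources, and the opt-in protobuf profile shells out to protoc for the ORC messages. A hedged sketch of the opt-in invocations (profile names come from this pom; protoc must be on the PATH):

    mvn generate-sources -Pprotobuf
    mvn clean install -Phadoop-2 -DskipTests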
diff --git a/ql/src/java/conf/hive-exec-log4j.properties b/ql/src/java/conf/hive-exec-log4j.properties
index 7121379..74d62d5 100644
--- a/ql/src/java/conf/hive-exec-log4j.properties
+++ b/ql/src/java/conf/hive-exec-log4j.properties
@@ -17,7 +17,8 @@
# Define some default values that can be overridden by system properties
hive.log.threshold=ALL
hive.root.logger=INFO,FA
-hive.log.dir=/tmp/${user.name}
+hive.log.dir=${java.io.tmpdir}/${user.name}
+hive.query.id=hadoop
hive.log.file=${hive.query.id}.log
# Define the root logger to the system property "hadoop.root.logger".
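Giving hive.query.id a default means ${hive.query.id}.log always resolves to a concrete file name even when the property is never set, and basing hive.log.dir on java.io.tmpdir keeps executor logs inside whatever tmpdir the harness assigns. Both remain overridable at launch; a hedged example (paths illustrative):

    hive --hiveconf hive.log.dir=/var/log/hive --hiveconf hive.root.logger=DEBUG,FA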
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index bcf3795..c09ffde 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -140,55 +140,45 @@ public class Driver implements CommandProcessor {
private String userName;
- private boolean checkLockManager() {
+ private boolean checkConcurrency() throws SemanticException {
boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
if (!supportConcurrency) {
+ LOG.info("Concurrency mode is disabled, not creating a lock manager");
return false;
}
- if ((hiveLockMgr == null)) {
- try {
- setLockManager();
- } catch (SemanticException e) {
- errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
- SQLState = ErrorMsg.findSQLState(e.getMessage());
- downstreamError = e;
- console.printError(errorMessage, "\n"
- + org.apache.hadoop.util.StringUtils.stringifyException(e));
- return false;
- }
- }
+ createLockManager();
// the reason we set the lock manager on the ctx here is that each
// query has its own ctx object. The hiveLockMgr is shared across the
// same instance of Driver, which can run multiple queries.
ctx.setHiveLockMgr(hiveLockMgr);
- return hiveLockMgr != null;
+ return true;
}
- private void setLockManager() throws SemanticException {
- boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
- if (supportConcurrency) {
- String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
- if ((lockMgr == null) || (lockMgr.isEmpty())) {
- throw new SemanticException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
- }
-
- try {
- hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(conf.getClassByName(lockMgr),
- conf);
- hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
- } catch (Exception e) {
- // set hiveLockMgr to null just in case this invalid manager got set to
- // next query's ctx.
- if (hiveLockMgr != null) {
- try {
- hiveLockMgr.close();
- } catch (LockException e1) {
- //nothing can do here
- }
- hiveLockMgr = null;
+ private void createLockManager() throws SemanticException {
+ if (hiveLockMgr != null) {
+ return;
+ }
+ String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
+ LOG.info("Creating lock manager of type " + lockMgr);
+ if ((lockMgr == null) || (lockMgr.isEmpty())) {
+ throw new SemanticException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
+ }
+ try {
+ hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(conf.getClassByName(lockMgr),
+ conf);
+ hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
+ } catch (Exception e1) {
+ // set hiveLockMgr to null just in case this invalid manager got set to
+ // next query's ctx.
+ if (hiveLockMgr != null) {
+ try {
+ hiveLockMgr.close();
+ } catch (LockException e2) {
+ // nothing we can do here
}
- throw new SemanticException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() + e.getMessage());
+ hiveLockMgr = null;
}
+ throw new SemanticException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() + e1.getMessage(), e1);
}
}
@@ -1009,7 +999,18 @@ private CommandProcessorResponse runInternal(String command) throws CommandNeedR
}
boolean requireLock = false;
- boolean ckLock = checkLockManager();
+ boolean ckLock = false;
+ try {
+ ckLock = checkConcurrency();
+ } catch (SemanticException e) {
+ errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+ SQLState = ErrorMsg.findSQLState(e.getMessage());
+ downstreamError = e;
+ console.printError(errorMessage, "\n"
+ + org.apache.hadoop.util.StringUtils.stringifyException(e));
+ ret = 10;
+ return new CommandProcessorResponse(ret, errorMessage, SQLState);
+ }
if (ckLock) {
boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
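The refactoring splits policy from construction: checkConcurrency() answers whether locking applies at all, createLockManager() lazily instantiates the class named by hive.lock.manager, and the SemanticException now surfaces at the runInternal() call site with the original cause chained. A minimal sketch of the configuration this path expects, using the ZooKeeper-backed manager that ships with Hive (quorum value illustrative):

    set hive.support.concurrency=true;
    set hive.lock.manager=org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
    set hive.zookeeper.quorum=localhost:2181;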
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 5bfff32..2cd6c92 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -666,6 +666,12 @@ public static void main(String[] args) throws IOException, HiveException {
boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
+ String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
+ if (queryId.isEmpty()) {
+ queryId = "unknown-" + System.currentTimeMillis();
+ }
+ System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
+
if (noLog) {
// If started from main(), and noLog is on, we should not output
// any logs. To turn the log on, please set -Dtest.silent=false
@@ -760,8 +766,7 @@ public static String generateCmdLine(HiveConf hconf, Context ctx)
if (hadoopLocalMode && (oneProp.equals(hadoopSysDir) || oneProp.equals(hadoopWorkDir))) {
continue;
}
-
- tempConf.set(oneProp, deltaP.getProperty(oneProp));
+ tempConf.set(oneProp, hconf.get(oneProp));
}
// Multiple concurrent local mode job submissions can cause collisions in
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index fdf1629..99ec216 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -61,7 +61,7 @@ public class MapRedTask extends ExecDriver implements Serializable {
static final String HIVE_DEBUG_RECURSIVE = "HIVE_DEBUG_RECURSIVE";
static final String HIVE_MAIN_CLIENT_DEBUG_OPTS = "HIVE_MAIN_CLIENT_DEBUG_OPTS";
static final String HIVE_CHILD_CLIENT_DEBUG_OPTS = "HIVE_CHILD_CLIENT_DEBUG_OPTS";
- static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive"};
+ static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"};
private transient ContentSummary inputSummary = null;
private transient boolean runningViaChild = false;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index ac451d4..540a9a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -87,7 +87,7 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class);
static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE";
static final String HADOOP_OPTS_KEY = "HADOOP_OPTS";
- static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive"};
+ static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"};
public static MemoryMXBean memoryMXBean;
private static final Log LOG = LogFactory.getLog(MapredLocalTask.class);
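Adding hive.query.id to HIVE_SYS_PROP matters because both task launchers replay that whitelist into the child JVM's options, so the query id pinned by ExecDriver.main survives into spawned map-reduce and local tasks. A hedged sketch of the forwarding idiom (variable names illustrative):

    StringBuilder hadoopOpts = new StringBuilder();
    for (String prop : HIVE_SYS_PROP) {
      if (System.getProperty(prop) != null) {
        // forward the whitelisted system property to the child JVM
        hadoopOpts.append(" -D").append(prop).append("=").append(System.getProperty(prop));
      }
    }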
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataPrettyFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataPrettyFormatUtils.java
index e35249c..86da780 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataPrettyFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataPrettyFormatUtils.java
@@ -25,8 +25,6 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import jline.Terminal;
-
/**
* This class provides methods to format the output of DESCRIBE PRETTY
* in a human-readable way.
@@ -116,8 +114,8 @@ private static String breakCommentIntoMultipleLines(String comment,
int columnsAlreadyConsumed, int prettyOutputNumCols) {
if (prettyOutputNumCols == -1) {
- Terminal terminal = Terminal.getTerminal();
- prettyOutputNumCols = terminal.getTerminalWidth() - 1;
+ // XXX fixed at 80 columns to remove the jline dependency
+ prettyOutputNumCols = 80 - 1;
}
int commentNumCols = prettyOutputNumCols - columnsAlreadyConsumed;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 0f48674..889f517 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -274,6 +274,8 @@ private void writeFileSystemStats(DataOutputStream outStream,
int numOfFiles = 0;
boolean unknown = false;
+ System.err.println("XXX tblPath " + tblPath);
+ System.err.println("XXX locations " + locations);
FileSystem fs = tblPath.getFileSystem(conf);
// in case all files in locations do not exist
try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 3031d1c..a3035ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -75,11 +75,11 @@ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticExceptio
String scheme = uri.getScheme();
String authority = uri.getAuthority();
String path = uri.getPath();
- LOG.debug("Path before norm :" + path);
+ LOG.info("Path before norm :" + path);
// generate absolute path relative to home directory
if (!path.startsWith("/")) {
if (testMode) {
- path = (new Path(System.getProperty("build.dir.hive"),
+ path = (new Path(System.getProperty("test.tmp.dir"),
path)).toUri().getPath();
} else {
path = (new Path(new Path("/user/" + System.getProperty("user.name")),
@@ -102,7 +102,7 @@ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticExceptio
authority = defaultURI.getAuthority();
}
- LOG.debug("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
+ LOG.info("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
Collection<String> eximSchemes = conf.getStringCollection(
HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname);
if (!eximSchemes.contains(scheme)) {
@@ -144,7 +144,7 @@ public static String relativeToAbsolutePath(HiveConf conf, String location) thro
String authority = uri.getAuthority();
String path = uri.getPath();
if (!path.startsWith("/")) {
- path = (new Path(System.getProperty("build.dir.hive"),
+ path = (new Path(System.getProperty("test.tmp.dir"),
path)).toUri().getPath();
}
if (StringUtils.isEmpty(scheme)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
index 27fd81d..42f1d77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
@@ -46,23 +46,43 @@
*
*/
public class GenericUDFBridge extends GenericUDF implements Serializable {
+ private static final long serialVersionUID = 4994861742809511113L;
+
/**
* The name of the UDF.
*/
- String udfName;
+ private String udfName;
/**
* Whether the UDF is an operator or not. This controls how the display string
* is generated.
*/
- boolean isOperator;
+ private boolean isOperator;
/**
* The underlying UDF class Name.
*/
- String udfClassName;
+ private String udfClassName;
/**
+ * The underlying method of the UDF class.
+ */
+ private transient Method udfMethod;
+
+ /**
+ * Helper to convert the parameters before passing to udfMethod.
+ */
+ private transient ConversionHelper conversionHelper;
+ /**
+ * The actual udf object.
+ */
+ private transient UDF udf;
+ /**
+ * The non-deferred real arguments for method invocation.
+ */
+ private transient Object[] realArguments;
+
+ /**
* Create a new GenericUDFBridge object.
*
* @param udfName
@@ -76,7 +96,7 @@ public GenericUDFBridge(String udfName, boolean isOperator,
this.isOperator = isOperator;
this.udfClassName = udfClassName;
}
-
+
// For Java serialization only
public GenericUDFBridge() {
}
@@ -113,24 +133,6 @@ public Class<? extends UDF> getUdfClass() {
}
}
- /**
- * The underlying method of the UDF class.
- */
- transient Method udfMethod;
-
- /**
- * Helper to convert the parameters before passing to udfMethod.
- */
- transient ConversionHelper conversionHelper;
- /**
- * The actual udf object.
- */
- transient UDF udf;
- /**
- * The non-deferred real arguments for method invocation.
- */
- transient Object[] realArguments;
-
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java b/ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java
index c16e316..40674cf 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/BaseTestQueries.java
@@ -28,11 +28,11 @@
public abstract class BaseTestQueries extends TestCase {
protected final String inpDir = System
- .getProperty("ql.test.query.clientpositive.dir");
+ .getProperty("hive.root") + "/ql/src/test/queries/clientpositive";
protected final String resDir = System
- .getProperty("ql.test.results.clientpositive.dir");
- protected final String logDir = System.getProperty("test.log.dir")
- + "/clientpositive";
+ .getProperty("hive.root") + "/ql/src/test/results/clientpositive";
+ protected final String logDir = System
+ .getProperty("build.dir") + "/junit-qfile-results/clientpositive";
/**
* Create a file for each test name in the inpDir.
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
index a787aee..ff8fa01 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -108,12 +108,7 @@ public class QTestUtil {
private final Set<String> qSkipSet;
private final Set<String> qSortSet;
private static final String SORT_SUFFIX = ".sorted";
- public static final HashSet<String> srcTables = new HashSet<String>
- (Arrays.asList(new String [] {
- "src", "src1", "srcbucket", "srcbucket2", "src_json", "src_thrift",
- "src_sequencefile", "srcpart", "alltypesorc"
- }));
-
+ public static final HashSet<String> srcTables = new HashSet<String>();
private ParseDriver pd;
private Hive db;
protected HiveConf conf;
@@ -128,6 +123,18 @@ public class QTestUtil {
private String hadoopVer = null;
private QTestSetup setup = null;
+ static {
+ for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
+ srcTable = srcTable.trim();
+ if (!srcTable.isEmpty()) {
+ srcTables.add(srcTable);
+ }
+ }
+ if (srcTables.isEmpty()) {
+ throw new AssertionError("Source tables cannot be empty");
+ }
+ }
+
public boolean deleteDirectory(File path) {
if (path.exists()) {
File[] files = path.listFiles();
@@ -470,14 +477,14 @@ else if(!Shell.WINDOWS){
/**
* Clear out any side effects of running tests
*/
- public void clearPostTestEffects () throws Exception {
+ public void clearPostTestEffects() throws Exception {
setup.postTest(conf);
}
/**
* Clear out any side effects of running tests
*/
- public void clearTestSideEffects () throws Exception {
+ public void clearTestSideEffects() throws Exception {
// Delete any tables other than the source tables
// and any databases other than the default database.
for (String dbName : db.getAllDatabases()) {
@@ -759,7 +766,7 @@ public void cliInit(String tname, boolean recreate) throws Exception {
cliDriver = new CliDriver();
if (tname.equals("init_file.q")) {
- ss.initFiles.add("../data/scripts/test_init_file.sql");
+ ss.initFiles.add("../../data/scripts/test_init_file.sql");
}
cliDriver.processInitFiles(ss);
}
@@ -1152,7 +1159,6 @@ private void maskPatterns(Pattern[] patterns, String fname) throws Exception {
});
public int checkCliDriverResults(String tname) throws Exception {
- String[] cmdArray;
assert(qMap.containsKey(tname));
String outFileName = outPath(outDir, tname + ".out");
@@ -1174,7 +1180,7 @@ public int checkCliDriverResults(String tname) throws Exception {
private static int overwriteResults(String inFileName, String outFileName) throws Exception {
// This method can be replaced with Files.copy(source, target, REPLACE_EXISTING)
// once Hive uses JAVA 7.
- System.out.println("Overwriting results");
+ System.out.println("Overwriting results " + inFileName + " to " + outFileName);
return executeCmd(new String[] {
"cp",
getQuotedString(inFileName),
@@ -1348,7 +1354,7 @@ public QTestSetup() {
public void preTest(HiveConf conf) throws Exception {
if (zooKeeperCluster == null) {
- String tmpdir = System.getProperty("user.dir")+"/../build/ql/tmp";
+ String tmpdir = System.getProperty("test.tmp.dir");
zooKeeperCluster = new MiniZooKeeperCluster();
zkPort = zooKeeperCluster.startup(new File(tmpdir, "zookeeper"));
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java b/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
index 0ddc1d5..9384a35 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -32,6 +32,14 @@
* ignored.
*/
public class TestLocationQueries extends BaseTestQueries {
+
+ public TestLocationQueries() {
+ File logDirFile = new File(logDir);
+ if (!(logDirFile.exists() || logDirFile.mkdirs())) {
+ fail("Could not create " + logDir);
+ }
+ }
+
/**
* Our own checker - validate the location of the partition.
*/
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java b/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
index 870a0d4..7dc1385 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
@@ -25,12 +25,19 @@
*/
public class TestMTQueries extends BaseTestQueries {
+ public TestMTQueries() {
+ File logDirFile = new File(logDir);
+ if (!(logDirFile.exists() || logDirFile.mkdirs())) {
+ fail("Could not create " + logDir);
+ }
+ }
+
public void testMTQueries1() throws Exception {
String[] testNames = new String[] {"join1.q", "join2.q", "groupby1.q",
"groupby2.q", "join3.q", "input1.q", "input19.q"};
File[] qfiles = setupQFiles(testNames);
-
+
QTestUtil[] qts = QTestUtil.queryListRunnerSetup(qfiles, resDir, logDir);
boolean success = QTestUtil.queryListRunnerMultiThreaded(qfiles, qts);
if (!success) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 68c319c..ec114f9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -69,10 +69,9 @@ public class TestExecDriver extends TestCase {
static HiveConf conf;
- private static String tmpdir = System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")
- + File.separator;
- private static Log LOG = LogFactory.getLog(TestExecDriver.class);
- private static Path tmppath = new Path(tmpdir);
+ private static final String tmpdir = System.getProperty("test.tmp.dir");
+ private static final Log LOG = LogFactory.getLog(TestExecDriver.class);
+ private static final Path tmppath = new Path(tmpdir);
private static Hive db;
private static FileSystem fs;
@@ -131,8 +130,7 @@ public class TestExecDriver extends TestCase {
}
} catch (Throwable e) {
- e.printStackTrace();
- throw new RuntimeException("Encountered throwable");
+ throw new RuntimeException("Encountered throwable", e);
}
}
@@ -156,10 +154,10 @@ private static void fileDiff(String datafile, String testdir) throws Exception {
// inbuilt assumption that the testdir has only one output file.
Path di_test = new Path(tmppath, testdir);
if (!fs.exists(di_test)) {
- throw new RuntimeException(tmpdir + testdir + " does not exist");
+ throw new RuntimeException(tmpdir + File.separator + testdir + " does not exist");
}
if (!fs.getFileStatus(di_test).isDir()) {
- throw new RuntimeException(tmpdir + testdir + " is not a directory");
+ throw new RuntimeException(tmpdir + File.separator + testdir + " is not a directory");
}
FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath());
@@ -198,7 +196,7 @@ private FilterDesc getTestFilterDesc(String column) {
@SuppressWarnings("unchecked")
private void populateMapPlan1(Table src) {
- Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapplan1.out", Utilities.defaultTd, true));
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
@@ -209,7 +207,7 @@ private void populateMapPlan1(Table src) {
@SuppressWarnings("unchecked")
private void populateMapPlan2(Table src) {
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapplan2.out", Utilities.defaultTd, false));
Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat",
@@ -245,7 +243,7 @@ private void populateMapRedPlan1(Table src) throws SemanticException {
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan1.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -275,7 +273,7 @@ private void populateMapRedPlan2(Table src) throws SemanticException {
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan2.out", Utilities.defaultTd, false));
Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
@@ -319,7 +317,7 @@ private void populateMapRedPlan3(Table src, Table src2) throws SemanticException
rWork.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan3.out", Utilities.defaultTd, false));
Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
@@ -362,7 +360,7 @@ private void populateMapRedPlan4(Table src) throws SemanticException {
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan4.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -401,7 +399,7 @@ private void populateMapRedPlan5(Table src) throws SemanticException {
rWork.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan5.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -442,7 +440,7 @@ private void populateMapRedPlan6(Table src) throws SemanticException {
rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan6.out", Utilities.defaultTd, false));
Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
@@ -472,118 +470,70 @@ private void executePlan() throws Exception {
public void testMapPlan1() throws Exception {
LOG.info("Beginning testMapPlan1");
-
- try {
- populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
- executePlan();
- fileDiff("lt100.txt.deflate", "mapplan1.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+ executePlan();
+ fileDiff("lt100.txt.deflate", "mapplan1.out");
}
public void testMapPlan2() throws Exception {
LOG.info("Beginning testMapPlan2");
-
- try {
- populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
- executePlan();
- fileDiff("lt100.txt", "mapplan2.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+ executePlan();
+ fileDiff("lt100.txt", "mapplan2.out");
}
public void testMapRedPlan1() throws Exception {
LOG.info("Beginning testMapRedPlan1");
-
- try {
- populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
}
public void testMapRedPlan2() throws Exception {
LOG.info("Beginning testMapPlan2");
-
- try {
- populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("lt100.sorted.txt", "mapredplan2.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("lt100.sorted.txt", "mapredplan2.out");
}
public void testMapRedPlan3() throws Exception {
LOG.info("Beginning testMapPlan3");
-
- try {
- populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
- executePlan();
- fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
+ executePlan();
+ fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
}
public void testMapRedPlan4() throws Exception {
LOG.info("Beginning testMapPlan4");
-
- try {
- populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
}
public void testMapRedPlan5() throws Exception {
LOG.info("Beginning testMapPlan5");
-
- try {
- populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
}
public void testMapRedPlan6() throws Exception {
LOG.info("Beginning testMapPlan6");
-
- try {
- populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("lt100.sorted.txt", "mapredplan6.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("lt100.sorted.txt", "mapredplan6.out");
}
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
index ec0c7b3..1458075 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
@@ -20,9 +20,9 @@
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import java.util.HashSet;
import java.util.Set;
-import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;
@@ -33,6 +33,20 @@
*/
public class EnforceReadOnlyTables implements ExecuteWithHookContext {
+ private static final Set<String> READ_ONLY_TABLES = new HashSet<String>();
+
+ static {
+ for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
+ srcTable = srcTable.trim();
+ if (!srcTable.isEmpty()) {
+ READ_ONLY_TABLES.add(srcTable);
+ }
+ }
+ if (READ_ONLY_TABLES.isEmpty()) {
+ throw new AssertionError("Source tables cannot be empty");
+ }
+ }
+
@Override
public void run(HookContext hookContext) throws Exception {
SessionState ss = SessionState.get();
@@ -53,7 +67,7 @@ public void run(SessionState sess, Set<ReadEntity> inputs,
(w.getTyp() == WriteEntity.Type.PARTITION)) {
Table t = w.getTable();
if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(t.getDbName())
- && QTestUtil.srcTables.contains(t.getTableName())) {
+ && READ_ONLY_TABLES.contains(t.getTableName())) {
throw new RuntimeException ("Cannot overwrite read-only table: " + t.getTableName());
}
}
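With this change the hook and QTestUtil no longer share a class dependency; both independently parse the test.src.tables property that the surefire configuration exports. A hedged sketch of enabling the hook in a test hive-site.xml (hook wiring illustrative):

    <property>
      <name>hive.exec.pre.hooks</name>
      <value>org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
    </property>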
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
index 9f2a6a1..b3c5722 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
@@ -63,7 +63,7 @@ public PerformTestRCFileAndSeqFile(boolean local, String file)
}
conf.setInt(RCFile.Writer.COLUMNS_BUFFER_SIZE_CONF_STR, 1 * 1024 * 1024);
if (file == null) {
- Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
testRCFile = new Path(dir, "test_rcfile");
testSeqFile = new Path(dir, "test_seqfile");
} else {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
index 77824eb..018a61c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
@@ -60,7 +60,7 @@ public void testFlatFileInputJava() throws Exception {
conf = new Configuration();
job = new JobConf(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test.txt");
reporter = Reporter.NULL;
fs.delete(dir, true);
@@ -142,7 +142,7 @@ public void testFlatFileInputRecord() throws Exception {
conf = new Configuration();
job = new JobConf(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test.txt");
reporter = Reporter.NULL;
fs.delete(dir, true);
@@ -215,7 +215,7 @@ public void testFlatFileInputRecord() throws Exception {
*
* try { // // create job and filesystem and reporter and such. // conf = new
* Configuration(); job = new JobConf(conf); fs = FileSystem.getLocal(conf);
- * dir = new Path(System.getProperty("test.data.dir",".") + "/mapred"); file =
+ * dir = new Path(System.getProperty("test.tmp.dir",".") + "/mapred"); file =
* new Path(dir, "test.txt"); reporter = Reporter.NULL; fs.delete(dir, true);
*
* job.setClass(FlatFileInputFormat.SerializationContextFromConf.
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
index a10842a..d2b06ec 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
@@ -105,7 +105,7 @@ public void setup() throws Exception {
conf = new Configuration();
ColumnProjectionUtils.setReadAllColumns(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test_rcfile");
cleanup();
// the SerDe part is from TestLazySimpleSerDe
@@ -611,7 +611,7 @@ public void testSynAndSplit() throws IOException {
@Test
public void testSync() throws IOException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsync");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
@@ -678,7 +678,7 @@ private void splitAfterSync() throws IOException {
private void writeThenReadByRecordReader(int intervalRecordCount,
int writeCount, int splitNumber, long minSplitSize, CompressionCodec codec)
throws IOException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsmallfirstsplit");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
@@ -750,7 +750,7 @@ public void testCloseForErroneousRCFile() throws IOException {
Configuration conf = new Configuration();
LocalFileSystem fs = FileSystem.getLocal(conf);
// create an empty file (which is not a valid rcfile)
- Path path = new Path(System.getProperty("test.build.data", ".")
+ Path path = new Path(System.getProperty("test.tmp.dir", ".")
+ "/broken.rcfile");
fs.create(path).close();
// try to create RCFile.Reader
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
index f2cd0cd..0a67f62 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
@@ -71,7 +71,7 @@ protected void setUp() throws IOException {
conf = new Configuration();
job = new JobConf(conf);
fileSystem = FileSystem.getLocal(conf);
- testDir = new Path(System.getProperty("test.data.dir", System.getProperty(
+ testDir = new Path(System.getProperty("test.tmp.dir", System.getProperty(
"user.dir", new File(".").getAbsolutePath()))
+ "/TestSymlinkTextInputFormat");
reporter = Reporter.NULL;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
index 8158ad3..3545a2c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
@@ -34,16 +34,13 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Test;
public class TestFileDump {
- Path workDir = new Path(System.getProperty("test.tmp.dir",
- "target" + File.separator + "test" + File.separator + "tmp"));
- Path resourceDir = new Path(System.getProperty("test.build.resources",
- "ql" + File.separator + "src" + File.separator + "test" + File.separator + "resources"));
-
+ Path workDir = new Path(System.getProperty("test.tmp.dir"));
Configuration conf;
FileSystem fs;
Path testFilePath;
@@ -71,13 +68,16 @@ static class MyRecord {
private static void checkOutput(String expected,
String actual) throws Exception {
BufferedReader eStream =
- new BufferedReader(new FileReader(expected));
+ new BufferedReader(new FileReader(HiveTestUtils.getFileFromClasspath(expected)));
BufferedReader aStream =
new BufferedReader(new FileReader(actual));
- String line = eStream.readLine();
- while (line != null) {
- assertEquals(line, aStream.readLine());
- line = eStream.readLine();
+ String expectedLine = eStream.readLine();
+ while (expectedLine != null) {
+ String actualLine = aStream.readLine();
+ System.out.println("actual: " + actualLine);
+ System.out.println("expected: " + expectedLine);
+ assertEquals(expectedLine, actualLine);
+ expectedLine = eStream.readLine();
}
assertNull(eStream.readLine());
assertNull(aStream.readLine());
@@ -110,8 +110,8 @@ public void testDump() throws Exception {
}
writer.close();
PrintStream origOut = System.out;
- String outputFilename = File.separator + "orc-file-dump.out";
- FileOutputStream myOut = new FileOutputStream(workDir + outputFilename);
+ String outputFilename = "orc-file-dump.out";
+ FileOutputStream myOut = new FileOutputStream(workDir + File.separator + outputFilename);
// replace stdout and run command
System.setOut(new PrintStream(myOut));
@@ -120,7 +120,7 @@ public void testDump() throws Exception {
System.setOut(origOut);
- checkOutput(resourceDir + outputFilename, workDir + outputFilename);
+ checkOutput(outputFilename, workDir + File.separator + outputFilename);
}
// Test that if the fraction of rows that have distinct strings is greater than the configured
@@ -164,8 +164,8 @@ public void testDictionaryThreshold() throws Exception {
}
writer.close();
PrintStream origOut = System.out;
- String outputFilename = File.separator + "orc-file-dump-dictionary-threshold.out";
- FileOutputStream myOut = new FileOutputStream(workDir + outputFilename);
+ String outputFilename = "orc-file-dump-dictionary-threshold.out";
+ FileOutputStream myOut = new FileOutputStream(workDir + File.separator + outputFilename);
// replace stdout and run command
System.setOut(new PrintStream(myOut));
@@ -173,6 +173,6 @@ public void testDictionaryThreshold() throws Exception {
System.out.flush();
System.setOut(origOut);
- checkOutput(resourceDir + outputFilename, workDir + outputFilename);
+ checkOutput(outputFilename, workDir + File.separator + outputFilename);
}
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 1f7ed42..ac2bf02 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -71,7 +71,7 @@
public class TestInputOutputFormat {
- Path workDir = new Path(System.getProperty("test.tmp.dir","target/test/tmp"));
+ Path workDir = new Path(System.getProperty("test.tmp.dir","target/tmp"));
public static class MyRow implements Writable {
int x;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java
index bb51bab..fb6be16 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java
@@ -51,7 +51,7 @@ public void testBasics() throws Exception {
NullCallback callback = new NullCallback();
long poolSize = mgr.getTotalMemoryPool();
assertEquals(Math.round(ManagementFactory.getMemoryMXBean().
- getHeapMemoryUsage().getMax() * 0.5f), poolSize);
+ getHeapMemoryUsage().getMax() * 0.5d), poolSize);
assertEquals(1.0, mgr.getAllocationScale(), 0.00001);
mgr.addWriter(new Path("p1"), 1000, callback);
assertEquals(1.0, mgr.getAllocationScale(), 0.00001);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index 3c45ec3..c03a80a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -66,6 +66,7 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -205,10 +206,7 @@ public void openFileSystem () throws Exception {
@Test
public void testReadFormat_0_11() throws Exception {
- Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
- + File.separator + "src" + File.separator + "test" + File.separator
- + "resources"));
- Path oldFilePath = new Path(resourceDir, "orc-file-11-format.orc");
+ Path oldFilePath = new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader = OrcFile.createReader(fs, oldFilePath);
int stripeCount = 0;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
index a1e9543..6aba386 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -571,10 +572,7 @@ public void testOrcSerDeStatsComplexOldFormat() throws Exception {
@Test(expected = ClassCastException.class)
public void testSerdeStatsOldFormat() throws Exception {
- Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
- + File.separator + "src" + File.separator + "test" + File.separator
- + "resources"));
- Path oldFilePath = new Path(resourceDir, "orc-file-11-format.orc");
+ Path oldFilePath = new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader = OrcFile.createReader(fs, oldFilePath);
int stripeCount = 0;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFBridge.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFBridge.java
new file mode 100644
index 0000000..effe842
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFBridge.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.junit.Test;
+
+public class TestGenericUDFBridge {
+
+ @Test(expected = UDFArgumentException.class)
+ public void testInvalidName() throws Exception {
+ GenericUDFBridge udf = new GenericUDFBridge("someudf", false, "not a class name");
+ udf.initialize(new ObjectInspector[0]);
+ udf.close();
+ }
+
+ @Test(expected = UDFArgumentException.class)
+ public void testNullName() throws Exception {
+ GenericUDFBridge udf = new GenericUDFBridge("someudf", false, null);
+ udf.initialize(new ObjectInspector[0]);
+ udf.close();
+ }
+}
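The new test exercises GenericUDFBridge's handling of an unresolvable wrapped-UDF class name (its third constructor argument). For contrast, a hypothetical usage sketch; the class name below is a placeholder, not one from this patch:

    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class BridgeUsageSketch {
        // With a resolvable class whose evaluate() matches the supplied
        // inspectors, initialize() should succeed rather than throw
        // UDFArgumentException.
        static void demo(ObjectInspector[] inspectors) throws Exception {
            GenericUDFBridge udf = new GenericUDFBridge(
                "myudf", false, "org.example.MyLegacyUDF"); // placeholder class
            udf.initialize(inspectors); // resolves and adapts the wrapped UDF
            udf.close();
        }
    }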
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java
index 4be73a4..e9a4675 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java
@@ -27,7 +27,7 @@
public class TestDosToUnix extends TestCase {
- private static final String dataFile = System.getProperty("test.data.dir", ".") + "data_TestDosToUnix";
+ private static final String dataFile = System.getProperty("test.tmp.dir", ".") + "data_TestDosToUnix";
@Override
protected void setUp() throws Exception {
super.setUp();
diff --git a/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q b/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
index 4881757..4193315 100644
--- a/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
+++ b/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
@@ -1,9 +1,9 @@
set hive.exec.concatenate.check.index=true;
create table src_rc_concatenate_test(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
show table extended like `src_rc_concatenate_test`;
diff --git a/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q b/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q
index 5f9d5ef..8cbb25c 100644
--- a/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q
+++ b/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q
@@ -2,7 +2,7 @@
create table if not exists alter_part_invalidspec(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12');
alter table alter_part_invalidspec partition (year='1997') enable no_drop;
diff --git a/ql/src/test/queries/clientnegative/alter_partition_nodrop.q b/ql/src/test/queries/clientnegative/alter_partition_nodrop.q
index 92af30b..3c0ff02 100644
--- a/ql/src/test/queries/clientnegative/alter_partition_nodrop.q
+++ b/ql/src/test/queries/clientnegative/alter_partition_nodrop.q
@@ -2,8 +2,8 @@
create table if not exists alter_part_nodrop_part(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12');
alter table alter_part_nodrop_part partition (year='1996') enable no_drop;
alter table alter_part_nodrop_part drop partition (year='1996');
diff --git a/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q b/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q
index 135411f..f2135b1 100644
--- a/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q
+++ b/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q
@@ -2,8 +2,8 @@
create table if not exists alter_part_nodrop_table(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12');
alter table alter_part_nodrop_table partition (year='1996') enable no_drop;
drop table alter_part_nodrop_table;
diff --git a/ql/src/test/queries/clientnegative/alter_partition_offline.q b/ql/src/test/queries/clientnegative/alter_partition_offline.q
index 899145d..7376d8b 100644
--- a/ql/src/test/queries/clientnegative/alter_partition_offline.q
+++ b/ql/src/test/queries/clientnegative/alter_partition_offline.q
@@ -2,8 +2,8 @@
create table if not exists alter_part_offline (key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12');
alter table alter_part_offline partition (year='1996') disable offline;
select * from alter_part_offline where year = '1996';
diff --git a/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q b/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q
index 26ba287..be971f1 100644
--- a/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q
+++ b/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
diff --git a/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q b/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q
index 6e51c2f..4babdda 100644
--- a/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q
+++ b/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
diff --git a/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q b/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q
index 2d4ce0b..3af807e 100644
--- a/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q
+++ b/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
diff --git a/ql/src/test/queries/clientnegative/archive_corrupt.q b/ql/src/test/queries/clientnegative/archive_corrupt.q
index bea2539..130b37b 100644
--- a/ql/src/test/queries/clientnegative/archive_corrupt.q
+++ b/ql/src/test/queries/clientnegative/archive_corrupt.q
@@ -14,5 +14,5 @@
-- to be thrown during the LOAD step. This former behavior is tested
-- in clientpositive/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
diff --git a/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q b/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q
index 048a8fd..6bebb89 100644
--- a/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q
+++ b/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q
@@ -1,19 +1,19 @@
CREATE TABLE srcbucket_mapjoin_part (key int, value string)
partitioned by (ds string) CLUSTERED BY (key) INTO 3 BUCKETS
STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt'
+load data local inpath '../../data/files/srcbucket20.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt'
+load data local inpath '../../data/files/srcbucket21.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt'
+load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string)
partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt'
+load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt'
+load data local inpath '../../data/files/srcbucket23.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-- The number of buckets in the 2 tables above (being joined later) don't match.
diff --git a/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q b/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q
index 9478a2f..802fcd9 100644
--- a/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q
+++ b/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q
@@ -8,10 +8,10 @@
create table table2(key string, value string) clustered by (value, key)
into 2 BUCKETS stored as textfile;
-load data local inpath '../data/files/T1.txt' overwrite into table table1;
+load data local inpath '../../data/files/T1.txt' overwrite into table table1;
-load data local inpath '../data/files/T1.txt' overwrite into table table2;
-load data local inpath '../data/files/T2.txt' overwrite into table table2;
+load data local inpath '../../data/files/T1.txt' overwrite into table table2;
+load data local inpath '../../data/files/T2.txt' overwrite into table table2;
set hive.optimize.bucketmapjoin = true;
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
diff --git a/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q b/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q
index 69afe0a..ac5abeb 100644
--- a/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q
+++ b/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q
@@ -8,13 +8,13 @@
create table table2(key string, value string) clustered by (value, key)
into 2 BUCKETS stored as textfile;
-load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='1');
-load data local inpath '../data/files/T2.txt' overwrite into table table1 partition (ds='1');
+load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='1');
+load data local inpath '../../data/files/T2.txt' overwrite into table table1 partition (ds='1');
-load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='2');
+load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='2');
-load data local inpath '../data/files/T1.txt' overwrite into table table2;
-load data local inpath '../data/files/T2.txt' overwrite into table table2;
+load data local inpath '../../data/files/T1.txt' overwrite into table table2;
+load data local inpath '../../data/files/T2.txt' overwrite into table table2;
set hive.optimize.bucketmapjoin = true;
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
diff --git a/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q b/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q
index af92350..b4887c4 100644
--- a/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q
+++ b/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q
@@ -3,12 +3,12 @@
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- dynamic partitioning syntax
explain
diff --git a/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q b/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q
index d9725dd..2f8e927 100644
--- a/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q
+++ b/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q
@@ -3,12 +3,12 @@
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- don't specify all partitioning keys
explain
diff --git a/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q b/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
index eb73962..34f91fc 100644
--- a/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
+++ b/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
@@ -3,12 +3,12 @@
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- specify invalid values for the partitioning keys
explain
diff --git a/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q b/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q
index dbfaaec..49d89dd 100644
--- a/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q
+++ b/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q
@@ -3,12 +3,12 @@
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- specify partitioning clause multiple times
explain
diff --git a/ql/src/test/queries/clientnegative/columnstats_tbllvl.q b/ql/src/test/queries/clientnegative/columnstats_tbllvl.q
index ca85489..a4e0056 100644
--- a/ql/src/test/queries/clientnegative/columnstats_tbllvl.q
+++ b/ql/src/test/queries/clientnegative/columnstats_tbllvl.q
@@ -13,7 +13,7 @@
avgTimeOnSite int)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
+LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
explain
analyze table UserVisits_web_text_none compute statistics for columns destIP;
diff --git a/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q b/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q
index 5bbd70d..85a5f0a 100644
--- a/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q
+++ b/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q
@@ -8,7 +8,7 @@
d MAP<STRING,ARRAY<STRING>>
) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type;
+LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type;
explain
diff --git a/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q b/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q
index ca85489..a4e0056 100644
--- a/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q
+++ b/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q
@@ -13,7 +13,7 @@
avgTimeOnSite int)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
+LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
explain
analyze table UserVisits_web_text_none compute statistics for columns destIP;
diff --git a/ql/src/test/queries/clientnegative/compute_stats_long.q b/ql/src/test/queries/clientnegative/compute_stats_long.q
index e24f417..5974811 100644
--- a/ql/src/test/queries/clientnegative/compute_stats_long.q
+++ b/ql/src/test/queries/clientnegative/compute_stats_long.q
@@ -1,7 +1,7 @@
create table tab_int(a int);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int;
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int;
-- compute stats should raise an error since the number of bit vectors > 1024
select compute_stats(a, 10000) from tab_int;
diff --git a/ql/src/test/queries/clientnegative/deletejar.q b/ql/src/test/queries/clientnegative/deletejar.q
index f9acaed..0bd6985 100644
--- a/ql/src/test/queries/clientnegative/deletejar.q
+++ b/ql/src/test/queries/clientnegative/deletejar.q
@@ -1,4 +1,4 @@
-ADD JAR ../build/ql/test/TestSerDe.jar;
-DELETE JAR ../build/ql/test/TestSerDe.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
+DELETE JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
diff --git a/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q b/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
index 0be2e71..0ad99d1 100644
--- a/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
+++ b/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
@@ -8,7 +8,7 @@
create table dest_table like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11);
-- Tests creating dynamic partitions with characters not in the whitelist (i.e. 9)
-- If the directory is not empty, the hook will throw an error; instead, the error should come from the metastore
diff --git a/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q b/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
index d86ecd5..6ffc33a 100644
--- a/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
+++ b/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'nosuchschema://nosuchauthority/ql/test/data/exports/exim_department';
drop table exim_department;
diff --git a/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q b/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
index 5f32231..970e646 100644
--- a/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
+++ b/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -16,9 +16,9 @@
create table exim_department ( dep_id int comment "department identifier")
stored as textfile
tblproperties("maker"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q b/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
index d7204dc..3589183 100644
--- a/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
+++ b/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
@@ -6,16 +6,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -27,12 +27,12 @@
partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
stored as textfile
tblproperties("maker"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q b/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
index 6cd7eda..45268c2 100644
--- a/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
+++ b/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,6 +18,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q b/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
index 7f3f577..cad6c90 100644
--- a/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
+++ b/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,6 +18,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q b/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
index d3ec9ff..f5f904f 100644
--- a/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
+++ b/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,6 +18,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q b/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
index 1cc691f..c56329c 100644
--- a/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
+++ b/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,6 +18,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q b/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
index 27830ad..afaedcd 100644
--- a/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
+++ b/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -21,6 +21,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q b/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
index d85048a..230b28c 100644
--- a/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
+++ b/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -19,6 +19,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q b/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
index 84b3786..c2e00a9 100644
--- a/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
+++ b/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -23,6 +23,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q b/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
index eaf9c57..a6586ea 100644
--- a/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
+++ b/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -19,6 +19,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q b/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
index 092fd77..990a686 100644
--- a/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
+++ b/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
@@ -5,9 +5,9 @@
clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -20,6 +20,6 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q b/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
index 05de3d7..02537ef 100644
--- a/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
+++ b/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,7 +18,7 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q b/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
index dc194ca..897c674 100644
--- a/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
+++ b/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -19,7 +19,7 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q b/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
index e233707..12013e5 100644
--- a/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
+++ b/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
@@ -5,9 +5,9 @@
partitioned by (dep_org string)
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr");
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -19,7 +19,7 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q b/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
index a10788e..d8d2b80 100644
--- a/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
+++ b/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
@@ -5,9 +5,9 @@
partitioned by (dep_org string)
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr");
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -20,7 +20,7 @@
tblproperties("creator"="krishna");
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q b/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
index cc4a56c..82dcce9 100644
--- a/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
+++ b/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
@@ -6,16 +6,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -25,6 +25,6 @@
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q b/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
index 140e3bb..d92efeb 100644
--- a/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
+++ b/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
@@ -6,16 +6,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -25,6 +25,6 @@
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q b/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
index 048befe..12d827b 100644
--- a/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
+++ b/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
@@ -4,9 +4,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -17,7 +17,7 @@
stored as textfile
tblproperties("creator"="krishna");
import external table exim_department from 'ql/test/data/exports/exim_department';
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop table exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q b/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
index 89cbb9e..726dee5 100644
--- a/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
+++ b/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
@@ -4,17 +4,17 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
create table exim_department ( dep_id int comment "department id")
stored as textfile
@@ -22,9 +22,9 @@
tblproperties("creator"="krishna");
import table exim_department from 'ql/test/data/exports/exim_department'
location 'ql/test/data/tablestore2/exim_department';
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop table exim_department;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q b/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
index 0cbfc85..d187c78 100644
--- a/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
+++ b/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
@@ -6,16 +6,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -29,7 +29,7 @@
tblproperties("creator"="krishna");
import external table exim_employee partition (emp_country="us", emp_state="tn")
from 'ql/test/data/exports/exim_employee';
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop table exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_22_export_authfail.q b/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
index d9ab0cf..b818686 100644
--- a/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
+++ b/ql/src/test/queries/clientnegative/exim_22_export_authfail.q
@@ -5,8 +5,8 @@
set hive.security.authorization.enabled=true;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
set hive.security.authorization.enabled=false;
diff --git a/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q b/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
index 2dbd534..4acefb9 100644
--- a/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
+++ b/ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
@@ -2,9 +2,9 @@
set hive.test.mode.prefix=;
create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -18,5 +18,5 @@
set hive.security.authorization.enabled=false;
drop table exim_department;
drop database importer;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
diff --git a/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q b/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
index ccbcee3..467014e 100644
--- a/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
+++ b/ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
@@ -7,10 +7,10 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -26,6 +26,6 @@
import from 'ql/test/data/exports/exim_employee';
set hive.security.authorization.enabled=false;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop table exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q b/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
index 50bfe00..595fa7e 100644
--- a/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
+++ b/ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
@@ -3,9 +3,9 @@
set hive.test.mode.nosamplelist=exim_department,exim_employee;
create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -19,5 +19,5 @@
select * from exim_department;
drop table exim_department;
drop database importer;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
diff --git a/ql/src/test/queries/clientnegative/fetchtask_ioexception.q b/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
index 9f44f22..82230f7 100644
--- a/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
+++ b/ql/src/test/queries/clientnegative/fetchtask_ioexception.q
@@ -2,6 +2,6 @@
KEY STRING,
VALUE STRING) STORED AS SEQUENCEFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception;
+LOAD DATA LOCAL INPATH '../../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception;
SELECT * FROM fetchtask_ioexception;
diff --git a/ql/src/test/queries/clientnegative/illegal_partition_type.q b/ql/src/test/queries/clientnegative/illegal_partition_type.q
index 1cdaffd..1ab828c 100644
--- a/ql/src/test/queries/clientnegative/illegal_partition_type.q
+++ b/ql/src/test/queries/clientnegative/illegal_partition_type.q
@@ -1,6 +1,6 @@
-- begin part(string, int) pass(string, string)
CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='second');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='second');
select * from tab1;
drop table tab1;
diff --git a/ql/src/test/queries/clientnegative/insertover_dynapart_ifnotexists.q b/ql/src/test/queries/clientnegative/insertover_dynapart_ifnotexists.q
index cbf65c4..a8f77c2 100644
--- a/ql/src/test/queries/clientnegative/insertover_dynapart_ifnotexists.q
+++ b/ql/src/test/queries/clientnegative/insertover_dynapart_ifnotexists.q
@@ -4,6 +4,6 @@
create table destpart_dp like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11);
insert overwrite table destpart_dp partition (ds='2008-04-08', hr) if not exists select key, value, hr from srcpart_dp where ds='2008-04-08';
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/invalid_columns.q b/ql/src/test/queries/clientnegative/invalid_columns.q
index f705666..f8be8c8 100644
--- a/ql/src/test/queries/clientnegative/invalid_columns.q
+++ b/ql/src/test/queries/clientnegative/invalid_columns.q
@@ -1,4 +1,4 @@
-ADD JAR ../build/ql/test/TestSerDe.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
STORED AS TEXTFILE
TBLPROPERTIES('columns'='valid_colname,invalid.colname');
diff --git a/ql/src/test/queries/clientnegative/load_exist_part_authfail.q b/ql/src/test/queries/clientnegative/load_exist_part_authfail.q
index f86cd92..eb72d94 100644
--- a/ql/src/test/queries/clientnegative/load_exist_part_authfail.q
+++ b/ql/src/test/queries/clientnegative/load_exist_part_authfail.q
@@ -1,4 +1,4 @@
create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile;
alter table hive_test_src add partition (pcol1 = 'test_part');
set hive.security.authorization.enabled=true;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
diff --git a/ql/src/test/queries/clientnegative/load_non_native.q b/ql/src/test/queries/clientnegative/load_non_native.q
index 387aaed..75a5216 100644
--- a/ql/src/test/queries/clientnegative/load_non_native.q
+++ b/ql/src/test/queries/clientnegative/load_non_native.q
@@ -2,4 +2,4 @@
CREATE TABLE non_native2(key int, value string)
STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler';
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE non_native2;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE non_native2;
diff --git a/ql/src/test/queries/clientnegative/load_nonpart_authfail.q b/ql/src/test/queries/clientnegative/load_nonpart_authfail.q
index d807c69..3265363 100644
--- a/ql/src/test/queries/clientnegative/load_nonpart_authfail.q
+++ b/ql/src/test/queries/clientnegative/load_nonpart_authfail.q
@@ -1,3 +1,3 @@
create table hive_test_src ( col1 string ) stored as textfile;
set hive.security.authorization.enabled=true;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src ;
diff --git a/ql/src/test/queries/clientnegative/load_part_authfail.q b/ql/src/test/queries/clientnegative/load_part_authfail.q
index c409d5a..315988d 100644
--- a/ql/src/test/queries/clientnegative/load_part_authfail.q
+++ b/ql/src/test/queries/clientnegative/load_part_authfail.q
@@ -1,3 +1,3 @@
create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile;
set hive.security.authorization.enabled=true;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
diff --git a/ql/src/test/queries/clientnegative/load_part_nospec.q b/ql/src/test/queries/clientnegative/load_part_nospec.q
index 356c16a..8151799 100644
--- a/ql/src/test/queries/clientnegative/load_part_nospec.q
+++ b/ql/src/test/queries/clientnegative/load_part_nospec.q
@@ -1,2 +1,2 @@
create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile;
-load data local inpath '../data/files/test.dat' into table hive_test_src;
+load data local inpath '../../data/files/test.dat' into table hive_test_src;
diff --git a/ql/src/test/queries/clientnegative/load_stored_as_dirs.q b/ql/src/test/queries/clientnegative/load_stored_as_dirs.q
index eed5651..c56f0d4 100644
--- a/ql/src/test/queries/clientnegative/load_stored_as_dirs.q
+++ b/ql/src/test/queries/clientnegative/load_stored_as_dirs.q
@@ -4,4 +4,4 @@
CREATE TABLE if not exists stored_as_dirs_multiple (col1 STRING, col2 int, col3 STRING)
SKEWED BY (col1, col2) ON (('s1',1), ('s3',3), ('s13',13), ('s78',78)) stored as DIRECTORIES;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE stored_as_dirs_multiple;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE stored_as_dirs_multiple;
diff --git a/ql/src/test/queries/clientnegative/load_view_failure.q b/ql/src/test/queries/clientnegative/load_view_failure.q
index 927f02e..64182ea 100644
--- a/ql/src/test/queries/clientnegative/load_view_failure.q
+++ b/ql/src/test/queries/clientnegative/load_view_failure.q
@@ -1,3 +1,3 @@
DROP VIEW xxx11;
CREATE VIEW xxx11 AS SELECT * FROM src;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE xxx11;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE xxx11;
diff --git a/ql/src/test/queries/clientnegative/load_wrong_fileformat.q b/ql/src/test/queries/clientnegative/load_wrong_fileformat.q
index 16feeca..f0c3b59 100644
--- a/ql/src/test/queries/clientnegative/load_wrong_fileformat.q
+++ b/ql/src/test/queries/clientnegative/load_wrong_fileformat.q
@@ -3,4 +3,4 @@
CREATE TABLE load_wrong_fileformat_T1(name STRING) STORED AS SEQUENCEFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE load_wrong_fileformat_T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE load_wrong_fileformat_T1;
diff --git a/ql/src/test/queries/clientnegative/load_wrong_fileformat_rc_seq.q b/ql/src/test/queries/clientnegative/load_wrong_fileformat_rc_seq.q
index 7e589fb..4d79bbe 100644
--- a/ql/src/test/queries/clientnegative/load_wrong_fileformat_rc_seq.q
+++ b/ql/src/test/queries/clientnegative/load_wrong_fileformat_rc_seq.q
@@ -3,4 +3,4 @@
CREATE TABLE T1(name STRING) STORED AS RCFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1;
\ No newline at end of file
+LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T1;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/load_wrong_fileformat_txt_seq.q b/ql/src/test/queries/clientnegative/load_wrong_fileformat_txt_seq.q
index ff5ed4e..050c819 100644
--- a/ql/src/test/queries/clientnegative/load_wrong_fileformat_txt_seq.q
+++ b/ql/src/test/queries/clientnegative/load_wrong_fileformat_txt_seq.q
@@ -3,4 +3,4 @@
CREATE TABLE T1(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1;
\ No newline at end of file
+LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T1;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/load_wrong_noof_part.q b/ql/src/test/queries/clientnegative/load_wrong_noof_part.q
index ffb64ed..7f5ad75 100644
--- a/ql/src/test/queries/clientnegative/load_wrong_noof_part.q
+++ b/ql/src/test/queries/clientnegative/load_wrong_noof_part.q
@@ -1,3 +1,3 @@
CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING,ds1 STRING);
-LOAD DATA LOCAL INPATH '../data1/files/kv1.txt' INTO TABLE loadpart1 PARTITION(ds='2009-05-05');
+LOAD DATA LOCAL INPATH '../../data1/files/kv1.txt' INTO TABLE loadpart1 PARTITION(ds='2009-05-05');
diff --git a/ql/src/test/queries/clientnegative/local_mapred_error_cache.q b/ql/src/test/queries/clientnegative/local_mapred_error_cache.q
index 8f4b37a..ed9e21d 100644
--- a/ql/src/test/queries/clientnegative/local_mapred_error_cache.q
+++ b/ql/src/test/queries/clientnegative/local_mapred_error_cache.q
@@ -1,4 +1,4 @@
set hive.exec.mode.local.auto=true;
set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateLocalErrorsHook;
-FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value);
+FROM src SELECT TRANSFORM(key, value) USING 'python ../../data/scripts/cat_error.py' AS (key, value);
diff --git a/ql/src/test/queries/clientnegative/nested_complex_neg.q b/ql/src/test/queries/clientnegative/nested_complex_neg.q
index ac6c4ee..09f13f5 100644
--- a/ql/src/test/queries/clientnegative/nested_complex_neg.q
+++ b/ql/src/test/queries/clientnegative/nested_complex_neg.q
@@ -10,6 +10,6 @@
-- This should fail in as extended nesting levels are not enabled using the serdeproperty hive.serialization.extend.nesting.levels
-load data local inpath '../data/files/nested_complex.txt' overwrite into table nestedcomplex;
+load data local inpath '../../data/files/nested_complex.txt' overwrite into table nestedcomplex;
select * from nestedcomplex sort by simple_int;
diff --git a/ql/src/test/queries/clientnegative/nopart_insert.q b/ql/src/test/queries/clientnegative/nopart_insert.q
index 4841f9e..6669bf6 100644
--- a/ql/src/test/queries/clientnegative/nopart_insert.q
+++ b/ql/src/test/queries/clientnegative/nopart_insert.q
@@ -2,6 +2,6 @@
CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING);
INSERT OVERWRITE TABLE nopart_insert
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src;
diff --git a/ql/src/test/queries/clientnegative/nopart_load.q b/ql/src/test/queries/clientnegative/nopart_load.q
index 6e5ad6e..966982f 100644
--- a/ql/src/test/queries/clientnegative/nopart_load.q
+++ b/ql/src/test/queries/clientnegative/nopart_load.q
@@ -1,5 +1,5 @@
CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING);
-load data local inpath '../data/files/kv1.txt' overwrite into table nopart_load ;
+load data local inpath '../../data/files/kv1.txt' overwrite into table nopart_load ;
diff --git a/ql/src/test/queries/clientnegative/protectmode_part2.q b/ql/src/test/queries/clientnegative/protectmode_part2.q
index 72b55ea..3fdc036 100644
--- a/ql/src/test/queries/clientnegative/protectmode_part2.q
+++ b/ql/src/test/queries/clientnegative/protectmode_part2.q
@@ -4,6 +4,6 @@
create table tbl_protectmode6 (c1 string,c2 string) partitioned by (p string);
alter table tbl_protectmode6 add partition (p='p1');
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1');
alter table tbl_protectmode6 partition (p='p1') enable offline;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1');
diff --git a/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q b/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
index 542367a..ef37225 100644
--- a/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
+++ b/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
@@ -12,7 +12,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- testAggrFuncsWithNoGBYNoPartDef
select p_mfgr,
diff --git a/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q b/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
index 95b3511..5843042 100644
--- a/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
+++ b/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
@@ -12,7 +12,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- testAmbiguousWindowDefn
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientnegative/script_error.q b/ql/src/test/queries/clientnegative/script_error.q
index e46aed0..8ca849b 100644
--- a/ql/src/test/queries/clientnegative/script_error.q
+++ b/ql/src/test/queries/clientnegative/script_error.q
@@ -1,7 +1,7 @@
EXPLAIN
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src;
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src;
diff --git a/ql/src/test/queries/clientnegative/serde_regex2.q b/ql/src/test/queries/clientnegative/serde_regex2.q
index a395574..d523d03 100644
--- a/ql/src/test/queries/clientnegative/serde_regex2.q
+++ b/ql/src/test/queries/clientnegative/serde_regex2.q
@@ -16,8 +16,8 @@
)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
-- raise an exception
SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/subquery_windowing_corr.q b/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
index bcc767b..105d3d2 100644
--- a/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
+++ b/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- corr and windowing
diff --git a/ql/src/test/queries/clientnegative/udfnull.q b/ql/src/test/queries/clientnegative/udfnull.q
deleted file mode 100644
index 3c4204f..0000000
--- a/ql/src/test/queries/clientnegative/udfnull.q
+++ /dev/null
@@ -1,6 +0,0 @@
-
-CREATE TEMPORARY FUNCTION example_arraysum AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum';
-
-SELECT example_arraysum(lint)FROM src_thrift;
-
-DROP TEMPORARY FUNCTION example_arraysum;
diff --git a/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q b/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q
index 46bca37..15f8fae 100644
--- a/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q
+++ b/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientpositive/alter1.q b/ql/src/test/queries/clientpositive/alter1.q
index fb8a792..312a017 100644
--- a/ql/src/test/queries/clientpositive/alter1.q
+++ b/ql/src/test/queries/clientpositive/alter1.q
@@ -15,7 +15,7 @@
alter table alter1 set serdeproperties('s1'='10', 's2' ='20');
describe extended alter1;
-add jar ../build/ql/test/TestSerDe.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9');
describe extended alter1;
@@ -56,7 +56,7 @@
ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20');
DESCRIBE EXTENDED alter1;
-add jar ../build/ql/test/TestSerDe.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9');
DESCRIBE EXTENDED alter1;
diff --git a/ql/src/test/queries/clientpositive/alter3.q b/ql/src/test/queries/clientpositive/alter3.q
index 3cde007..91e4e9b 100644
--- a/ql/src/test/queries/clientpositive/alter3.q
+++ b/ql/src/test/queries/clientpositive/alter3.q
@@ -1,5 +1,5 @@
create table alter3_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter3_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter3_src ;
create table alter3 ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
@@ -34,7 +34,7 @@
SHOW TABLES;
CREATE TABLE alter3_src (col1 STRING) STORED AS TEXTFILE ;
-LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter3_src ;
+LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter3_src ;
CREATE TABLE alter3 (col1 STRING) PARTITIONED BY (pcol1 STRING, pcol2 STRING) STORED AS SEQUENCEFILE;
diff --git a/ql/src/test/queries/clientpositive/alter5.q b/ql/src/test/queries/clientpositive/alter5.q
index 0d14f22..66c9f8d 100644
--- a/ql/src/test/queries/clientpositive/alter5.q
+++ b/ql/src/test/queries/clientpositive/alter5.q
@@ -3,7 +3,7 @@
--
create table alter5_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter5_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter5_src ;
create table alter5 ( col1 string ) partitioned by (dt string);
@@ -32,7 +32,7 @@
SHOW TABLES;
create table alter5_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter5_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter5_src ;
create table alter5 ( col1 string ) partitioned by (dt string);
alter table alter5 add partition (dt='a') location 'parta';
diff --git a/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q b/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
index 807ef53..e1c3780 100644
--- a/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
+++ b/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
@@ -1,9 +1,9 @@
set hive.exec.concatenate.check.index =false;
create table src_rc_concatenate_test(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
show table extended like `src_rc_concatenate_test`;
@@ -26,9 +26,9 @@
alter table src_rc_concatenate_test_part add partition (ds='2011');
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011');
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011');
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011');
show table extended like `src_rc_concatenate_test_part` partition (ds='2011');
diff --git a/ql/src/test/queries/clientpositive/alter_merge.q b/ql/src/test/queries/clientpositive/alter_merge.q
index c350273..ceabd08 100644
--- a/ql/src/test/queries/clientpositive/alter_merge.q
+++ b/ql/src/test/queries/clientpositive/alter_merge.q
@@ -1,8 +1,8 @@
create table src_rc_merge_test(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test;
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test;
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test;
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test;
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test;
show table extended like `src_rc_merge_test`;
@@ -21,9 +21,9 @@
alter table src_rc_merge_test_part add partition (ds='2011');
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011');
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011');
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011');
show table extended like `src_rc_merge_test_part` partition (ds='2011');
diff --git a/ql/src/test/queries/clientpositive/alter_merge_2.q b/ql/src/test/queries/clientpositive/alter_merge_2.q
index 65ddfed..e09703d 100644
--- a/ql/src/test/queries/clientpositive/alter_merge_2.q
+++ b/ql/src/test/queries/clientpositive/alter_merge_2.q
@@ -3,9 +3,9 @@
alter table src_rc_merge_test_part add partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
desc extended src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31');
select count(1) from src_rc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31';
select sum(hash(key)), sum(hash(value)) from src_rc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31';
diff --git a/ql/src/test/queries/clientpositive/alter_merge_stats.q b/ql/src/test/queries/clientpositive/alter_merge_stats.q
index 23bae55..0af87e2 100644
--- a/ql/src/test/queries/clientpositive/alter_merge_stats.q
+++ b/ql/src/test/queries/clientpositive/alter_merge_stats.q
@@ -1,8 +1,8 @@
create table src_rc_merge_test_stat(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat;
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat;
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat;
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat;
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat;
show table extended like `src_rc_merge_test_stat`;
desc extended src_rc_merge_test_stat;
@@ -21,9 +21,9 @@
alter table src_rc_merge_test_part_stat add partition (ds='2011');
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011');
show table extended like `src_rc_merge_test_part_stat` partition (ds='2011');
desc extended src_rc_merge_test_part_stat;
diff --git a/ql/src/test/queries/clientpositive/alter_partition_protect_mode.q b/ql/src/test/queries/clientpositive/alter_partition_protect_mode.q
index 7bcb9f0..7a1f3dd 100644
--- a/ql/src/test/queries/clientpositive/alter_partition_protect_mode.q
+++ b/ql/src/test/queries/clientpositive/alter_partition_protect_mode.q
@@ -2,10 +2,10 @@
create table if not exists alter_part_protect_mode(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07');
-- offline
alter table alter_part_protect_mode partition (year='1996') disable offline;
diff --git a/ql/src/test/queries/clientpositive/alter_rename_partition.q b/ql/src/test/queries/clientpositive/alter_rename_partition.q
index d498cd5..8ebbe98 100644
--- a/ql/src/test/queries/clientpositive/alter_rename_partition.q
+++ b/ql/src/test/queries/clientpositive/alter_rename_partition.q
@@ -4,7 +4,7 @@
SHOW TABLES;
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
@@ -28,7 +28,7 @@
SHOW TABLES;
CREATE TABLE alter_rename_partition_src (col1 STRING) STORED AS TEXTFILE ;
-LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src ;
+LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src ;
CREATE TABLE alter_rename_partition (col1 STRING) PARTITIONED BY (pcol1 STRING, pcol2 STRING) STORED AS SEQUENCEFILE;
diff --git a/ql/src/test/queries/clientpositive/archive_corrupt.q b/ql/src/test/queries/clientpositive/archive_corrupt.q
index b83eab5..cc9801d 100644
--- a/ql/src/test/queries/clientpositive/archive_corrupt.q
+++ b/ql/src/test/queries/clientpositive/archive_corrupt.q
@@ -14,7 +14,7 @@
-- to be thrown during the LOAD step. This behavior is now tested in
-- clientnegative/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
select key, value from srcpart where ds='2008-04-08' and hr='12';
diff --git a/ql/src/test/queries/clientpositive/auto_join32.q b/ql/src/test/queries/clientpositive/auto_join32.q
index 289bfbc..e7846ee 100644
--- a/ql/src/test/queries/clientpositive/auto_join32.q
+++ b/ql/src/test/queries/clientpositive/auto_join32.q
@@ -32,10 +32,10 @@
on (s.name = v.name)
group by s.name;
-load data local inpath '../data/files/empty1.txt' into table studenttab10k_smb;
-load data local inpath '../data/files/empty2.txt' into table studenttab10k_smb;
-load data local inpath '../data/files/empty1.txt' into table votertab10k_smb;
-load data local inpath '../data/files/empty2.txt' into table votertab10k_smb;
+load data local inpath '../../data/files/empty1.txt' into table studenttab10k_smb;
+load data local inpath '../../data/files/empty2.txt' into table studenttab10k_smb;
+load data local inpath '../../data/files/empty1.txt' into table votertab10k_smb;
+load data local inpath '../../data/files/empty2.txt' into table votertab10k_smb;
explain select s.name, count(distinct registration)
from studenttab10k_smb s join votertab10k_smb v
@@ -51,10 +51,10 @@
create table studenttab10k_part (name string, age int, gpa double) partitioned by (p string) clustered by (name) sorted by (name) into 2 buckets;
create table votertab10k_part (name string, age int, registration string, contributions float) partitioned by (p string) clustered by (name) sorted by (name) into 2 buckets;
-load data local inpath '../data/files/empty1.txt' into table studenttab10k_part partition (p='foo');
-load data local inpath '../data/files/empty2.txt' into table studenttab10k_part partition (p='foo');
-load data local inpath '../data/files/empty1.txt' into table votertab10k_part partition (p='foo');
-load data local inpath '../data/files/empty2.txt' into table votertab10k_part partition (p='foo');
+load data local inpath '../../data/files/empty1.txt' into table studenttab10k_part partition (p='foo');
+load data local inpath '../../data/files/empty2.txt' into table studenttab10k_part partition (p='foo');
+load data local inpath '../../data/files/empty1.txt' into table votertab10k_part partition (p='foo');
+load data local inpath '../../data/files/empty2.txt' into table votertab10k_part partition (p='foo');
explain select s.name, count(distinct registration)
from studenttab10k_part s join votertab10k_part v
diff --git a/ql/src/test/queries/clientpositive/auto_join_filters.q b/ql/src/test/queries/clientpositive/auto_join_filters.q
index 458504c..eefd211 100644
--- a/ql/src/test/queries/clientpositive/auto_join_filters.q
+++ b/ql/src/test/queries/clientpositive/auto_join_filters.q
@@ -1,7 +1,7 @@
set hive.auto.convert.join = true;
CREATE TABLE myinput1(key int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1;
+LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1;
SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
@@ -38,10 +38,10 @@
CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2;
SET hive.optimize.bucketmapjoin = true;
SET hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/auto_join_nulls.q b/ql/src/test/queries/clientpositive/auto_join_nulls.q
index 766348d..d1b7bb4 100644
--- a/ql/src/test/queries/clientpositive/auto_join_nulls.q
+++ b/ql/src/test/queries/clientpositive/auto_join_nulls.q
@@ -1,7 +1,7 @@
set hive.auto.convert.join = true;
CREATE TABLE myinput1(key int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1;
SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b;
SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b;
diff --git a/ql/src/test/queries/clientpositive/auto_join_reordering_values.q b/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
index 46a4a0d..2fc0cd4 100644
--- a/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
+++ b/ql/src/test/queries/clientpositive/auto_join_reordering_values.q
@@ -1,7 +1,7 @@
-- HIVE-5056 RS has expression list for values, but it's ignored in MapJoinProcessor
create table testsrc ( `key` int,`val` string);
-load data local inpath '../data/files/kv1.txt' overwrite into table testsrc;
+load data local inpath '../../data/files/kv1.txt' overwrite into table testsrc;
drop table if exists orderpayment_small;
create table orderpayment_small (`dealid` int,`date` string,`time` string, `cityid` int, `userid` int);
insert overwrite table orderpayment_small select 748, '2011-03-24', '2011-03-24', 55 ,5372613 from testsrc limit 1;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_1.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_1.q
index e76b560..ddd2c18 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_1.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_1.q
@@ -2,19 +2,19 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_11.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_11.q
index f9fa1e4..da2e26f 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_11.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_11.q
@@ -1,19 +1,19 @@
-- small 1 part, 2 bucket & big 2 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
set hive.auto.convert.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_12.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_12.q
index db53a65..f434b33 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_12.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_12.q
@@ -2,19 +2,19 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
@@ -23,9 +23,9 @@
CREATE TABLE bucket_medium (key string, value string) partitioned by (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08');
explain extended select count(*) FROM bucket_small a JOIN bucket_medium b ON a.key = b.key JOIN bucket_big c ON c.key = b.key JOIN bucket_medium d ON c.key = b.key;
select count(*) FROM bucket_small a JOIN bucket_medium b ON a.key = b.key JOIN bucket_big c ON c.key = b.key JOIN bucket_medium d ON c.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_2.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_2.q
index 7533977..eef5483 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_2.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_2.q
@@ -1,16 +1,16 @@
-- small 1 part, 4 bucket & big 2 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_3.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_3.q
index 39a695f..c094ecd 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_3.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_3.q
@@ -1,16 +1,16 @@
-- small 2 part, 2 bucket & big 1 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_4.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_4.q
index 6072272..18acfbf 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_4.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_4.q
@@ -1,18 +1,18 @@
-- small 2 part, 4 bucket & big 1 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_5.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_5.q
index a28ce3d..98d6df9 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_5.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_5.q
@@ -1,13 +1,13 @@
-- small no part, 4 bucket & big no part, 2 bucket
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small;

CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big;

set hive.auto.convert.sortmerge.join=true;
set hive.optimize.bucketmapjoin = true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_7.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_7.q
index d62f637..e19cc31 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_7.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_7.q
@@ -1,21 +1,21 @@
-- small 2 part, 4 bucket & big 2 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/auto_sortmerge_join_8.q b/ql/src/test/queries/clientpositive/auto_sortmerge_join_8.q
index 6302a1b..a66806f 100644
--- a/ql/src/test/queries/clientpositive/auto_sortmerge_join_8.q
+++ b/ql/src/test/queries/clientpositive/auto_sortmerge_join_8.q
@@ -1,21 +1,21 @@
-- small 2 part, 2 bucket & big 2 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.auto.convert.join=true;
set hive.auto.convert.sortmerge.join=true;
diff --git a/ql/src/test/queries/clientpositive/avro_compression_enabled.q b/ql/src/test/queries/clientpositive/avro_compression_enabled.q
index 8367206..cb6f173 100644
--- a/ql/src/test/queries/clientpositive/avro_compression_enabled.q
+++ b/ql/src/test/queries/clientpositive/avro_compression_enabled.q
@@ -35,7 +35,7 @@
]
}');

-LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4;
+LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4;

set hive.exec.compress.output=true;

diff --git a/ql/src/test/queries/clientpositive/avro_evolved_schemas.q b/ql/src/test/queries/clientpositive/avro_evolved_schemas.q
index 3fe8ff1..f723cbc 100644
--- a/ql/src/test/queries/clientpositive/avro_evolved_schemas.q
+++ b/ql/src/test/queries/clientpositive/avro_evolved_schemas.q
@@ -36,7 +36,7 @@

DESCRIBE doctors_with_new_field;

-LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors_with_new_field;
+LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors_with_new_field;

SELECT * FROM doctors_with_new_field ORDER BY first_name;

diff --git a/ql/src/test/queries/clientpositive/avro_joins.q b/ql/src/test/queries/clientpositive/avro_joins.q
index 25b77c0..4c33a83 100644
--- a/ql/src/test/queries/clientpositive/avro_joins.q
+++ b/ql/src/test/queries/clientpositive/avro_joins.q
@@ -37,7 +37,7 @@

DESCRIBE doctors4;

-LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4;
+LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4;

CREATE TABLE episodes
ROW FORMAT
@@ -70,7 +70,7 @@

DESCRIBE episodes;

-LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes;
+LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes;

SELECT e.title, e.air_date, d.first_name, d.last_name, d.extra_field, e.air_date
FROM doctors4 d JOIN episodes e ON (d.number=e.doctor)
diff --git a/ql/src/test/queries/clientpositive/avro_nullable_fields.q b/ql/src/test/queries/clientpositive/avro_nullable_fields.q
index 584c6f7..f90ceb9 100644
--- a/ql/src/test/queries/clientpositive/avro_nullable_fields.q
+++ b/ql/src/test/queries/clientpositive/avro_nullable_fields.q
@@ -17,7 +17,7 @@
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
STORED AS TEXTFILE;

-LOAD DATA LOCAL INPATH '../data/files/csv.txt' INTO TABLE test_serializer;
+LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer;

CREATE TABLE as_avro
ROW FORMAT
diff --git a/ql/src/test/queries/clientpositive/avro_partitioned.q b/ql/src/test/queries/clientpositive/avro_partitioned.q
index 8e4d40f..068a13c 100644
--- a/ql/src/test/queries/clientpositive/avro_partitioned.q
+++ b/ql/src/test/queries/clientpositive/avro_partitioned.q
@@ -28,7 +28,7 @@
]
}');

-LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes;
+LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes;

CREATE TABLE episodes_partitioned
PARTITIONED BY (doctor_pt INT)
diff --git a/ql/src/test/queries/clientpositive/avro_sanity_test.q b/ql/src/test/queries/clientpositive/avro_sanity_test.q
index e3f8b07..dbb9995 100644
--- a/ql/src/test/queries/clientpositive/avro_sanity_test.q
+++ b/ql/src/test/queries/clientpositive/avro_sanity_test.q
@@ -30,7 +30,7 @@

DESCRIBE doctors;

-LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors;
+LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors;

SELECT * FROM doctors ORDER BY number;

diff --git a/ql/src/test/queries/clientpositive/binarysortable_1.q b/ql/src/test/queries/clientpositive/binarysortable_1.q
index a98a230..39c1d25 100644
--- a/ql/src/test/queries/clientpositive/binarysortable_1.q
+++ b/ql/src/test/queries/clientpositive/binarysortable_1.q
@@ -3,7 +3,7 @@
FIELDS TERMINATED BY '9'
STORED AS TEXTFILE;

-LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE mytable;
+LOAD DATA LOCAL INPATH '../../data/files/string.txt' INTO TABLE mytable;

EXPLAIN
SELECT REGEXP_REPLACE(REGEXP_REPLACE(REGEXP_REPLACE(key, '\001', '^A'), '\0', '^@'), '\002', '^B'), value
diff --git a/ql/src/test/queries/clientpositive/bucket_map_join_1.q b/ql/src/test/queries/clientpositive/bucket_map_join_1.q
index 33dd5d5..6bdb09e 100644
--- a/ql/src/test/queries/clientpositive/bucket_map_join_1.q
+++ b/ql/src/test/queries/clientpositive/bucket_map_join_1.q
@@ -9,8 +9,8 @@
create table table2(key string, value string) clustered by (value, key)
sorted by (value, key) into 1 BUCKETS stored as textfile;

-load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1;
-load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2;
+load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1;
+load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2;

set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/bucket_map_join_2.q b/ql/src/test/queries/clientpositive/bucket_map_join_2.q
index d1097e7..07f6d15 100644
--- a/ql/src/test/queries/clientpositive/bucket_map_join_2.q
+++ b/ql/src/test/queries/clientpositive/bucket_map_join_2.q
@@ -9,8 +9,8 @@
create table table2(key string, value string) clustered by (value, key)
sorted by (value desc, key desc) into 1 BUCKETS stored as textfile;

-load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1;
-load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2;
+load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1;
+load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2;

set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_1.q b/ql/src/test/queries/clientpositive/bucketcontext_1.q
index 5b01399..047a2a5 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_1.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_1.q
@@ -1,18 +1,18 @@
-- small 1 part, 2 bucket & big 2 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_2.q b/ql/src/test/queries/clientpositive/bucketcontext_2.q
index f952f2e..d58e844 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_2.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_2.q
@@ -1,16 +1,16 @@
-- small 1 part, 4 bucket & big 2 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_3.q b/ql/src/test/queries/clientpositive/bucketcontext_3.q
index 461fbb1..fd80174 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_3.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_3.q
@@ -1,16 +1,16 @@
-- small 2 part, 2 bucket & big 1 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_4.q b/ql/src/test/queries/clientpositive/bucketcontext_4.q
index 366da44..5d21ea5d 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_4.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_4.q
@@ -1,18 +1,18 @@
-- small 2 part, 4 bucket & big 1 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_5.q b/ql/src/test/queries/clientpositive/bucketcontext_5.q
index 411fed3..5078072 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_5.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_5.q
@@ -1,13 +1,13 @@
-- small no part, 4 bucket & big no part, 2 bucket
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small;

CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big;

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_6.q b/ql/src/test/queries/clientpositive/bucketcontext_6.q
index 204d1e9..0f7c72f 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_6.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_6.q
@@ -1,16 +1,16 @@
-- small no part, 4 bucket & big 2 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small;
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small;
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small;

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_7.q b/ql/src/test/queries/clientpositive/bucketcontext_7.q
index b0bca46..c528f61 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_7.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_7.q
@@ -1,21 +1,21 @@
-- small 2 part, 4 bucket & big 2 part, 2 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketcontext_8.q b/ql/src/test/queries/clientpositive/bucketcontext_8.q
index 9533c55..27c55a3 100644
--- a/ql/src/test/queries/clientpositive/bucketcontext_8.q
+++ b/ql/src/test/queries/clientpositive/bucketcontext_8.q
@@ -1,21 +1,21 @@
-- small 2 part, 2 bucket & big 2 part, 4 bucket
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketizedhiveinputformat.q b/ql/src/test/queries/clientpositive/bucketizedhiveinputformat.q
index 4c7f0c9..d2e12e8 100644
--- a/ql/src/test/queries/clientpositive/bucketizedhiveinputformat.q
+++ b/ql/src/test/queries/clientpositive/bucketizedhiveinputformat.q
@@ -3,7 +3,7 @@

CREATE TABLE T1(name STRING) STORED AS TEXTFILE;

-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1;

CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE;

@@ -26,8 +26,8 @@
SELECT COUNT(1) FROM T2;

CREATE TABLE T3(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3;
-LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T3;

EXPLAIN SELECT COUNT(1) FROM T3;
SELECT COUNT(1) FROM T3;
diff --git a/ql/src/test/queries/clientpositive/bucketizedhiveinputformat_auto.q b/ql/src/test/queries/clientpositive/bucketizedhiveinputformat_auto.q
index 9cdfe8e..8b7535d 100644
--- a/ql/src/test/queries/clientpositive/bucketizedhiveinputformat_auto.q
+++ b/ql/src/test/queries/clientpositive/bucketizedhiveinputformat_auto.q
@@ -1,17 +1,17 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08');

CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08');

-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09');

set hive.optimize.bucketmapjoin = true;
select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin1.q b/ql/src/test/queries/clientpositive/bucketmapjoin1.q
index 2bd8e1e..204e759 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin1.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin1.q
@@ -25,16 +25,16 @@
from srcbucket_mapjoin_part a join srcbucket_mapjoin_part_2 b
on a.key=b.key where b.ds="2008-04-08";

-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;

-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');

-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');

create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin10.q b/ql/src/test/queries/clientpositive/bucketmapjoin10.q
index cf4222b..09c0ae2 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin10.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin10.q
@@ -2,23 +2,23 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');

ALTER TABLE srcbucket_mapjoin_part_1 CLUSTERED BY (key) INTO 3 BUCKETS;

-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');

CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');

ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS;

-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');

ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 3 BUCKETS;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin11.q b/ql/src/test/queries/clientpositive/bucketmapjoin11.q
index e10ab52..d330b77 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin11.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin11.q
@@ -2,25 +2,25 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');

ALTER TABLE srcbucket_mapjoin_part_1 CLUSTERED BY (key) INTO 4 BUCKETS;

-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2');

CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');

ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS;

-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2');

set hive.optimize.bucketmapjoin=true;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin12.q b/ql/src/test/queries/clientpositive/bucketmapjoin12.q
index 8139000..43a9de4 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin12.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin12.q
@@ -2,20 +2,20 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_2 NOT CLUSTERED;
CREATE TABLE srcbucket_mapjoin_part_3 (key INT, value STRING) PARTITIONED BY (part STRING)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_3 CLUSTERED BY (key) INTO 2 BUCKETS;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin2.q b/ql/src/test/queries/clientpositive/bucketmapjoin2.q
index fdbadfc..108b67a 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin2.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin2.q
@@ -1,12 +1,12 @@
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
@@ -76,8 +76,8 @@
on a.key = b.key;
-- HIVE-3210
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
set hive.optimize.bucketmapjoin = true;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin3.q b/ql/src/test/queries/clientpositive/bucketmapjoin3.q
index 8fda802..78c23d5 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin3.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin3.q
@@ -1,16 +1,16 @@
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin4.q b/ql/src/test/queries/clientpositive/bucketmapjoin4.q
index c1a8f2a..54626e7 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin4.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin4.q
@@ -1,17 +1,17 @@
set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin5.q b/ql/src/test/queries/clientpositive/bucketmapjoin5.q
index 2df49b6..72cffc2 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin5.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin5.q
@@ -1,22 +1,22 @@
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin7.q b/ql/src/test/queries/clientpositive/bucketmapjoin7.q
index 3a96c64..a15570b 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin7.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin7.q
@@ -2,13 +2,13 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0');
CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0');
set hive.optimize.bucketmapjoin=true;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin8.q b/ql/src/test/queries/clientpositive/bucketmapjoin8.q
index 5e8daa5..f467ea6 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin8.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin8.q
@@ -2,13 +2,13 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 3 BUCKETS;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin9.q b/ql/src/test/queries/clientpositive/bucketmapjoin9.q
index 86344a5..f1d5f58 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin9.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin9.q
@@ -2,14 +2,14 @@
CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1');
CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS;
@@ -28,8 +28,8 @@
ALTER TABLE srcbucket_mapjoin_part_2 DROP PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (value) INTO 2 BUCKETS;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1');
ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS;
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin_negative.q b/ql/src/test/queries/clientpositive/bucketmapjoin_negative.q
index d763433..ea140dd 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin_negative.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin_negative.q
@@ -3,13 +3,13 @@
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin_negative2.q b/ql/src/test/queries/clientpositive/bucketmapjoin_negative2.q
index 901f056..e2c0d8c 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin_negative2.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin_negative2.q
@@ -1,12 +1,12 @@
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09');
set hive.optimize.bucketmapjoin = true;
create table bucketmapjoin_tmp_result (key string , value1 string, value2 string);
diff --git a/ql/src/test/queries/clientpositive/bucketmapjoin_negative3.q b/ql/src/test/queries/clientpositive/bucketmapjoin_negative3.q
index d66e123..6398fff 100644
--- a/ql/src/test/queries/clientpositive/bucketmapjoin_negative3.q
+++ b/ql/src/test/queries/clientpositive/bucketmapjoin_negative3.q
@@ -8,21 +8,21 @@
create table test3 (key string, value string) clustered by (key, value) sorted by (key, value) into 3 buckets;
create table test4 (key string, value string) clustered by (value, key) sorted by (value, key) into 3 buckets;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test1;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test1;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test1;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test1;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test1;
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test1;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test2;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test2;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test2;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test2;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test2;
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test2;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test3;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test3;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test3;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test3;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test3;
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test3;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test4;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test4;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test4;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test4;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test4;
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test4;
set hive.optimize.bucketmapjoin = true;
-- should be allowed
diff --git a/ql/src/test/queries/clientpositive/column_access_stats.q b/ql/src/test/queries/clientpositive/column_access_stats.q
index 3c8a309..fbf8bba 100644
--- a/ql/src/test/queries/clientpositive/column_access_stats.q
+++ b/ql/src/test/queries/clientpositive/column_access_stats.q
@@ -4,7 +4,7 @@
-- This test is used for testing the ColumnAccessAnalyzer
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
diff --git a/ql/src/test/queries/clientpositive/columnstats_partlvl.q b/ql/src/test/queries/clientpositive/columnstats_partlvl.q
index 9862777..9dfe8ff 100644
--- a/ql/src/test/queries/clientpositive/columnstats_partlvl.q
+++ b/ql/src/test/queries/clientpositive/columnstats_partlvl.q
@@ -4,8 +4,8 @@
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0);
-LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0);
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0);
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0);
explain
analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID;
diff --git a/ql/src/test/queries/clientpositive/columnstats_tbllvl.q b/ql/src/test/queries/clientpositive/columnstats_tbllvl.q
index 72d88a6..170fbc5 100644
--- a/ql/src/test/queries/clientpositive/columnstats_tbllvl.q
+++ b/ql/src/test/queries/clientpositive/columnstats_tbllvl.q
@@ -13,7 +13,7 @@
avgTimeOnSite int)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
+LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
explain
analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue;
diff --git a/ql/src/test/queries/clientpositive/compute_stats_binary.q b/ql/src/test/queries/clientpositive/compute_stats_binary.q
index c198136..fd15634 100644
--- a/ql/src/test/queries/clientpositive/compute_stats_binary.q
+++ b/ql/src/test/queries/clientpositive/compute_stats_binary.q
@@ -1,7 +1,7 @@
create table tab_binary(a binary);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/binary.txt" INTO TABLE tab_binary;
+LOAD DATA LOCAL INPATH "../../data/files/binary.txt" INTO TABLE tab_binary;
select count(*) from tab_binary;
diff --git a/ql/src/test/queries/clientpositive/compute_stats_boolean.q b/ql/src/test/queries/clientpositive/compute_stats_boolean.q
index dc76f7c..cddb53f 100644
--- a/ql/src/test/queries/clientpositive/compute_stats_boolean.q
+++ b/ql/src/test/queries/clientpositive/compute_stats_boolean.q
@@ -1,7 +1,7 @@
create table tab_bool(a boolean);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/bool.txt" INTO TABLE tab_bool;
+LOAD DATA LOCAL INPATH "../../data/files/bool.txt" INTO TABLE tab_bool;
select count(*) from tab_bool;
diff --git a/ql/src/test/queries/clientpositive/compute_stats_double.q b/ql/src/test/queries/clientpositive/compute_stats_double.q
index 6c6dc47..7a1e0f6 100644
--- a/ql/src/test/queries/clientpositive/compute_stats_double.q
+++ b/ql/src/test/queries/clientpositive/compute_stats_double.q
@@ -1,7 +1,7 @@
create table tab_double(a double);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/double.txt" INTO TABLE tab_double;
+LOAD DATA LOCAL INPATH "../../data/files/double.txt" INTO TABLE tab_double;
select count(*) from tab_double;
diff --git a/ql/src/test/queries/clientpositive/compute_stats_long.q b/ql/src/test/queries/clientpositive/compute_stats_long.q
index 7d0a158..6a2070f 100644
--- a/ql/src/test/queries/clientpositive/compute_stats_long.q
+++ b/ql/src/test/queries/clientpositive/compute_stats_long.q
@@ -1,7 +1,7 @@
create table tab_int(a int);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int;
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int;
select count(*) from tab_int;
diff --git a/ql/src/test/queries/clientpositive/compute_stats_string.q b/ql/src/test/queries/clientpositive/compute_stats_string.q
index f146f6b..0023e7f 100644
--- a/ql/src/test/queries/clientpositive/compute_stats_string.q
+++ b/ql/src/test/queries/clientpositive/compute_stats_string.q
@@ -1,7 +1,7 @@
create table tab_string(a string);
-- insert some data
-LOAD DATA LOCAL INPATH "../data/files/string.txt" INTO TABLE tab_string;
+LOAD DATA LOCAL INPATH "../../data/files/string.txt" INTO TABLE tab_string;
select count(*) from tab_string;
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer4.q b/ql/src/test/queries/clientpositive/correlationoptimizer4.q
index 70fcdfc..953d191 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer4.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer4.q
@@ -1,9 +1,9 @@
CREATE TABLE T1(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3;
set hive.auto.convert.join=false;
set hive.optimize.correlation=false;
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer5.q b/ql/src/test/queries/clientpositive/correlationoptimizer5.q
index ac836c0..287c7a3 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer5.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer5.q
@@ -1,11 +1,11 @@
CREATE TABLE T1(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1;
CREATE TABLE T2(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2;
CREATE TABLE T3(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/kv3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3;
CREATE TABLE T4(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE T4;
+LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4;
CREATE TABLE dest_co1(key INT, val STRING);
CREATE TABLE dest_co2(key INT, val STRING);
diff --git a/ql/src/test/queries/clientpositive/count.q b/ql/src/test/queries/clientpositive/count.q
index 0d66a5e..74ae9e4 100644
--- a/ql/src/test/queries/clientpositive/count.q
+++ b/ql/src/test/queries/clientpositive/count.q
@@ -1,5 +1,5 @@
create table abcd (a int, b int, c int, d int);
-LOAD DATA LOCAL INPATH '../data/files/in4.txt' INTO TABLE abcd;
+LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd;
select * from abcd;
set hive.map.aggr=true;
diff --git a/ql/src/test/queries/clientpositive/create_like.q b/ql/src/test/queries/clientpositive/create_like.q
index cb4d657..13539a6 100644
--- a/ql/src/test/queries/clientpositive/create_like.q
+++ b/ql/src/test/queries/clientpositive/create_like.q
@@ -21,8 +21,8 @@
SELECT * FROM table1;
SELECT * FROM table2;
-CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.src.data.dir}/files/ext_test';
-CREATE EXTERNAL TABLE table5 LIKE table4 LOCATION '${system:test.src.data.dir}/files/ext_test';
+CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
+CREATE EXTERNAL TABLE table5 LIKE table4 LOCATION '${system:hive.root}/data/files/ext_test';
SELECT * FROM table4;
SELECT * FROM table5;
@@ -31,5 +31,5 @@
SELECT * FROM table4;
DROP TABLE table4;
-CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.src.data.dir}/files/ext_test';
+CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
SELECT * FROM table4;
diff --git a/ql/src/test/queries/clientpositive/create_merge_compressed.q b/ql/src/test/queries/clientpositive/create_merge_compressed.q
index 4418b34..483931b 100644
--- a/ql/src/test/queries/clientpositive/create_merge_compressed.q
+++ b/ql/src/test/queries/clientpositive/create_merge_compressed.q
@@ -1,6 +1,6 @@
create table src_rc_merge_test(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test;
set hive.exec.compress.output = true;
diff --git a/ql/src/test/queries/clientpositive/create_nested_type.q b/ql/src/test/queries/clientpositive/create_nested_type.q
index 2debd0d..735b139 100644
--- a/ql/src/test/queries/clientpositive/create_nested_type.q
+++ b/ql/src/test/queries/clientpositive/create_nested_type.q
@@ -9,7 +9,7 @@
DESCRIBE table1;
DESCRIBE EXTENDED table1;
-LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1;
+LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1;
SELECT * from table1;
diff --git a/ql/src/test/queries/clientpositive/create_struct_table.q b/ql/src/test/queries/clientpositive/create_struct_table.q
index dd5aa63..1e5d151 100644
--- a/ql/src/test/queries/clientpositive/create_struct_table.q
+++ b/ql/src/test/queries/clientpositive/create_struct_table.q
@@ -4,7 +4,7 @@
fields terminated by '\t'
collection items terminated by '\001';
-load data local inpath '../data/files/kv1.txt'
+load data local inpath '../../data/files/kv1.txt'
overwrite into table abc;
SELECT strct, strct.a, strct.b FROM abc LIMIT 10;
diff --git a/ql/src/test/queries/clientpositive/create_union_table.q b/ql/src/test/queries/clientpositive/create_union_table.q
index bb0e5b9..6bc4d29 100644
--- a/ql/src/test/queries/clientpositive/create_union_table.q
+++ b/ql/src/test/queries/clientpositive/create_union_table.q
@@ -4,7 +4,7 @@
create table abc(mydata uniontype<int,double,array<string>,struct<a:int,b:string>>,
strct struct<a:int, b:string, c:string>);
-load data local inpath '../data/files/union_input.txt'
+load data local inpath '../../data/files/union_input.txt'
overwrite into table abc;
SELECT * FROM abc;
diff --git a/ql/src/test/queries/clientpositive/custom_input_output_format.q b/ql/src/test/queries/clientpositive/custom_input_output_format.q
index ff5e86d..d769d05 100644
--- a/ql/src/test/queries/clientpositive/custom_input_output_format.q
+++ b/ql/src/test/queries/clientpositive/custom_input_output_format.q
@@ -1,6 +1,7 @@
-ADD JAR ../build/ql/test/test-udfs.jar;
CREATE TABLE src1_rot13_iof(key STRING, value STRING)
STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.udf.Rot13InputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.udf.Rot13OutputFormat';
+DESCRIBE EXTENDED src1_rot13_iof;
+SELECT * FROM src1 ORDER BY key, value;
INSERT OVERWRITE TABLE src1_rot13_iof SELECT * FROM src1;
-SELECT * FROM src1_rot13_iof;
+SELECT * FROM src1_rot13_iof ORDER BY key, value;
diff --git a/ql/src/test/queries/clientpositive/database.q b/ql/src/test/queries/clientpositive/database.q
index 9140a42..e3ceacc 100644
--- a/ql/src/test/queries/clientpositive/database.q
+++ b/ql/src/test/queries/clientpositive/database.q
@@ -58,7 +58,7 @@
DESCRIBE EXTENDED test_table_like;
-- LOAD and SELECT
-LOAD DATA LOCAL INPATH '../data/files/test.dat'
+LOAD DATA LOCAL INPATH '../../data/files/test.dat'
OVERWRITE INTO TABLE test_table;
SELECT * FROM test_table;
@@ -146,7 +146,7 @@
STORED AS TEXTFILE;
-- LOAD into foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.src;
-- SELECT from foreign table
@@ -158,7 +158,7 @@
STORED AS TEXTFILE;
-- LOAD data into Partitioned foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.srcpart
PARTITION (ds='2008-04-08', hr='11');
diff --git a/ql/src/test/queries/clientpositive/database_drop.q b/ql/src/test/queries/clientpositive/database_drop.q
index 4e17c7a..0e5fe6f 100644
--- a/ql/src/test/queries/clientpositive/database_drop.q
+++ b/ql/src/test/queries/clientpositive/database_drop.q
@@ -14,7 +14,7 @@
-- add a table, index and view
CREATE TABLE temp_tbl (id INT, name STRING);
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE temp_tbl;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE temp_tbl;
CREATE VIEW temp_tbl_view AS SELECT * FROM temp_tbl;
CREATE INDEX idx1 ON TABLE temp_tbl(id) AS 'COMPACT' with DEFERRED REBUILD;
ALTER INDEX idx1 ON temp_tbl REBUILD;
@@ -23,15 +23,15 @@
dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/dbcascade/temp_tbl2_idx2;
-- add a table, index and view with a different storage location
CREATE TABLE temp_tbl2 (id INT, name STRING) LOCATION 'file:${system:test.tmp.dir}/dbcascade/temp_tbl2';
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' into table temp_tbl2;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' into table temp_tbl2;
CREATE VIEW temp_tbl2_view AS SELECT * FROM temp_tbl2;
CREATE INDEX idx2 ON TABLE temp_tbl2(id) AS 'COMPACT' with DEFERRED REBUILD LOCATION 'file:${system:test.tmp.dir}/dbcascade/temp_tbl2_idx2';
ALTER INDEX idx2 ON temp_tbl2 REBUILD;
-- add a partitioned table, index and view
CREATE TABLE part_tab (id INT, name STRING) PARTITIONED BY (ds string);
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09');
CREATE INDEX idx3 ON TABLE part_tab(id) AS 'COMPACT' with DEFERRED REBUILD;
ALTER INDEX idx3 ON part_tab PARTITION (ds='2008-04-09') REBUILD;
ALTER INDEX idx3 ON part_tab PARTITION (ds='2009-04-09') REBUILD;
@@ -41,8 +41,8 @@
-- add a partitioned table, index and view with a different storage location
CREATE TABLE part_tab2 (id INT, name STRING) PARTITIONED BY (ds string)
LOCATION 'file:${system:test.tmp.dir}/dbcascade/part_tab2';
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09');
CREATE INDEX idx4 ON TABLE part_tab2(id) AS 'COMPACT' with DEFERRED REBUILD
LOCATION 'file:${system:test.tmp.dir}/dbcascade/part_tab2_idx4';
ALTER INDEX idx4 ON part_tab2 PARTITION (ds='2008-04-09') REBUILD;
@@ -56,8 +56,8 @@
CREATE TABLE part_tab3 (id INT, name STRING) PARTITIONED BY (ds string)
LOCATION 'file:${system:test.tmp.dir}/dbcascade/part_tab3';
ALTER TABLE part_tab3 ADD PARTITION (ds='2007-04-09') LOCATION 'file:${system:test.tmp.dir}/dbcascade/part_tab3_p1';
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09');
CREATE INDEX idx5 ON TABLE part_tab3(id) AS 'COMPACT' with DEFERRED REBUILD
LOCATION 'file:${system:test.tmp.dir}/dbcascade/part_tab3_idx5';
ALTER INDEX idx5 ON part_tab3 PARTITION (ds='2008-04-09') REBUILD;
diff --git a/ql/src/test/queries/clientpositive/date_2.q b/ql/src/test/queries/clientpositive/date_2.q
index 0821e01..c5346c8 100644
--- a/ql/src/test/queries/clientpositive/date_2.q
+++ b/ql/src/test/queries/clientpositive/date_2.q
@@ -8,7 +8,7 @@
FL_NUM int
);
-LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2;
+LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2;
select fl_date, fl_num from date_2 order by fl_date asc, fl_num desc;
select fl_date, fl_num from date_2 order by fl_date desc, fl_num asc;
diff --git a/ql/src/test/queries/clientpositive/date_join1.q b/ql/src/test/queries/clientpositive/date_join1.q
index a5844b7..34bb8c8 100644
--- a/ql/src/test/queries/clientpositive/date_join1.q
+++ b/ql/src/test/queries/clientpositive/date_join1.q
@@ -8,7 +8,7 @@
FL_NUM int
);
-LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1;
+LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1;
-- Note that there are 2 rows with date 2000-11-28, so we should expect 4 rows with that date in the join results
select t1.fl_num, t1.fl_date, t2.fl_num, t2.fl_date
diff --git a/ql/src/test/queries/clientpositive/date_serde.q b/ql/src/test/queries/clientpositive/date_serde.q
index ffc06d2..24b4820 100644
--- a/ql/src/test/queries/clientpositive/date_serde.q
+++ b/ql/src/test/queries/clientpositive/date_serde.q
@@ -22,7 +22,7 @@
)
stored as textfile;
-load data local inpath '../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex;
+load data local inpath '../../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex;
select * from date_serde_regex;
select fl_date, count(*) from date_serde_regex group by fl_date;
diff --git a/ql/src/test/queries/clientpositive/date_udf.q b/ql/src/test/queries/clientpositive/date_udf.q
index 9696320..c55b9f9 100644
--- a/ql/src/test/queries/clientpositive/date_udf.q
+++ b/ql/src/test/queries/clientpositive/date_udf.q
@@ -17,7 +17,7 @@
ARR_DELAY float,
FL_NUM int
);
-LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight;
+LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight;
-- Test UDFs with date input
select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
diff --git a/ql/src/test/queries/clientpositive/decimal_3.q b/ql/src/test/queries/clientpositive/decimal_3.q
index 0cdc4c9..d7e46a6 100644
--- a/ql/src/test/queries/clientpositive/decimal_3.q
+++ b/ql/src/test/queries/clientpositive/decimal_3.q
@@ -5,7 +5,7 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_3;
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3;
SELECT * FROM DECIMAL_3 ORDER BY key, value;
diff --git a/ql/src/test/queries/clientpositive/decimal_4.q b/ql/src/test/queries/clientpositive/decimal_4.q
index 0c18488..699ba3c 100644
--- a/ql/src/test/queries/clientpositive/decimal_4.q
+++ b/ql/src/test/queries/clientpositive/decimal_4.q
@@ -9,7 +9,7 @@
CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25))
STORED AS ORC;
-LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_4_1;
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1;
INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1;
diff --git a/ql/src/test/queries/clientpositive/decimal_5.q b/ql/src/test/queries/clientpositive/decimal_5.q
index ecf9376..70e5db0 100644
--- a/ql/src/test/queries/clientpositive/decimal_5.q
+++ b/ql/src/test/queries/clientpositive/decimal_5.q
@@ -5,7 +5,7 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_5;
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5;
SELECT key FROM DECIMAL_5 ORDER BY key;
diff --git a/ql/src/test/queries/clientpositive/decimal_6.q b/ql/src/test/queries/clientpositive/decimal_6.q
index c1135f1..b58e224 100644
--- a/ql/src/test/queries/clientpositive/decimal_6.q
+++ b/ql/src/test/queries/clientpositive/decimal_6.q
@@ -12,8 +12,8 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_1;
-LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_2;
+LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1;
+LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2;
SELECT T.key from (
SELECT key, value from DECIMAL_6_1
diff --git a/ql/src/test/queries/clientpositive/decimal_join.q b/ql/src/test/queries/clientpositive/decimal_join.q
index 26fa6c3..86c14d9 100644
--- a/ql/src/test/queries/clientpositive/decimal_join.q
+++ b/ql/src/test/queries/clientpositive/decimal_join.q
@@ -1,6 +1,6 @@
-- HIVE-5292 Join on decimal columns fails
create table src_dec (key decimal(3,0), value string);
-load data local inpath '../data/files/kv1.txt' into table src_dec;
+load data local inpath '../../data/files/kv1.txt' into table src_dec;
select * from src_dec a join src_dec b on a.key=b.key+450;
diff --git a/ql/src/test/queries/clientpositive/decimal_precision.q b/ql/src/test/queries/clientpositive/decimal_precision.q
index 1c81379..da09c6c 100644
--- a/ql/src/test/queries/clientpositive/decimal_precision.q
+++ b/ql/src/test/queries/clientpositive/decimal_precision.q
@@ -5,7 +5,7 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION;
+LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION;
SELECT * FROM DECIMAL_PRECISION ORDER BY dec;
diff --git a/ql/src/test/queries/clientpositive/decimal_serde.q b/ql/src/test/queries/clientpositive/decimal_serde.q
index 3556807..cf3a86c 100644
--- a/ql/src/test/queries/clientpositive/decimal_serde.q
+++ b/ql/src/test/queries/clientpositive/decimal_serde.q
@@ -8,7 +8,7 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT;
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT;
SELECT * FROM DECIMAL_TEXT ORDER BY key, value;
diff --git a/ql/src/test/queries/clientpositive/decimal_udf.q b/ql/src/test/queries/clientpositive/decimal_udf.q
index 06d1785..f1ea686 100644
--- a/ql/src/test/queries/clientpositive/decimal_udf.q
+++ b/ql/src/test/queries/clientpositive/decimal_udf.q
@@ -5,7 +5,7 @@
FIELDS TERMINATED BY ' '
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_UDF;
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF;
-- addition
EXPLAIN SELECT key + key FROM DECIMAL_UDF;
diff --git a/ql/src/test/queries/clientpositive/delimiter.q b/ql/src/test/queries/clientpositive/delimiter.q
index 112ac57..14d508c 100644
--- a/ql/src/test/queries/clientpositive/delimiter.q
+++ b/ql/src/test/queries/clientpositive/delimiter.q
@@ -3,7 +3,7 @@
fields terminated by '\t'
lines terminated by '\n'
stored as textfile;
-LOAD DATA LOCAL INPATH '../data/files/in7.txt' INTO TABLE impressions;
+LOAD DATA LOCAL INPATH '../../data/files/in7.txt' INTO TABLE impressions;
select * from impressions;
diff --git a/ql/src/test/queries/clientpositive/disable_file_format_check.q b/ql/src/test/queries/clientpositive/disable_file_format_check.q
index 6ea4156..81a5b3a 100644
--- a/ql/src/test/queries/clientpositive/disable_file_format_check.q
+++ b/ql/src/test/queries/clientpositive/disable_file_format_check.q
@@ -1,9 +1,9 @@
set hive.fileformat.check = false;
create table kv_fileformat_check_txt (key string, value string) stored as textfile;
-load data local inpath '../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt;
+load data local inpath '../../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt;
create table kv_fileformat_check_seq (key string, value string) stored as sequencefile;
-load data local inpath '../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq;
+load data local inpath '../../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq;
diff --git a/ql/src/test/queries/clientpositive/escape1.q b/ql/src/test/queries/clientpositive/escape1.q
index a7f4cf7..d29a7a8 100644
--- a/ql/src/test/queries/clientpositive/escape1.q
+++ b/ql/src/test/queries/clientpositive/escape1.q
@@ -8,7 +8,7 @@
DROP TABLE escape_raw;
CREATE TABLE escape_raw (s STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/escapetest.txt' INTO TABLE escape_raw;
+LOAD DATA LOCAL INPATH '../../data/files/escapetest.txt' INTO TABLE escape_raw;
SELECT count(*) from escape_raw;
SELECT * from escape_raw;
diff --git a/ql/src/test/queries/clientpositive/escape2.q b/ql/src/test/queries/clientpositive/escape2.q
index 473cbf8..2460134 100644
--- a/ql/src/test/queries/clientpositive/escape2.q
+++ b/ql/src/test/queries/clientpositive/escape2.q
@@ -10,7 +10,7 @@
DROP TABLE IF EXISTS escape_raw;
CREATE TABLE escape_raw (s STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/escapetest.txt' INTO TABLE escape_raw;
+LOAD DATA LOCAL INPATH '../../data/files/escapetest.txt' INTO TABLE escape_raw;
SELECT count(*) from escape_raw;
SELECT * from escape_raw;
diff --git a/ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q b/ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q
index 7fa96b6..6f9a481 100644
--- a/ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q
+++ b/ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q
@@ -5,8 +5,8 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -16,7 +16,7 @@
import from 'ql/test/data/exports/exim_department';
describe extended exim_department;
show table extended like exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_01_nonpart.q b/ql/src/test/queries/clientpositive/exim_01_nonpart.q
index 9920e77..1e2eed8 100644
--- a/ql/src/test/queries/clientpositive/exim_01_nonpart.q
+++ b/ql/src/test/queries/clientpositive/exim_01_nonpart.q
@@ -5,9 +5,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -17,7 +17,7 @@
import from 'ql/test/data/exports/exim_department';
describe extended exim_department;
show table extended like exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_02_00_part_empty.q b/ql/src/test/queries/clientpositive/exim_02_00_part_empty.q
index 4017c83..474a5a4 100644
--- a/ql/src/test/queries/clientpositive/exim_02_00_part_empty.q
+++ b/ql/src/test/queries/clientpositive/exim_02_00_part_empty.q
@@ -7,8 +7,8 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -18,7 +18,7 @@
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_02_part.q b/ql/src/test/queries/clientpositive/exim_02_part.q
index 21138f0..dbd2c6b 100644
--- a/ql/src/test/queries/clientpositive/exim_02_part.q
+++ b/ql/src/test/queries/clientpositive/exim_02_part.q
@@ -7,10 +7,10 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -20,7 +20,7 @@
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q b/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
index 5f6bdee..47d949a 100644
--- a/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
+++ b/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
@@ -5,9 +5,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -21,6 +21,6 @@
describe extended exim_department;
select * from exim_department;
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_04_all_part.q b/ql/src/test/queries/clientpositive/exim_04_all_part.q
index 69c6faa..b2567fb 100644
--- a/ql/src/test/queries/clientpositive/exim_04_all_part.q
+++ b/ql/src/test/queries/clientpositive/exim_04_all_part.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -26,7 +26,7 @@
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q b/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
index cdc02fa..82df698 100644
--- a/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
+++ b/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
@@ -19,8 +19,8 @@
outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat";
alter table exim_employee add partition (emp_country='in', emp_state='ka');
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -32,7 +32,7 @@
describe extended exim_employee partition (emp_country='in', emp_state='tn');
describe extended exim_employee partition (emp_country='in', emp_state='ka');
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_05_some_part.q b/ql/src/test/queries/clientpositive/exim_05_some_part.q
index 50a5946..a2c9773 100644
--- a/ql/src/test/queries/clientpositive/exim_05_some_part.q
+++ b/ql/src/test/queries/clientpositive/exim_05_some_part.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -26,7 +26,7 @@
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_06_one_part.q b/ql/src/test/queries/clientpositive/exim_06_one_part.q
index 5136090..3a61296 100644
--- a/ql/src/test/queries/clientpositive/exim_06_one_part.q
+++ b/ql/src/test/queries/clientpositive/exim_06_one_part.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -26,7 +26,7 @@
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
show table extended like exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q b/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
index 5b9d4dd..8c774d5 100644
--- a/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
+++ b/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -28,12 +28,12 @@
partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
stored as textfile
tblproperties("maker"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="al");
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q b/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
index 173f156..8a1d945 100644
--- a/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
+++ b/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
@@ -5,9 +5,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -17,12 +17,12 @@
partitioned by (emp_org string)
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr");
+load data local inpath "../../data/files/test.dat" into table exim_department partition (emp_org="hr");
import table exim_imported_dept from 'ql/test/data/exports/exim_department';
describe extended exim_imported_dept;
select * from exim_imported_dept;
drop table exim_imported_dept;
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q b/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
index 178b766..53fc293 100644
--- a/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
+++ b/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -27,14 +27,14 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_10_external_managed.q b/ql/src/test/queries/clientpositive/exim_10_external_managed.q
index 413f2aa..54859ee 100644
--- a/ql/src/test/queries/clientpositive/exim_10_external_managed.q
+++ b/ql/src/test/queries/clientpositive/exim_10_external_managed.q
@@ -2,18 +2,18 @@
set hive.test.mode.prefix=;
set hive.test.mode.nosamplelist=exim_department,exim_employee;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
create external table exim_department ( dep_id int comment "department id")
stored as textfile
location 'ql/test/data/tablestore/exim_department'
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
create database importer;
use importer;
@@ -22,6 +22,6 @@
describe extended exim_department;
select * from exim_department;
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_11_managed_external.q b/ql/src/test/queries/clientpositive/exim_11_managed_external.q
index f3b2896..4fc39dc 100644
--- a/ql/src/test/queries/clientpositive/exim_11_managed_external.q
+++ b/ql/src/test/queries/clientpositive/exim_11_managed_external.q
@@ -5,9 +5,9 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -17,7 +17,7 @@
import external table exim_department from 'ql/test/data/exports/exim_department';
describe extended exim_department;
select * from exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_12_external_location.q b/ql/src/test/queries/clientpositive/exim_12_external_location.q
index 37d0634..e4d50ff 100644
--- a/ql/src/test/queries/clientpositive/exim_12_external_location.q
+++ b/ql/src/test/queries/clientpositive/exim_12_external_location.q
@@ -5,24 +5,24 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/ql/test/data/exports/exim_department/temp;
+dfs -rmr ${system:test.tmp.dir}/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/ql/test/data/tablestore/exim_department/temp;
+dfs -rmr ${system:test.tmp.dir}/ql/test/data/tablestore/exim_department;
import external table exim_department from 'ql/test/data/exports/exim_department'
location 'ql/test/data/tablestore/exim_department';
describe extended exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr ${system:test.tmp.dir}/ql/test/data/exports/exim_department;
select * from exim_department;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs -rmr ${system:test.tmp.dir}/ql/test/data/tablestore/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_13_managed_location.q b/ql/src/test/queries/clientpositive/exim_13_managed_location.q
index fb5058b..909d237 100644
--- a/ql/src/test/queries/clientpositive/exim_13_managed_location.q
+++ b/ql/src/test/queries/clientpositive/exim_13_managed_location.q
@@ -5,24 +5,24 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
import table exim_department from 'ql/test/data/exports/exim_department'
location 'ql/test/data/tablestore/exim_department';
describe extended exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
select * from exim_department;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q b/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
index 031b6bd..dbb5fd9 100644
--- a/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
+++ b/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
@@ -5,17 +5,17 @@
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
create table exim_department ( dep_id int comment "department id")
stored as textfile
@@ -24,9 +24,9 @@
import table exim_department from 'ql/test/data/exports/exim_department'
location 'ql/test/data/tablestore/exim_department';
describe extended exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
select * from exim_department;
-dfs -rmr ../build/ql/test/data/tablestore/exim_department;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_department;
select * from exim_department;
drop table exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_15_external_part.q b/ql/src/test/queries/clientpositive/exim_15_external_part.q
index ff088c7..989dd6c 100644
--- a/ql/src/test/queries/clientpositive/exim_15_external_part.q
+++ b/ql/src/test/queries/clientpositive/exim_15_external_part.q
@@ -7,24 +7,24 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
create external table exim_employee ( emp_id int comment "employee id")
comment "employee table"
@@ -32,17 +32,17 @@
stored as textfile
location 'ql/test/data/tablestore/exim_employee'
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
import external table exim_employee partition (emp_country="us", emp_state="tn")
from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_16_part_external.q b/ql/src/test/queries/clientpositive/exim_16_part_external.q
index 6f4ee7a..7eec358 100644
--- a/ql/src/test/queries/clientpositive/exim_16_part_external.q
+++ b/ql/src/test/queries/clientpositive/exim_16_part_external.q
@@ -7,26 +7,26 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore2/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore2/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore2/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore2/exim_employee;
create external table exim_employee ( emp_id int comment "employee id")
comment "employee table"
@@ -39,11 +39,11 @@
location 'ql/test/data/tablestore/exim_employee';
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="us", emp_state="tn");
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore2/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore2/exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_17_part_managed.q b/ql/src/test/queries/clientpositive/exim_17_part_managed.q
index 56ec152..20cd7e0 100644
--- a/ql/src/test/queries/clientpositive/exim_17_part_managed.q
+++ b/ql/src/test/queries/clientpositive/exim_17_part_managed.q
@@ -7,24 +7,24 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
create table exim_employee ( emp_id int comment "employee id")
comment "employee table"
@@ -39,9 +39,9 @@
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="us", emp_state="tn");
show table extended like exim_employee partition (emp_country="us", emp_state="ap");
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_18_part_external.q b/ql/src/test/queries/clientpositive/exim_18_part_external.q
index 7aa1297..a300b1d 100644
--- a/ql/src/test/queries/clientpositive/exim_18_part_external.q
+++ b/ql/src/test/queries/clientpositive/exim_18_part_external.q
@@ -7,16 +7,16 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -29,7 +29,7 @@
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="us", emp_state="tn");
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q b/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q
index cb9f8ef..a821c75 100644
--- a/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q
+++ b/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q
@@ -7,20 +7,20 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test2.dat"
+load data local inpath "../../data/files/test2.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
import external table exim_employee
from 'ql/test/data/exports/exim_employee'
@@ -29,9 +29,9 @@
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="in", emp_state="tn");
show table extended like exim_employee partition (emp_country="in", emp_state="ka");
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_19_part_external_location.q b/ql/src/test/queries/clientpositive/exim_19_part_external_location.q
index bdbd19d..be12164 100644
--- a/ql/src/test/queries/clientpositive/exim_19_part_external_location.q
+++ b/ql/src/test/queries/clientpositive/exim_19_part_external_location.q
@@ -7,24 +7,24 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
import external table exim_employee partition (emp_country="us", emp_state="tn")
from 'ql/test/data/exports/exim_employee'
@@ -32,9 +32,9 @@
describe extended exim_employee;
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="us", emp_state="tn");
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q b/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
index eb44961..000904aa 100644
--- a/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
+++ b/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
@@ -7,24 +7,24 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
create database importer;
use importer;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/tablestore/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
import table exim_employee partition (emp_country="us", emp_state="tn")
from 'ql/test/data/exports/exim_employee'
@@ -32,9 +32,9 @@
describe extended exim_employee;
show table extended like exim_employee;
show table extended like exim_employee partition (emp_country="us", emp_state="tn");
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
+dfs -rmr target/tmp/ql/test/data/tablestore/exim_employee;
select * from exim_employee;
drop table exim_employee;
diff --git a/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q b/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
index 822ed70..293a011 100644
--- a/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
@@ -2,13 +2,13 @@
set hive.test.mode.prefix=;
create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../data/files/test.dat" into table exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
set hive.security.authorization.enabled=true;
grant Select on table exim_department to user hive_test_user;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
set hive.security.authorization.enabled=false;
diff --git a/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q b/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
index 440d08d..03714ab 100644
--- a/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
@@ -3,9 +3,9 @@
set hive.test.mode.nosamplelist=exim_department,exim_employee;
create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -22,5 +22,5 @@
select * from exim_department;
drop table exim_department;
drop database importer;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
diff --git a/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q b/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
index 30fc343..cb6af0e 100644
--- a/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
@@ -7,10 +7,10 @@
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -29,6 +29,6 @@
set hive.security.authorization.enabled=false;
select * from exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop table exim_employee;
drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q b/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
index 2dc5af6..8934c47 100644
--- a/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
@@ -3,9 +3,9 @@
set hive.test.mode.nosamplelist=exim_department,exim_employee;
create table exim_department ( dep_id int) stored as textfile;
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/test;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/test;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -20,5 +20,5 @@
select * from exim_department;
drop table exim_department;
drop database importer;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
diff --git a/ql/src/test/queries/clientpositive/global_limit.q b/ql/src/test/queries/clientpositive/global_limit.q
index b76cf34..c8a08af 100644
--- a/ql/src/test/queries/clientpositive/global_limit.q
+++ b/ql/src/test/queries/clientpositive/global_limit.q
@@ -8,9 +8,9 @@
create table gl_src1 (key int, value string) stored as textfile;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1;
@@ -49,10 +49,10 @@
-- partition
create table gl_src_part1 (key int, value string) partitioned by (p string) stored as textfile;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12');
select key from gl_src_part1 where p like '1%' ORDER BY key ASC limit 10;
select key from gl_src_part1 where p='11' ORDER BY key ASC limit 10;
diff --git a/ql/src/test/queries/clientpositive/groupby10.q b/ql/src/test/queries/clientpositive/groupby10.q
index db38d43..7750cb9 100644
--- a/ql/src/test/queries/clientpositive/groupby10.q
+++ b/ql/src/test/queries/clientpositive/groupby10.q
@@ -6,7 +6,7 @@
CREATE TABLE dest2(key INT, val1 INT, val2 INT);
CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE INPUT;
+LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT;
EXPLAIN
FROM INPUT
diff --git a/ql/src/test/queries/clientpositive/groupby_bigdata.q b/ql/src/test/queries/clientpositive/groupby_bigdata.q
index 7e97f75..2e3eddc 100644
--- a/ql/src/test/queries/clientpositive/groupby_bigdata.q
+++ b/ql/src/test/queries/clientpositive/groupby_bigdata.q
@@ -1,7 +1,7 @@
set hive.map.aggr.hash.percentmemory = 0.3;
set hive.mapred.local.mem = 384;
-add file ../data/scripts/dumpdata_script.py;
+add file ../../data/scripts/dumpdata_script.py;
select count(distinct subq.key) from
(FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq;
diff --git a/ql/src/test/queries/clientpositive/groupby_cube1.q b/ql/src/test/queries/clientpositive/groupby_cube1.q
index 46e1f00..099beb4 100644
--- a/ql/src/test/queries/clientpositive/groupby_cube1.q
+++ b/ql/src/test/queries/clientpositive/groupby_cube1.q
@@ -3,7 +3,7 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
EXPLAIN
SELECT key, val, count(1) FROM T1 GROUP BY key, val with cube;
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_id1.q b/ql/src/test/queries/clientpositive/groupby_grouping_id1.q
index bced21f..de4a7c3 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_id1.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_id1.q
@@ -1,6 +1,6 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
SELECT key, val, GROUPING__ID from T1 group by key, val with cube;
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_id2.q b/ql/src/test/queries/clientpositive/groupby_grouping_id2.q
index ffc627c..f451f17 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_id2.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_id2.q
@@ -1,6 +1,6 @@
CREATE TABLE T1(key INT, value INT) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/groupby_groupingid.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1;
set hive.groupby.skewindata = true;
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_sets1.q b/ql/src/test/queries/clientpositive/groupby_grouping_sets1.q
index 4fba733..804dfb3 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_sets1.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_sets1.q
@@ -1,6 +1,6 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1;
SELECT * FROM T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_sets2.q b/ql/src/test/queries/clientpositive/groupby_grouping_sets2.q
index 9f2286c..30f1b42 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_sets2.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_sets2.q
@@ -2,7 +2,7 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1;
-- Since 4 grouping sets would be generated for the query below, an additional MR job should be created
EXPLAIN
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_sets3.q b/ql/src/test/queries/clientpositive/groupby_grouping_sets3.q
index 9a00d0a..7077377 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_sets3.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_sets3.q
@@ -4,8 +4,8 @@
-- additional MR job is created for processing the grouping sets.
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets2.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets2.txt' INTO TABLE T1;
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
set hive.new.job.grouping.set.cardinality = 30;
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_sets4.q b/ql/src/test/queries/clientpositive/groupby_grouping_sets4.q
index 25f1fcd..ff83185 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_sets4.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_sets4.q
@@ -4,7 +4,7 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1;
-- This tests that cubes and rollups work fine inside sub-queries.
EXPLAIN
diff --git a/ql/src/test/queries/clientpositive/groupby_grouping_sets5.q b/ql/src/test/queries/clientpositive/groupby_grouping_sets5.q
index fb0c591..d94bd81 100644
--- a/ql/src/test/queries/clientpositive/groupby_grouping_sets5.q
+++ b/ql/src/test/queries/clientpositive/groupby_grouping_sets5.q
@@ -4,7 +4,7 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1;
-- This tests that cubes and rollups work fine where the source is a sub-query
EXPLAIN
diff --git a/ql/src/test/queries/clientpositive/groupby_rollup1.q b/ql/src/test/queries/clientpositive/groupby_rollup1.q
index f79b0c4..ee8038c 100644
--- a/ql/src/test/queries/clientpositive/groupby_rollup1.q
+++ b/ql/src/test/queries/clientpositive/groupby_rollup1.q
@@ -3,7 +3,7 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
EXPLAIN
SELECT key, val, count(1) FROM T1 GROUP BY key, val with rollup;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_1.q b/ql/src/test/queries/clientpositive/groupby_sort_1.q
index 911a11a..7401a9c 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_1.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_1.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_2.q b/ql/src/test/queries/clientpositive/groupby_sort_2.q
index 31b4ec5..700a8af 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_2.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_2.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key) SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_3.q b/ql/src/test/queries/clientpositive/groupby_sort_3.q
index 103c57a..2ef8447 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_3.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_3.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_4.q b/ql/src/test/queries/clientpositive/groupby_sort_4.q
index e43da3c..3c959e3 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_4.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_4.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_5.q b/ql/src/test/queries/clientpositive/groupby_sort_5.q
index bef5e5d..dd05238 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_5.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_5.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
@@ -30,7 +30,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (val, key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
@@ -52,7 +52,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_6.q b/ql/src/test/queries/clientpositive/groupby_sort_6.q
index cf076e8..aa09aec 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_6.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_6.q
@@ -17,7 +17,7 @@
SELECT * FROM outputTbl1 ORDER BY key;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2');
-- The plan should not be converted to a map-side group since no partition is being accessed
EXPLAIN EXTENDED
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_7.q b/ql/src/test/queries/clientpositive/groupby_sort_7.q
index c2d4215..9933785 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_7.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_7.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING) PARTITIONED BY (ds string)
CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 PARTITION (ds='1') select key, val from T1 where ds = '1';
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_8.q b/ql/src/test/queries/clientpositive/groupby_sort_8.q
index 121804e..f53295e 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_8.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_8.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING) PARTITIONED BY (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 PARTITION (ds='1') select key, val from T1 where ds = '1';
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_9.q b/ql/src/test/queries/clientpositive/groupby_sort_9.q
index 1c3d1cd..296336d 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_9.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_9.q
@@ -6,7 +6,7 @@
CREATE TABLE T1(key STRING, val STRING) PARTITIONED BY (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1');
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 PARTITION (ds='1') select key, val from T1 where ds = '1';
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q b/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q
index 068c26a9..db0faa0 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q
@@ -7,7 +7,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/groupby_sort_test_1.q b/ql/src/test/queries/clientpositive/groupby_sort_test_1.q
index 8efa05e..4ec138e 100644
--- a/ql/src/test/queries/clientpositive/groupby_sort_test_1.q
+++ b/ql/src/test/queries/clientpositive/groupby_sort_test_1.q
@@ -7,7 +7,7 @@
CREATE TABLE T1(key STRING, val STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-- perform an insert to make sure there are 2 files
INSERT OVERWRITE TABLE T1 select key, val from T1;
diff --git a/ql/src/test/queries/clientpositive/import_exported_table.q b/ql/src/test/queries/clientpositive/import_exported_table.q
index 6d61a4a..9eb185a 100644
--- a/ql/src/test/queries/clientpositive/import_exported_table.q
+++ b/ql/src/test/queries/clientpositive/import_exported_table.q
@@ -1,6 +1,6 @@
dfs ${system:test.dfs.mkdir} hdfs:///tmp/test/;
-dfs -copyFromLocal ../data/files/exported_table hdfs:///tmp/test/;
+dfs -copyFromLocal ../../data/files/exported_table hdfs:///tmp/test/;
IMPORT FROM '/tmp/test/exported_table';
DESCRIBE j1_41;
diff --git a/ql/src/test/queries/clientpositive/index_serde.q b/ql/src/test/queries/clientpositive/index_serde.q
index a6fe16b..d160a6f 100644
--- a/ql/src/test/queries/clientpositive/index_serde.q
+++ b/ql/src/test/queries/clientpositive/index_serde.q
@@ -31,7 +31,7 @@
DESCRIBE doctors;
-LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors;
+LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors;
-- Create and build an index
CREATE INDEX doctors_index ON TABLE doctors(number) AS 'COMPACT' WITH DEFERRED REBUILD;
diff --git a/ql/src/test/queries/clientpositive/infer_bucket_sort_dyn_part.q b/ql/src/test/queries/clientpositive/infer_bucket_sort_dyn_part.q
index 119994e..728b8cc 100644
--- a/ql/src/test/queries/clientpositive/infer_bucket_sort_dyn_part.q
+++ b/ql/src/test/queries/clientpositive/infer_bucket_sort_dyn_part.q
@@ -47,12 +47,12 @@
CREATE TABLE srcpart_merge_dp_rc LIKE srcpart;
ALTER TABLE srcpart_merge_dp_rc SET FILEFORMAT RCFILE;
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
-LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
-LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
-LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11);
-LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12);
+LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12);
INSERT OVERWRITE TABLE srcpart_merge_dp_rc PARTITION (ds = '2008-04-08', hr)
SELECT key, value, hr FROM srcpart_merge_dp WHERE ds = '2008-04-08';
diff --git a/ql/src/test/queries/clientpositive/infer_const_type.q b/ql/src/test/queries/clientpositive/infer_const_type.q
index a039dc5..ce5ed84 100644
--- a/ql/src/test/queries/clientpositive/infer_const_type.q
+++ b/ql/src/test/queries/clientpositive/infer_const_type.q
@@ -1,7 +1,7 @@
DROP TABLE infertypes;
CREATE TABLE infertypes(ti TINYINT, si SMALLINT, i INT, bi BIGINT, fl FLOAT, db DOUBLE, str STRING);
-LOAD DATA LOCAL INPATH '../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes;
+LOAD DATA LOCAL INPATH '../../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes;
SELECT * FROM infertypes;
diff --git a/ql/src/test/queries/clientpositive/input13.q b/ql/src/test/queries/clientpositive/input13.q
index 40fbc84..620e9dc 100644
--- a/ql/src/test/queries/clientpositive/input13.q
+++ b/ql/src/test/queries/clientpositive/input13.q
@@ -7,15 +7,15 @@
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
SELECT dest1.* FROM dest1;
SELECT dest2.* FROM dest2;
SELECT dest3.* FROM dest3;
-dfs -cat ../build/ql/test/data/warehouse/dest4.out/*;
+dfs -cat ${system:test.warehouse.dir}/dest4.out/*;
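The two rewrites above repeat, mechanically, across nearly every query file in this series: relative test-data paths gain one extra `../` (the tests now run from a working directory one level deeper under the Maven layout), and hard-coded `../build/ql/test/...` output locations become either a module-local `target/` path or a `${system:...}` property. A bulk edit like this is easy to script; the following is a hypothetical helper, not part of the commit, sketching how the literal substitutions could be applied once over a tree of .q files (assumes Java 8+):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

// Hypothetical one-shot migrator for the path conventions changed in these hunks.
public class QFilePathMigrator {
  public static void main(String[] args) throws IOException {
    Path root = Paths.get(args.length > 0 ? args[0] : "ql/src/test/queries");
    try (Stream<Path> files = Files.walk(root)) {
      files.filter(p -> p.toString().endsWith(".q")).forEach(QFilePathMigrator::rewrite);
    }
  }

  private static void rewrite(Path q) {
    try {
      String text = new String(Files.readAllBytes(q), StandardCharsets.UTF_8);
      String out = text
          // test data resolves one directory deeper under the Maven build
          .replace("../data/files/", "../../data/files/")
          .replace("../data/scripts/", "../../data/scripts/")
          // Ant-era warehouse output moves under the module's target/ directory
          .replace("'../build/ql/test/data/warehouse/", "'target/warehouse/");
      if (!out.equals(text)) {
        Files.write(q, out.getBytes(StandardCharsets.UTF_8));
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
}

Substitutions that need a ${system:...} property instead (the dfs -cat targets above, for example) still call for case-by-case judgment, which is why those lines differ from file to file.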
diff --git a/ql/src/test/queries/clientpositive/input16.q b/ql/src/test/queries/clientpositive/input16.q
index 7604f02..4990d0b 100644
--- a/ql/src/test/queries/clientpositive/input16.q
+++ b/ql/src/test/queries/clientpositive/input16.q
@@ -1,6 +1,6 @@
-- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
DROP TABLE INPUT16;
-ADD JAR ../build/ql/test/TestSerDe.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
+LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT16;
SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;
diff --git a/ql/src/test/queries/clientpositive/input16_cc.q b/ql/src/test/queries/clientpositive/input16_cc.q
index fc4a7c4..9272a92 100644
--- a/ql/src/test/queries/clientpositive/input16_cc.q
+++ b/ql/src/test/queries/clientpositive/input16_cc.q
@@ -4,8 +4,8 @@
-- the user is overwriting it with ctrlC
DROP TABLE INPUT16_CC;
-ADD JAR ../build/ql/test/TestSerDe.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
+LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;
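The replacement ADD JAR lines lean on ${system:...} interpolation instead of a fixed build path: the test driver substitutes JVM system properties into the query text before execution, so the same script resolves against whatever local repository and version the Maven build supplies. As a rough illustration only (the property names come from the hunk above; the expansion code is a hypothetical stand-in, not Hive's actual substitution logic), the resolution amounts to:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative stand-in for ${system:prop} expansion in test queries.
public class SystemTokenExpander {
  private static final Pattern TOKEN = Pattern.compile("\\$\\{system:([^}]+)\\}");

  public static String expand(String query) {
    Matcher m = TOKEN.matcher(query);
    StringBuffer sb = new StringBuffer();
    while (m.find()) {
      // fall back to the raw token when the property is unset
      String value = System.getProperty(m.group(1), m.group(0));
      m.appendReplacement(sb, Matcher.quoteReplacement(value));
    }
    m.appendTail(sb);
    return sb.toString();
  }

  public static void main(String[] args) {
    // placeholder values for demonstration only
    System.setProperty("maven.local.repository", "/home/user/.m2/repository");
    System.setProperty("hive.version", "1.0.0-SNAPSHOT");
    System.out.println(expand("ADD JAR ${system:maven.local.repository}"
        + "/org/apache/hive/hive-it-test-serde/${system:hive.version}"
        + "/hive-it-test-serde-${system:hive.version}.jar;"));
  }
}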
diff --git a/ql/src/test/queries/clientpositive/input19.q b/ql/src/test/queries/clientpositive/input19.q
index fec44e9..3dc7fec 100644
--- a/ql/src/test/queries/clientpositive/input19.q
+++ b/ql/src/test/queries/clientpositive/input19.q
@@ -1,5 +1,5 @@
create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES ( 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol', 'quote.delim'= '("|\\[|\\])', 'field.delim'=' ', 'serialization.null.format'='-' ) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog;
+LOAD DATA LOCAL INPATH '../../data/files/apache.access.log' INTO TABLE apachelog;
SELECT a.* FROM apachelog a;
diff --git a/ql/src/test/queries/clientpositive/input20.q b/ql/src/test/queries/clientpositive/input20.q
index 0566ab1..0ea9fda8 100644
--- a/ql/src/test/queries/clientpositive/input20.q
+++ b/ql/src/test/queries/clientpositive/input20.q
@@ -1,6 +1,6 @@
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-ADD FILE ../data/scripts/input20_script;
+ADD FILE ../../data/scripts/input20_script;
EXPLAIN
FROM (
diff --git a/ql/src/test/queries/clientpositive/input21.q b/ql/src/test/queries/clientpositive/input21.q
index d7c814e..43cd01e 100644
--- a/ql/src/test/queries/clientpositive/input21.q
+++ b/ql/src/test/queries/clientpositive/input21.q
@@ -1,7 +1,7 @@
CREATE TABLE src_null(a STRING, b STRING, c STRING, d STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE src_null;
+LOAD DATA LOCAL INPATH '../../data/files/null.txt' INTO TABLE src_null;
EXPLAIN SELECT * FROM src_null DISTRIBUTE BY c SORT BY d;
diff --git a/ql/src/test/queries/clientpositive/input22.q b/ql/src/test/queries/clientpositive/input22.q
index 853947b..8803e4d 100644
--- a/ql/src/test/queries/clientpositive/input22.q
+++ b/ql/src/test/queries/clientpositive/input22.q
@@ -1,5 +1,5 @@
CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4;
EXPLAIN
SELECT a.KEY2
diff --git a/ql/src/test/queries/clientpositive/input33.q b/ql/src/test/queries/clientpositive/input33.q
index 7ab1751..7eae921 100644
--- a/ql/src/test/queries/clientpositive/input33.q
+++ b/ql/src/test/queries/clientpositive/input33.q
@@ -1,6 +1,6 @@
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-ADD FILE ../data/scripts/input20_script;
+ADD FILE ../../data/scripts/input20_script;
EXPLAIN
FROM (
diff --git a/ql/src/test/queries/clientpositive/input37.q b/ql/src/test/queries/clientpositive/input37.q
index 6fd136a..6ded61a 100644
--- a/ql/src/test/queries/clientpositive/input37.q
+++ b/ql/src/test/queries/clientpositive/input37.q
@@ -1,6 +1,6 @@
create table documents(contents string) stored as textfile;
-LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE documents;
+LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE documents;
select url, count(1)
@@ -8,7 +8,7 @@
(
FROM documents
MAP documents.contents
- USING 'java -cp ../build/ql/test/classes org.apache.hadoop.hive.scripts.extracturl' AS (url, count)
+ USING 'java -cp ../util/target/classes/ org.apache.hadoop.hive.scripts.extracturl' AS (url, count)
) subq
group by url;
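Only the classpath changed in the MAP ... USING clause above (Ant's build/ql/test/classes becomes Maven's util/target/classes); the streaming contract is untouched: Hive pipes rows to the child process on stdin and reads tab-separated output columns back on stdout. A minimal sketch of a mapper with that shape follows; it is a hypothetical stand-in for the real extracturl class, which lives in the util module and is not shown in this diff:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical stdin-to-stdout mapper of the shape TRANSFORM/MAP ... USING expects.
public class ExtractUrlSketch {
  private static final Pattern URL = Pattern.compile("https?://\\S+");

  public static void main(String[] args) throws IOException {
    BufferedReader in = new BufferedReader(
        new InputStreamReader(System.in, StandardCharsets.UTF_8));
    String line;
    while ((line = in.readLine()) != null) {        // one input row per line
      Matcher m = URL.matcher(line);
      while (m.find()) {
        // emit a (url, count) pair as a tab-separated output row
        System.out.println(m.group() + "\t" + 1);
      }
    }
  }
}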
diff --git a/ql/src/test/queries/clientpositive/input3_limit.q b/ql/src/test/queries/clientpositive/input3_limit.q
index 3584820..f983aca 100644
--- a/ql/src/test/queries/clientpositive/input3_limit.q
+++ b/ql/src/test/queries/clientpositive/input3_limit.q
@@ -1,7 +1,7 @@
CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, value STRING);
diff --git a/ql/src/test/queries/clientpositive/input4.q b/ql/src/test/queries/clientpositive/input4.q
index 08d6d97..1186bbb 100644
--- a/ql/src/test/queries/clientpositive/input4.q
+++ b/ql/src/test/queries/clientpositive/input4.q
@@ -1,7 +1,7 @@
CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
EXPLAIN
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4;
EXPLAIN FORMATTED
SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias;
SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
diff --git a/ql/src/test/queries/clientpositive/input40.q b/ql/src/test/queries/clientpositive/input40.q
index 4166cb5..ab187b5 100644
--- a/ql/src/test/queries/clientpositive/input40.q
+++ b/ql/src/test/queries/clientpositive/input40.q
@@ -2,15 +2,15 @@
create table tmp_insert_test (key string, value string) stored as textfile;
-load data local inpath '../data/files/kv1.txt' into table tmp_insert_test;
+load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test;
select * from tmp_insert_test;
create table tmp_insert_test_p (key string, value string) partitioned by (ds string) stored as textfile;
-load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01');
+load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01');
select * from tmp_insert_test_p where ds= '2009-08-01'
order by key, value;
-load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01');
+load data local inpath '../../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01');
select * from tmp_insert_test_p where ds= '2009-08-01'
order by key, value;
diff --git a/ql/src/test/queries/clientpositive/input43.q b/ql/src/test/queries/clientpositive/input43.q
index 5512dc3..3182bbe 100644
--- a/ql/src/test/queries/clientpositive/input43.q
+++ b/ql/src/test/queries/clientpositive/input43.q
@@ -1,7 +1,7 @@
drop table tst_src1;
create table tst_src1 like src1;
-load data local inpath '../data/files/kv1.txt' into table tst_src1 ;
+load data local inpath '../../data/files/kv1.txt' into table tst_src1 ;
select count(1) from tst_src1;
-load data local inpath '../data/files/kv1.txt' into table tst_src1 ;
+load data local inpath '../../data/files/kv1.txt' into table tst_src1 ;
select count(1) from tst_src1;
drop table tst_src1;
diff --git a/ql/src/test/queries/clientpositive/input44.q b/ql/src/test/queries/clientpositive/input44.q
index 4557edc..2e975e5 100644
--- a/ql/src/test/queries/clientpositive/input44.q
+++ b/ql/src/test/queries/clientpositive/input44.q
@@ -3,4 +3,4 @@
SET hive.output.file.extension=.txt;
INSERT OVERWRITE TABLE dest SELECT src.* FROM src;
-dfs -cat ../build/ql/test/data/warehouse/dest/*.txt
\ No newline at end of file
+dfs -cat ${system:test.warehouse.dir}/dest/*.txt
diff --git a/ql/src/test/queries/clientpositive/input45.q b/ql/src/test/queries/clientpositive/input45.q
index 633a8c6..334da26 100644
--- a/ql/src/test/queries/clientpositive/input45.q
+++ b/ql/src/test/queries/clientpositive/input45.q
@@ -2,8 +2,8 @@
SET hive.output.file.extension=.txt;
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/x/y/z/' SELECT src.* FROM src;
+INSERT OVERWRITE DIRECTORY 'target/data/x/y/z/' SELECT src.* FROM src;
-dfs -cat ../build/ql/test/data/x/y/z/*.txt;
+dfs -cat ${system:build.dir}/data/x/y/z/*.txt;
-dfs -rmr ../build/ql/test/data/x;
\ No newline at end of file
+dfs -rmr ${system:build.dir}/data/x;
diff --git a/ql/src/test/queries/clientpositive/input4_cb_delim.q b/ql/src/test/queries/clientpositive/input4_cb_delim.q
index 8c57dd3..b18d60a 100644
--- a/ql/src/test/queries/clientpositive/input4_cb_delim.q
+++ b/ql/src/test/queries/clientpositive/input4_cb_delim.q
@@ -1,4 +1,4 @@
CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB;
+LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB;
SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB;
diff --git a/ql/src/test/queries/clientpositive/input_dfs.q b/ql/src/test/queries/clientpositive/input_dfs.q
index 4f5824d..b108cbd 100644
--- a/ql/src/test/queries/clientpositive/input_dfs.q
+++ b/ql/src/test/queries/clientpositive/input_dfs.q
@@ -1,2 +1,2 @@
-dfs -cat ../data/files/kv1.txt;
+dfs -cat ../../data/files/kv1.txt;
diff --git a/ql/src/test/queries/clientpositive/inputddl5.q b/ql/src/test/queries/clientpositive/inputddl5.q
index 9a7ca5d..87c55a2 100644
--- a/ql/src/test/queries/clientpositive/inputddl5.q
+++ b/ql/src/test/queries/clientpositive/inputddl5.q
@@ -1,7 +1,7 @@
-- test for internationalization
-- kv4.txt contains the utf-8 character 0xE982B5E993AE which we are verifying later on
CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5;
+LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE INPUTDDL5;
DESCRIBE INPUTDDL5;
SELECT INPUTDDL5.name from INPUTDDL5;
SELECT count(1) FROM INPUTDDL5 WHERE INPUTDDL5.name = _UTF-8 0xE982B5E993AE;
diff --git a/ql/src/test/queries/clientpositive/inputddl6.q b/ql/src/test/queries/clientpositive/inputddl6.q
index d33ab8d..6c70939 100644
--- a/ql/src/test/queries/clientpositive/inputddl6.q
+++ b/ql/src/test/queries/clientpositive/inputddl6.q
@@ -2,8 +2,8 @@
-- test for describe extended table partition
-- test for alter table drop partition
CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08');
DESCRIBE EXTENDED INPUTDDL6;
DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08');
SHOW PARTITIONS INPUTDDL6;
diff --git a/ql/src/test/queries/clientpositive/inputddl7.q b/ql/src/test/queries/clientpositive/inputddl7.q
index 8a73935..27e587a 100644
--- a/ql/src/test/queries/clientpositive/inputddl7.q
+++ b/ql/src/test/queries/clientpositive/inputddl7.q
@@ -3,22 +3,22 @@
CREATE TABLE T1(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1;
SELECT COUNT(1) FROM T1;
CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T2;
SELECT COUNT(1) FROM T2;
CREATE TABLE T3(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09');
SELECT COUNT(1) FROM T3 where T3.ds='2008-04-09';
CREATE TABLE T4(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09');
SELECT COUNT(1) FROM T4 where T4.ds='2008-04-09';
DESCRIBE EXTENDED T1;
diff --git a/ql/src/test/queries/clientpositive/insert1_overwrite_partitions.q b/ql/src/test/queries/clientpositive/insert1_overwrite_partitions.q
index 6ad70b5..6b00f97 100644
--- a/ql/src/test/queries/clientpositive/insert1_overwrite_partitions.q
+++ b/ql/src/test/queries/clientpositive/insert1_overwrite_partitions.q
@@ -1,8 +1,8 @@
CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (ds string,hr string);
-load data local inpath '../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11');
+load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11');
-load data local inpath '../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12');
+load data local inpath '../../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12');
CREATE TABLE destinTable (one string,two string) PARTITIONED BY (ds string,hr string);
diff --git a/ql/src/test/queries/clientpositive/insert2_overwrite_partitions.q b/ql/src/test/queries/clientpositive/insert2_overwrite_partitions.q
index 598d30e..bd1eb75 100644
--- a/ql/src/test/queries/clientpositive/insert2_overwrite_partitions.q
+++ b/ql/src/test/queries/clientpositive/insert2_overwrite_partitions.q
@@ -4,9 +4,9 @@
CREATE TABLE db1.sourceTable (one string,two string) PARTITIONED BY (ds string);
-load data local inpath '../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11');
+load data local inpath '../../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11');
-load data local inpath '../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11');
+load data local inpath '../../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11');
CREATE TABLE db2.destinTable (one string,two string) PARTITIONED BY (ds string);
diff --git a/ql/src/test/queries/clientpositive/insert_overwrite_local_directory_1.q b/ql/src/test/queries/clientpositive/insert_overwrite_local_directory_1.q
index 25c127f..6d069f5 100644
--- a/ql/src/test/queries/clientpositive/insert_overwrite_local_directory_1.q
+++ b/ql/src/test/queries/clientpositive/insert_overwrite_local_directory_1.q
@@ -1,40 +1,40 @@
-insert overwrite local directory '../data/files/local_src_table_1'
+insert overwrite local directory '../../data/files/local_src_table_1'
select * from src ;
-dfs -cat ../data/files/local_src_table_1/000000_0;
+dfs -cat ../../data/files/local_src_table_1/000000_0;
-insert overwrite local directory '../data/files/local_src_table_2'
+insert overwrite local directory '../../data/files/local_src_table_2'
row format delimited
FIELDS TERMINATED BY ':'
select * from src ;
-dfs -cat ../data/files/local_src_table_2/000000_0;
+dfs -cat ../../data/files/local_src_table_2/000000_0;
create table array_table (a array<string>, b array<string>)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
COLLECTION ITEMS TERMINATED BY ',';
-load data local inpath "../data/files/array_table.txt" overwrite into table array_table;
+load data local inpath "../../data/files/array_table.txt" overwrite into table array_table;
-insert overwrite local directory '../data/files/local_array_table_1'
+insert overwrite local directory '../../data/files/local_array_table_1'
select * from array_table;
-dfs -cat ../data/files/local_array_table_1/000000_0;
+dfs -cat ../../data/files/local_array_table_1/000000_0;
-insert overwrite local directory '../data/files/local_array_table_2'
+insert overwrite local directory '../../data/files/local_array_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select * from array_table;
-dfs -cat ../data/files/local_array_table_2/000000_0;
+dfs -cat ../../data/files/local_array_table_2/000000_0;
-insert overwrite local directory '../data/files/local_array_table_2_withfields'
+insert overwrite local directory '../../data/files/local_array_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select b,a from array_table;
-dfs -cat ../data/files/local_array_table_2_withfields/000000_0;
+dfs -cat ../../data/files/local_array_table_2_withfields/000000_0;
create table map_table (foo STRING , bar MAP<STRING, STRING>)
@@ -44,63 +44,63 @@
MAP KEYS TERMINATED BY ':'
STORED AS TEXTFILE;
-load data local inpath "../data/files/map_table.txt" overwrite into table map_table;
+load data local inpath "../../data/files/map_table.txt" overwrite into table map_table;
-insert overwrite local directory '../data/files/local_map_table_1'
+insert overwrite local directory '../../data/files/local_map_table_1'
select * from map_table;
-dfs -cat ../data/files/local_map_table_1/000000_0;
+dfs -cat ../../data/files/local_map_table_1/000000_0;
-insert overwrite local directory '../data/files/local_map_table_2'
+insert overwrite local directory '../../data/files/local_map_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
MAP KEYS TERMINATED BY '='
select * from map_table;
-dfs -cat ../data/files/local_map_table_2/000000_0;
+dfs -cat ../../data/files/local_map_table_2/000000_0;
-insert overwrite local directory '../data/files/local_map_table_2_withfields'
+insert overwrite local directory '../../data/files/local_map_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
MAP KEYS TERMINATED BY '='
select bar,foo from map_table;
-dfs -cat ../data/files/local_map_table_2_withfields/000000_0;
+dfs -cat ../../data/files/local_map_table_2_withfields/000000_0;
-insert overwrite local directory '../data/files/local_array_table_3'
+insert overwrite local directory '../../data/files/local_array_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from array_table;
-dfs -cat ../data/files/local_array_table_3/000000_0;
+dfs -cat ../../data/files/local_array_table_3/000000_0;
-insert overwrite local directory '../data/files/local_map_table_3'
+insert overwrite local directory '../../data/files/local_map_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from map_table;
-dfs -cat ../data/files/local_map_table_3/000000_0;
+dfs -cat ../../data/files/local_map_table_3/000000_0;
-insert overwrite local directory '../data/files/local_rctable'
+insert overwrite local directory '../../data/files/local_rctable'
STORED AS RCFILE
select value,key from src;
dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/local_rctable/temp;
dfs -rmr ${system:test.tmp.dir}/local_rctable;
dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/local_rctable;
-dfs -put ../data/files/local_rctable/000000_0 ${system:test.tmp.dir}/local_rctable/000000_0;
+dfs -put ../../data/files/local_rctable/000000_0 ${system:test.tmp.dir}/local_rctable/000000_0;
create external table local_rctable(value string, key string)
STORED AS RCFILE
LOCATION '${system:test.tmp.dir}/local_rctable';
-insert overwrite local directory '../data/files/local_rctable_out'
+insert overwrite local directory '../../data/files/local_rctable_out'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
select key,value from local_rctable;
-dfs -cat ../data/files/local_rctable_out/000000_0;
+dfs -cat ../../data/files/local_rctable_out/000000_0;
drop table local_rctable;
drop table array_table;
diff --git a/ql/src/test/queries/clientpositive/join_1to1.q b/ql/src/test/queries/clientpositive/join_1to1.q
index b403814..4d1ae21 100644
--- a/ql/src/test/queries/clientpositive/join_1to1.q
+++ b/ql/src/test/queries/clientpositive/join_1to1.q
@@ -1,9 +1,9 @@
CREATE TABLE join_1to1_1(key1 int, key2 int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE join_1to1_1;
+LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE join_1to1_1;
CREATE TABLE join_1to1_2(key1 int, key2 int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE join_1to1_2;
+LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE join_1to1_2;
set hive.outerjoin.supports.filters=false;
diff --git a/ql/src/test/queries/clientpositive/join_array.q b/ql/src/test/queries/clientpositive/join_array.q
index e4d95a5..81e984e 100644
--- a/ql/src/test/queries/clientpositive/join_array.q
+++ b/ql/src/test/queries/clientpositive/join_array.q
@@ -1,8 +1,8 @@
create table tinyA(a bigint, b bigint) stored as textfile;
create table tinyB(a bigint, bList array<int>) stored as textfile;
-load data local inpath '../data/files/tiny_a.txt' into table tinyA;
-load data local inpath '../data/files/tiny_b.txt' into table tinyB;
+load data local inpath '../../data/files/tiny_a.txt' into table tinyA;
+load data local inpath '../../data/files/tiny_b.txt' into table tinyB;
select * from tinyA;
select * from tinyB;
diff --git a/ql/src/test/queries/clientpositive/join_casesensitive.q b/ql/src/test/queries/clientpositive/join_casesensitive.q
index a9b69c5..0c0962c 100644
--- a/ql/src/test/queries/clientpositive/join_casesensitive.q
+++ b/ql/src/test/queries/clientpositive/join_casesensitive.q
@@ -1,8 +1,8 @@
CREATE TABLE joinone(key1 int, key2 int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE joinone;
+LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE joinone;
CREATE TABLE joinTwo(key1 int, key2 int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE joinTwo;
+LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE joinTwo;
SELECT * FROM joinone JOIN joinTwo ON(joinone.key2=joinTwo.key2) ORDER BY joinone.key1 ASC, joinone.key2 ASC, joinone.value ASC, joinTwo.key1 ASC, joinTwo.key2 ASC, joinTwo.value ASC;
diff --git a/ql/src/test/queries/clientpositive/join_filters.q b/ql/src/test/queries/clientpositive/join_filters.q
index d54aa95..49b6c6f 100644
--- a/ql/src/test/queries/clientpositive/join_filters.q
+++ b/ql/src/test/queries/clientpositive/join_filters.q
@@ -1,5 +1,5 @@
CREATE TABLE myinput1(key int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1;
+LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1;
SELECT * FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC;
SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC;
@@ -55,10 +55,10 @@
CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2;
SET hive.optimize.bucketmapjoin = true;
SET hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/join_hive_626.q b/ql/src/test/queries/clientpositive/join_hive_626.q
index 31b0c8c..c4c239c 100644
--- a/ql/src/test/queries/clientpositive/join_hive_626.q
+++ b/ql/src/test/queries/clientpositive/join_hive_626.q
@@ -13,9 +13,9 @@
create table hive_count (bar_id int, n int) row format delimited fields
terminated by ',' stored as textfile;
-load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo;
-load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar;
-load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count;
+load data local inpath '../../data/files/hive_626_foo.txt' overwrite into table hive_foo;
+load data local inpath '../../data/files/hive_626_bar.txt' overwrite into table hive_bar;
+load data local inpath '../../data/files/hive_626_count.txt' overwrite into table hive_count;
explain
select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
diff --git a/ql/src/test/queries/clientpositive/join_nulls.q b/ql/src/test/queries/clientpositive/join_nulls.q
index 4ff6071..047a769 100644
--- a/ql/src/test/queries/clientpositive/join_nulls.q
+++ b/ql/src/test/queries/clientpositive/join_nulls.q
@@ -1,5 +1,5 @@
CREATE TABLE myinput1(key int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1;
SELECT * FROM myinput1 a JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC;
SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC;
@@ -42,10 +42,10 @@
CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2;
-LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2;
SET hive.optimize.bucketmapJOIN = true;
SET hive.optimize.bucketmapJOIN.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/join_nullsafe.q b/ql/src/test/queries/clientpositive/join_nullsafe.q
index 05b57bc..5e22517 100644
--- a/ql/src/test/queries/clientpositive/join_nullsafe.q
+++ b/ql/src/test/queries/clientpositive/join_nullsafe.q
@@ -1,7 +1,7 @@
set hive.nullsafe.equijoin=true;
CREATE TABLE myinput1(key int, value int);
-LOAD DATA LOCAL INPATH '../data/files/in8.txt' INTO TABLE myinput1;
+LOAD DATA LOCAL INPATH '../../data/files/in8.txt' INTO TABLE myinput1;
-- merging
explain select * from myinput1 a join myinput1 b on a.key<=>b.value ORDER BY a.key, a.value, b.key, b.value;
@@ -31,10 +31,10 @@
-- smbs
CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
-LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input1;
-LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input2;
-LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input1;
+LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input2;
+LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input2;
SET hive.optimize.bucketmapJOIN = true;
SET hive.optimize.bucketmapJOIN.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/join_reorder.q b/ql/src/test/queries/clientpositive/join_reorder.q
index b92a79b..b209c50 100644
--- a/ql/src/test/queries/clientpositive/join_reorder.q
+++ b/ql/src/test/queries/clientpositive/join_reorder.q
@@ -6,9 +6,9 @@
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
EXPLAIN FROM T1 a JOIN src c ON c.key+1=a.key
SELECT a.key, a.val, c.key;
diff --git a/ql/src/test/queries/clientpositive/join_reorder2.q b/ql/src/test/queries/clientpositive/join_reorder2.q
index 238c0ad..ca1e65e 100644
--- a/ql/src/test/queries/clientpositive/join_reorder2.q
+++ b/ql/src/test/queries/clientpositive/join_reorder2.q
@@ -8,10 +8,10 @@
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T4(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4;
EXPLAIN
SELECT /*+ STREAMTABLE(a) */ *
diff --git a/ql/src/test/queries/clientpositive/join_reorder3.q b/ql/src/test/queries/clientpositive/join_reorder3.q
index 1bda28f..994be16 100644
--- a/ql/src/test/queries/clientpositive/join_reorder3.q
+++ b/ql/src/test/queries/clientpositive/join_reorder3.q
@@ -8,10 +8,10 @@
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T4(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4;
EXPLAIN
SELECT /*+ STREAMTABLE(a,c) */ *
diff --git a/ql/src/test/queries/clientpositive/join_reorder4.q b/ql/src/test/queries/clientpositive/join_reorder4.q
index 126f356..16ef204 100644
--- a/ql/src/test/queries/clientpositive/join_reorder4.q
+++ b/ql/src/test/queries/clientpositive/join_reorder4.q
@@ -2,9 +2,9 @@
CREATE TABLE T2(key2 STRING, val2 STRING) STORED AS TEXTFILE;
CREATE TABLE T3(key3 STRING, val3 STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
set hive.auto.convert.join=true;
diff --git a/ql/src/test/queries/clientpositive/join_star.q b/ql/src/test/queries/clientpositive/join_star.q
index 8314161..c95a13b 100644
--- a/ql/src/test/queries/clientpositive/join_star.q
+++ b/ql/src/test/queries/clientpositive/join_star.q
@@ -7,14 +7,14 @@
create table dim6(f11 int, f12 int);
create table dim7(f13 int, f14 int);
-LOAD DATA LOCAL INPATH '../data/files/fact-data.txt' INTO TABLE fact;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim1;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim2;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim3;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim4;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim5;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim6;
-LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim7;
+LOAD DATA LOCAL INPATH '../../data/files/fact-data.txt' INTO TABLE fact;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim1;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim2;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim3;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim4;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim5;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim6;
+LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim7;
set hive.auto.convert.join=true;
set hive.auto.convert.join.noconditionaltask=true;
diff --git a/ql/src/test/queries/clientpositive/leadlag.q b/ql/src/test/queries/clientpositive/leadlag.q
index f497667..5623cbf 100644
--- a/ql/src/test/queries/clientpositive/leadlag.q
+++ b/ql/src/test/queries/clientpositive/leadlag.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
--1. testLagWithPTFWindowing
select p_mfgr, p_name,
diff --git a/ql/src/test/queries/clientpositive/leadlag_queries.q b/ql/src/test/queries/clientpositive/leadlag_queries.q
index 6ef3bdb..e53abce 100644
--- a/ql/src/test/queries/clientpositive/leadlag_queries.q
+++ b/ql/src/test/queries/clientpositive/leadlag_queries.q
@@ -11,7 +11,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- 1. testLeadUDAF
select p_mfgr, p_retailprice,
diff --git a/ql/src/test/queries/clientpositive/leftsemijoin.q b/ql/src/test/queries/clientpositive/leftsemijoin.q
index abe3d33..0c16fb8 100644
--- a/ql/src/test/queries/clientpositive/leftsemijoin.q
+++ b/ql/src/test/queries/clientpositive/leftsemijoin.q
@@ -9,9 +9,9 @@
CREATE TABLE things (id INT, name STRING) partitioned by (ds string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
-load data local inpath '../data/files/sales.txt' INTO TABLE sales;
-load data local inpath '../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23');
-load data local inpath '../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24');
+load data local inpath '../../data/files/sales.txt' INTO TABLE sales;
+load data local inpath '../../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23');
+load data local inpath '../../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24');
SELECT name,id FROM sales ORDER BY name ASC, id ASC;
diff --git a/ql/src/test/queries/clientpositive/leftsemijoin_mr.q b/ql/src/test/queries/clientpositive/leftsemijoin_mr.q
index 5813ca3..c9ebe0e 100644
--- a/ql/src/test/queries/clientpositive/leftsemijoin_mr.q
+++ b/ql/src/test/queries/clientpositive/leftsemijoin_mr.q
@@ -1,7 +1,7 @@
CREATE TABLE T1(key INT);
-LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1;
CREATE TABLE T2(key INT);
-LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2;
-- Run this query using TestMinimrCliDriver
diff --git a/ql/src/test/queries/clientpositive/load_binary_data.q b/ql/src/test/queries/clientpositive/load_binary_data.q
index 7da3631..653918a 100644
--- a/ql/src/test/queries/clientpositive/load_binary_data.q
+++ b/ql/src/test/queries/clientpositive/load_binary_data.q
@@ -4,7 +4,7 @@
STORED AS TEXTFILE;
-- this query loads native binary data, stores in a table and then queries it. Note that string.txt contains binary data. Also uses transform clause and then length udf.
-LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE mytable;
+LOAD DATA LOCAL INPATH '../../data/files/string.txt' INTO TABLE mytable;
create table dest1 (key binary, value int);
diff --git a/ql/src/test/queries/clientpositive/load_exist_part_authsuccess.q b/ql/src/test/queries/clientpositive/load_exist_part_authsuccess.q
index 6d2a8b8..35eb219 100644
--- a/ql/src/test/queries/clientpositive/load_exist_part_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/load_exist_part_authsuccess.q
@@ -2,4 +2,4 @@
alter table hive_test_src add partition (pcol1 = 'test_part');
set hive.security.authorization.enabled=true;
grant Update on table hive_test_src to user hive_test_user;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
diff --git a/ql/src/test/queries/clientpositive/load_file_with_space_in_the_name.q b/ql/src/test/queries/clientpositive/load_file_with_space_in_the_name.q
index 3b8951a..672d5d2 100644
--- a/ql/src/test/queries/clientpositive/load_file_with_space_in_the_name.q
+++ b/ql/src/test/queries/clientpositive/load_file_with_space_in_the_name.q
@@ -2,4 +2,4 @@
CREATE TABLE load_file_with_space_in_the_name(name STRING, age INT);
-LOAD DATA LOCAL INPATH '../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name;
+LOAD DATA LOCAL INPATH '../../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name;
diff --git a/ql/src/test/queries/clientpositive/load_fs.q b/ql/src/test/queries/clientpositive/load_fs.q
index c1ac29c..2f06ca4 100644
--- a/ql/src/test/queries/clientpositive/load_fs.q
+++ b/ql/src/test/queries/clientpositive/load_fs.q
@@ -2,9 +2,9 @@
create table load_overwrite (key string, value string) stored as textfile location 'file:${system:test.tmp.dir}/load_overwrite';
create table load_overwrite2 (key string, value string) stored as textfile location 'file:${system:test.tmp.dir}/load2_overwrite2';
-load data local inpath '../data/files/kv1.txt' into table load_overwrite;
-load data local inpath '../data/files/kv2.txt' into table load_overwrite;
-load data local inpath '../data/files/kv3.txt' into table load_overwrite;
+load data local inpath '../../data/files/kv1.txt' into table load_overwrite;
+load data local inpath '../../data/files/kv2.txt' into table load_overwrite;
+load data local inpath '../../data/files/kv3.txt' into table load_overwrite;
show table extended like load_overwrite;
desc extended load_overwrite;
diff --git a/ql/src/test/queries/clientpositive/load_fs2.q b/ql/src/test/queries/clientpositive/load_fs2.q
index 7255324..a75758a 100644
--- a/ql/src/test/queries/clientpositive/load_fs2.q
+++ b/ql/src/test/queries/clientpositive/load_fs2.q
@@ -4,17 +4,17 @@
create table result (key string, value string);
create table loader (key string, value string);
-load data local inpath '../data/files/kv1.txt' into table loader;
+load data local inpath '../../data/files/kv1.txt' into table loader;
load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result;
show table extended like result;
-load data local inpath '../data/files/kv1.txt' into table loader;
+load data local inpath '../../data/files/kv1.txt' into table loader;
load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result;
show table extended like result;
-load data local inpath '../data/files/kv1.txt' into table loader;
+load data local inpath '../../data/files/kv1.txt' into table loader;
load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result;
show table extended like result;
diff --git a/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q b/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q
index 31dbc41..d4520e2 100644
--- a/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q
+++ b/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q
@@ -1,6 +1,6 @@
dfs ${system:test.dfs.mkdir} hdfs:///tmp/test/;
-dfs -copyFromLocal ../data/files hdfs:///tmp/test/.;
+dfs -copyFromLocal ../../data/files hdfs:///tmp/test/.;
CREATE TABLE load_file_with_space_in_the_name(name STRING, age INT);
LOAD DATA INPATH 'hdfs:///tmp/test/files/person age.txt' INTO TABLE load_file_with_space_in_the_name;
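As with import_exported_table.q earlier in this series, the local side of dfs -copyFromLocal now points two directories up. Copying the whole files directory also sidesteps shell quoting around the space in person age.txt, which the subsequent LOAD DATA INPATH addresses through a quoted hdfs:/// URI. For reference, a hedged sketch of the programmatic equivalent using the Hadoop FileSystem API (paths are examples, not part of the test):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative equivalent of: dfs -copyFromLocal ../../data/files hdfs:///tmp/test/.
public class CopyTestData {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(URI.create("hdfs:///"), new Configuration());
    // recursive copy of the local directory into HDFS, space-containing names included
    fs.copyFromLocalFile(new Path("../../data/files"), new Path("/tmp/test/"));
    fs.close();
  }
}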
diff --git a/ql/src/test/queries/clientpositive/load_nonpart_authsuccess.q b/ql/src/test/queries/clientpositive/load_nonpart_authsuccess.q
index 40d8210..fdee451 100644
--- a/ql/src/test/queries/clientpositive/load_nonpart_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/load_nonpart_authsuccess.q
@@ -1,4 +1,4 @@
create table hive_test_src ( col1 string ) stored as textfile;
set hive.security.authorization.enabled=true;
grant Update on table hive_test_src to user hive_test_user;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src ;
diff --git a/ql/src/test/queries/clientpositive/load_overwrite.q b/ql/src/test/queries/clientpositive/load_overwrite.q
index 73853f1..080c784 100644
--- a/ql/src/test/queries/clientpositive/load_overwrite.q
+++ b/ql/src/test/queries/clientpositive/load_overwrite.q
@@ -5,11 +5,11 @@
select count(*) from load_overwrite;
-load data local inpath '../data/files/kv1.txt' into table load_overwrite;
+load data local inpath '../../data/files/kv1.txt' into table load_overwrite;
show table extended like load_overwrite;
select count(*) from load_overwrite;
-load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite;
+load data local inpath '../../data/files/kv1.txt' overwrite into table load_overwrite;
show table extended like load_overwrite;
select count(*) from load_overwrite;
diff --git a/ql/src/test/queries/clientpositive/load_part_authsuccess.q b/ql/src/test/queries/clientpositive/load_part_authsuccess.q
index ff54324..cee5873 100644
--- a/ql/src/test/queries/clientpositive/load_part_authsuccess.q
+++ b/ql/src/test/queries/clientpositive/load_part_authsuccess.q
@@ -1,4 +1,4 @@
create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile;
set hive.security.authorization.enabled=true;
grant Update on table hive_test_src to user hive_test_user;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part');
diff --git a/ql/src/test/queries/clientpositive/loadpart1.q b/ql/src/test/queries/clientpositive/loadpart1.q
index 0813bb2..735befe 100644
--- a/ql/src/test/queries/clientpositive/loadpart1.q
+++ b/ql/src/test/queries/clientpositive/loadpart1.q
@@ -2,7 +2,7 @@
create table hive_test_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src ;
create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src ;
diff --git a/ql/src/test/queries/clientpositive/loadpart_err.q b/ql/src/test/queries/clientpositive/loadpart_err.q
index 6e4df21..cc9c1fe 100644
--- a/ql/src/test/queries/clientpositive/loadpart_err.q
+++ b/ql/src/test/queries/clientpositive/loadpart_err.q
@@ -1,6 +1,6 @@
set hive.cli.errors.ignore=true;
-ADD FILE ../data/scripts/error_script;
+ADD FILE ../../data/scripts/error_script;
-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19, 0.20, 0.20S, 0.23)
-- (this test is flaky so it is currently disabled for all Hadoop versions)
@@ -14,7 +14,7 @@
DESCRIBE loadpart1;
SHOW PARTITIONS loadpart1;
-LOAD DATA LOCAL INPATH '../data1/files/kv1.txt' INTO TABLE loadpart1 PARTITION(ds='2009-05-05');
+LOAD DATA LOCAL INPATH '../../data1/files/kv1.txt' INTO TABLE loadpart1 PARTITION(ds='2009-05-05');
SHOW PARTITIONS loadpart1;
diff --git a/ql/src/test/queries/clientpositive/mapjoin_subquery2.q b/ql/src/test/queries/clientpositive/mapjoin_subquery2.q
index 9980946..aed8990 100644
--- a/ql/src/test/queries/clientpositive/mapjoin_subquery2.q
+++ b/ql/src/test/queries/clientpositive/mapjoin_subquery2.q
@@ -11,9 +11,9 @@
CREATE TABLE z (id INT, name STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
-load data local inpath '../data/files/x.txt' INTO TABLE x;
-load data local inpath '../data/files/y.txt' INTO TABLE y;
-load data local inpath '../data/files/z.txt' INTO TABLE z;
+load data local inpath '../../data/files/x.txt' INTO TABLE x;
+load data local inpath '../../data/files/y.txt' INTO TABLE y;
+load data local inpath '../../data/files/z.txt' INTO TABLE z;
set hive.auto.convert.join=true;
set hive.auto.convert.join.noconditionaltask=true;
diff --git a/ql/src/test/queries/clientpositive/merge_dynamic_partition.q b/ql/src/test/queries/clientpositive/merge_dynamic_partition.q
index 1379426..5be02aa 100644
--- a/ql/src/test/queries/clientpositive/merge_dynamic_partition.q
+++ b/ql/src/test/queries/clientpositive/merge_dynamic_partition.q
@@ -5,10 +5,10 @@
create table merge_dynamic_part like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
set hive.input.format=org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
set hive.merge.mapfiles=false;
diff --git a/ql/src/test/queries/clientpositive/merge_dynamic_partition2.q b/ql/src/test/queries/clientpositive/merge_dynamic_partition2.q
index b51c70e..5f36491 100644
--- a/ql/src/test/queries/clientpositive/merge_dynamic_partition2.q
+++ b/ql/src/test/queries/clientpositive/merge_dynamic_partition2.q
@@ -5,12 +5,12 @@
create table merge_dynamic_part like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
diff --git a/ql/src/test/queries/clientpositive/merge_dynamic_partition3.q b/ql/src/test/queries/clientpositive/merge_dynamic_partition3.q
index b3bcf01..43be59e 100644
--- a/ql/src/test/queries/clientpositive/merge_dynamic_partition3.q
+++ b/ql/src/test/queries/clientpositive/merge_dynamic_partition3.q
@@ -5,20 +5,20 @@
create table merge_dynamic_part like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
-load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
-load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
-load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
+load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
+load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
+load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
+load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
show partitions srcpart_merge_dp;
diff --git a/ql/src/test/queries/clientpositive/merge_dynamic_partition4.q b/ql/src/test/queries/clientpositive/merge_dynamic_partition4.q
index ef769a0..5897170 100644
--- a/ql/src/test/queries/clientpositive/merge_dynamic_partition4.q
+++ b/ql/src/test/queries/clientpositive/merge_dynamic_partition4.q
@@ -9,15 +9,15 @@
create table merge_dynamic_part like srcpart;
alter table merge_dynamic_part set fileformat RCFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
insert overwrite table srcpart_merge_dp_rc partition (ds = '2008-04-08', hr)
select key, value, hr from srcpart_merge_dp where ds = '2008-04-08';
diff --git a/ql/src/test/queries/clientpositive/merge_dynamic_partition5.q b/ql/src/test/queries/clientpositive/merge_dynamic_partition5.q
index a196fa0..9f64724 100644
--- a/ql/src/test/queries/clientpositive/merge_dynamic_partition5.q
+++ b/ql/src/test/queries/clientpositive/merge_dynamic_partition5.q
@@ -8,12 +8,12 @@
create table merge_dynamic_part like srcpart;
alter table merge_dynamic_part set fileformat RCFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
insert overwrite table srcpart_merge_dp_rc partition (ds = '2008-04-08', hr)
select key, value, hr from srcpart_merge_dp where ds = '2008-04-08';
diff --git a/ql/src/test/queries/clientpositive/metadata_export_drop.q b/ql/src/test/queries/clientpositive/metadata_export_drop.q
index 41be152..e2da61a 100644
--- a/ql/src/test/queries/clientpositive/metadata_export_drop.q
+++ b/ql/src/test/queries/clientpositive/metadata_export_drop.q
@@ -1,8 +1,8 @@
create table tmp_meta_export_listener_drop_test (foo string);
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/HIVE-3427;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/data/exports/HIVE-3427;
set hive.metastore.pre.event.listeners=org.apache.hadoop.hive.ql.parse.MetaDataExportListener;
-set hive.metadata.export.location=../build/ql/test/data/exports/HIVE-3427;
+set hive.metadata.export.location=${system:test.tmp.dir}/data/exports/HIVE-3427;
set hive.move.exported.metadata.to.trash=false;
drop table tmp_meta_export_listener_drop_test;
-dfs -rmr ../build/ql/test/data/exports/HIVE-3427;
+dfs -rmr ${system:test.tmp.dir}/data/exports/HIVE-3427;
set hive.metastore.pre.event.listeners=;
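Instead of pointing into the old Ant build tree, the export test now resolves its scratch space through Hive's system: namespace, which substitutes JVM system properties into the script before execution. A hedged sketch of that interpolation (HIVE-3427 is the directory name the real test uses; 'example' below is a placeholder, and test.dfs.mkdir is assumed to be set by the test driver to the version-appropriate mkdir flags):

    -- ${system:NAME} is replaced with the value of the JVM system property NAME
    dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/data/exports/example;
    set hive.metadata.export.location=${system:test.tmp.dir}/data/exports/example;
    -- clean up afterwards through the same interpolated root
    dfs -rmr ${system:test.tmp.dir}/data/exports/example;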
diff --git a/ql/src/test/queries/clientpositive/nested_complex.q b/ql/src/test/queries/clientpositive/nested_complex.q
index b94fbb7..6fd76b8 100644
--- a/ql/src/test/queries/clientpositive/nested_complex.q
+++ b/ql/src/test/queries/clientpositive/nested_complex.q
@@ -17,6 +17,6 @@
describe extended nestedcomplex;
-load data local inpath '../data/files/nested_complex.txt' overwrite into table nestedcomplex;
+load data local inpath '../../data/files/nested_complex.txt' overwrite into table nestedcomplex;
select * from nestedcomplex sort by simple_int;
diff --git a/ql/src/test/queries/clientpositive/newline.q b/ql/src/test/queries/clientpositive/newline.q
index 722ecf6..11168fc 100644
--- a/ql/src/test/queries/clientpositive/newline.q
+++ b/ql/src/test/queries/clientpositive/newline.q
@@ -1,4 +1,4 @@
-add file ../data/scripts/newline.py;
+add file ../../data/scripts/newline.py;
set hive.transform.escape.input=true;
create table tmp_tmp(key string, value string) stored as rcfile;
@@ -10,10 +10,10 @@
drop table tmp_tmp;
-add file ../data/scripts/escapednewline.py;
-add file ../data/scripts/escapedtab.py;
-add file ../data/scripts/doubleescapedtab.py;
-add file ../data/scripts/escapedcarriagereturn.py;
+add file ../../data/scripts/escapednewline.py;
+add file ../../data/scripts/escapedtab.py;
+add file ../../data/scripts/doubleescapedtab.py;
+add file ../../data/scripts/escapedcarriagereturn.py;
create table tmp_tmp(key string, value string) stored as rcfile;
insert overwrite table tmp_tmp
diff --git a/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q b/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q
index 9cb89da..e33b4bf 100644
--- a/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q
+++ b/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q
@@ -1,12 +1,12 @@
CREATE TABLE table(string string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table;
+LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE table;
SELECT table, count(1)
FROM
(
FROM table
SELECT TRANSFORM (table.string)
- USING 'java -cp ../build/ql/test/classes org.apache.hadoop.hive.scripts.extracturl' AS (table, count)
+ USING 'java -cp ../util/target/classes/ org.apache.hadoop.hive.scripts.extracturl' AS (table, count)
) subq
GROUP BY table;
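The only non-path change here repoints the TRANSFORM command's classpath from the Ant output directory to Maven's util/target/classes/. TRANSFORM itself streams the selected columns to the external command on stdin and parses its tab-separated stdout back into rows; a minimal sketch using 'cat', the same passthrough command other tests in this patch use (src is the harness's standard key/value table):

    -- each input row is serialized, piped through the command, and re-parsed
    SELECT TRANSFORM (key) USING 'cat' AS (key_copy)
    FROM src;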
diff --git a/ql/src/test/queries/clientpositive/null_column.q b/ql/src/test/queries/clientpositive/null_column.q
index fa4a863..4b43d60 100644
--- a/ql/src/test/queries/clientpositive/null_column.q
+++ b/ql/src/test/queries/clientpositive/null_column.q
@@ -3,7 +3,7 @@
create table temp_null(a int) stored as textfile;
-load data local inpath '../data/files/test.dat' overwrite into table temp_null;
+load data local inpath '../../data/files/test.dat' overwrite into table temp_null;
select null, null from temp_null;
@@ -15,8 +15,8 @@
insert overwrite table tt_b select null, null from temp_null;
select * from tt_b;
-insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null;
-dfs -cat ../build/ql/test/data/warehouse/null_columns.out/*;
+insert overwrite directory "target/warehouse/null_columns.out" select null, null from temp_null;
+dfs -cat ${system:test.warehouse.dir}/null_columns.out/*;
create table temp_null2 (key string, value string) partitioned by (ds string);
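Note the asymmetry the new lines introduce: the INSERT writes through a path relative to the test's working directory, while the dfs -cat reads the same location back through the test.warehouse.dir property. Both are assumed to resolve to Maven's target/warehouse; a sketch of the pairing (example.out is a placeholder name):

    -- write through a working-directory-relative path ...
    INSERT OVERWRITE DIRECTORY "target/warehouse/example.out" SELECT * FROM src;
    -- ... and read it back through the property the driver points at that directory
    dfs -cat ${system:test.warehouse.dir}/example.out/*;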
diff --git a/ql/src/test/queries/clientpositive/nullgroup3.q b/ql/src/test/queries/clientpositive/nullgroup3.q
index a5bc9ff..19e5b10 100644
--- a/ql/src/test/queries/clientpositive/nullgroup3.q
+++ b/ql/src/test/queries/clientpositive/nullgroup3.q
@@ -1,28 +1,28 @@
CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08');
explain
select count(1) from tstparttbl;
select count(1) from tstparttbl;
CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08');
explain
select count(1) from tstparttbl2;
select count(1) from tstparttbl2;
DROP TABLE tstparttbl;
CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08');
explain
select count(1) from tstparttbl;
select count(1) from tstparttbl;
DROP TABLE tstparttbl2;
CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09');
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08');
explain
select count(1) from tstparttbl2;
select count(1) from tstparttbl2;
diff --git a/ql/src/test/queries/clientpositive/nullgroup5.q b/ql/src/test/queries/clientpositive/nullgroup5.q
index 12773b6..b4b68fb 100644
--- a/ql/src/test/queries/clientpositive/nullgroup5.q
+++ b/ql/src/test/queries/clientpositive/nullgroup5.q
@@ -1,10 +1,10 @@
CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09');
CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09');
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09');
explain
select u.* from
diff --git a/ql/src/test/queries/clientpositive/nullscript.q b/ql/src/test/queries/clientpositive/nullscript.q
index 95c9e1d..11f4a7a 100644
--- a/ql/src/test/queries/clientpositive/nullscript.q
+++ b/ql/src/test/queries/clientpositive/nullscript.q
@@ -1,7 +1,7 @@
CREATE TABLE nullscript(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE nullscript;
-LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE nullscript;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript;
+LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE nullscript;
explain
select transform(key) using 'cat' as key1 from nullscript;
select transform(key) using 'cat' as key1 from nullscript;
diff --git a/ql/src/test/queries/clientpositive/orc_create.q b/ql/src/test/queries/clientpositive/orc_create.q
index 6aca548..73b3e8c 100644
--- a/ql/src/test/queries/clientpositive/orc_create.q
+++ b/ql/src/test/queries/clientpositive/orc_create.q
@@ -53,7 +53,7 @@
DESCRIBE FORMATTED orc_create_complex;
-LOAD DATA LOCAL INPATH '../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging;
+LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging;
SELECT * from orc_create_staging;
@@ -72,7 +72,7 @@
address string,
state string);
-LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+LOAD DATA LOCAL INPATH '../../data/files/orc_create_people.txt'
OVERWRITE INTO TABLE orc_create_people_staging;
CREATE TABLE orc_create_people (
diff --git a/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q b/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q
index f916012..a0eaab7 100644
--- a/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q
+++ b/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q
@@ -19,7 +19,7 @@
ALTER TABLE test_orc SET SERDEPROPERTIES ('orc.stripe.size' = '1');
CREATE TABLE src_thousand(key STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1kv2.cogroup.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1kv2.cogroup.txt'
INTO TABLE src_thousand;
set hive.exec.orc.dictionary.key.size.threshold=0.5;
diff --git a/ql/src/test/queries/clientpositive/orc_ends_with_nulls.q b/ql/src/test/queries/clientpositive/orc_ends_with_nulls.q
index 6685da7..83c5a05 100644
--- a/ql/src/test/queries/clientpositive/orc_ends_with_nulls.q
+++ b/ql/src/test/queries/clientpositive/orc_ends_with_nulls.q
@@ -10,7 +10,7 @@
-- to last index stride are the same (there are only two index strides)
CREATE TABLE src_null(a STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/nulls.txt' INTO TABLE src_null;
+LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null;
INSERT OVERWRITE TABLE test_orc SELECT a FROM src_null;
diff --git a/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q b/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q
index f214273..78bfb43 100644
--- a/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q
+++ b/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q
@@ -27,7 +27,7 @@
ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/over1k' OVERWRITE INTO TABLE staging;
+LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE staging;
INSERT INTO TABLE orc_pred select * from staging;
diff --git a/ql/src/test/queries/clientpositive/parallel_orderby.q b/ql/src/test/queries/clientpositive/parallel_orderby.q
index 5e09395..73c3940 100644
--- a/ql/src/test/queries/clientpositive/parallel_orderby.q
+++ b/ql/src/test/queries/clientpositive/parallel_orderby.q
@@ -1,6 +1,6 @@
create table src5 (key string, value string);
-load data local inpath '../data/files/kv5.txt' into table src5;
-load data local inpath '../data/files/kv5.txt' into table src5;
+load data local inpath '../../data/files/kv5.txt' into table src5;
+load data local inpath '../../data/files/kv5.txt' into table src5;
set mapred.reduce.tasks = 4;
set hive.optimize.sampling.orderby=true;
diff --git a/ql/src/test/queries/clientpositive/partition_type_check.q b/ql/src/test/queries/clientpositive/partition_type_check.q
index 7f1acca..c9bca99 100644
--- a/ql/src/test/queries/clientpositive/partition_type_check.q
+++ b/ql/src/test/queries/clientpositive/partition_type_check.q
@@ -2,14 +2,14 @@
-- begin part(string, string) pass(string, int)
CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as textfile;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2);
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2);
select * from tab1;
drop table tab1;
-- begin part(string, int) pass(string, string)
CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2');
select * from tab1;
drop table tab1;
@@ -17,7 +17,7 @@
-- begin part(string, date) pass(string, date)
create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day date) stored as textfile;
alter table tab1 add partition (month='June', day='2008-01-01');
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01');
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01');
select id1, id2, day from tab1 where day='2008-01-01';
drop table tab1;
diff --git a/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q b/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
index e9b574c..3cf488f 100644
--- a/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
+++ b/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
@@ -3,9 +3,9 @@
-- CustomSerDe(1, 2, 3) irrespective of the inserted values
DROP TABLE PW17;
-ADD JAR ../build/ql/test/test-serdes.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-custom-serde/${system:hive.version}/hive-it-custom-serde-${system:hive.version}.jar;
CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1');
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1');
ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2';
ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
-- Without the fix HIVE-5199, will throw cast exception via FetchOperator
@@ -14,13 +14,13 @@
-- Test for non-partitioned table.
DROP TABLE PW17_2;
CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2;
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2;
-- Without the fix HIVE-5199, will throw cast exception via MapOperator
SELECT COUNT(*) FROM PW17_2;
DROP TABLE PW17_3;
CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1');
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1');
ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2';
ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
-- Without the fix HIVE-5285, will throw cast exception via FetchOperator
@@ -28,7 +28,7 @@
DROP TABLE PW17_4;
CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4;
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4;
-- Without the fix HIVE-5285, will throw cast exception via MapOperator
SELECT COUNT(*) FROM PW17_4;
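The custom SerDe jar is no longer a pre-built artifact checked into the build tree; it is now an installed Maven module, so the test addresses it inside the local repository using the standard groupId-as-path/artifactId/version layout. The generic shape in the comment below is the assumption; the concrete line is the one the hunk adds:

    -- <repo>/<groupId as dirs>/<artifactId>/<version>/<artifactId>-<version>.jar
    ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-custom-serde/${system:hive.version}/hive-it-custom-serde-${system:hive.version}.jar;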
diff --git a/ql/src/test/queries/clientpositive/partition_wise_fileformat18.q b/ql/src/test/queries/clientpositive/partition_wise_fileformat18.q
index 70a2b37..40ed258 100644
--- a/ql/src/test/queries/clientpositive/partition_wise_fileformat18.q
+++ b/ql/src/test/queries/clientpositive/partition_wise_fileformat18.q
@@ -4,9 +4,9 @@
-- CustomSerDe(4, 5) irrespective of the inserted values
DROP TABLE PW18;
-ADD JAR ../build/ql/test/test-serdes.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-custom-serde/${system:hive.version}/hive-it-custom-serde-${system:hive.version}.jar;
CREATE TABLE PW18(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1');
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1');
ALTER TABLE PW18 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe4';
-- Without the fix HIVE-5202, will throw unsupported data type exception.
SELECT * FROM PW18;
@@ -14,6 +14,6 @@
-- Test for non-partitioned table.
DROP TABLE PW18_2;
CREATE TABLE PW18_2(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5';
-LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18_2;
+LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2;
-- Without the fix HIVE-5202, will throw unsupported data type exception
SELECT COUNT(*) FROM PW18_2;
diff --git a/ql/src/test/queries/clientpositive/pcr.q b/ql/src/test/queries/clientpositive/pcr.q
index 09a39ae..3be0ff2 100644
--- a/ql/src/test/queries/clientpositive/pcr.q
+++ b/ql/src/test/queries/clientpositive/pcr.q
@@ -127,7 +127,7 @@
row format delimited
fields terminated by '\t'
collection items terminated by '\001';
-load data local inpath '../data/files/kv1.txt'
+load data local inpath '../../data/files/kv1.txt'
overwrite into table ab;
-- Create partitioned table with struct data:
diff --git a/ql/src/test/queries/clientpositive/ppd_multi_insert.q b/ql/src/test/queries/clientpositive/ppd_multi_insert.q
index a802df1..06fe7ce 100644
--- a/ql/src/test/queries/clientpositive/ppd_multi_insert.q
+++ b/ql/src/test/queries/clientpositive/ppd_multi_insert.q
@@ -10,18 +10,18 @@
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
SELECT mi1.* FROM mi1;
SELECT mi2.* FROM mi2;
SELECT mi3.* FROM mi3;
-dfs -cat ../build/ql/test/data/warehouse/mi4.out/*;
+dfs -cat ${system:test.warehouse.dir}/mi4.out/*;
set hive.ppd.remove.duplicatefilters=true;
@@ -31,15 +31,15 @@
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300;
SELECT mi1.* FROM mi1;
SELECT mi2.* FROM mi2;
SELECT mi3.* FROM mi3;
-dfs -cat ../build/ql/test/data/warehouse/mi4.out/*;
+dfs -cat ${system:test.warehouse.dir}/mi4.out/*;
diff --git a/ql/src/test/queries/clientpositive/progress_1.q b/ql/src/test/queries/clientpositive/progress_1.q
index ad908a0..22ee926 100644
--- a/ql/src/test/queries/clientpositive/progress_1.q
+++ b/ql/src/test/queries/clientpositive/progress_1.q
@@ -2,7 +2,7 @@
CREATE TABLE PROGRESS_1(key int, value string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv6.txt' INTO TABLE PROGRESS_1;
+LOAD DATA LOCAL INPATH '../../data/files/kv6.txt' INTO TABLE PROGRESS_1;
select count(1) from PROGRESS_1 t1 join PROGRESS_1 t2 on t1.key=t2.key;
diff --git a/ql/src/test/queries/clientpositive/ptf.q b/ql/src/test/queries/clientpositive/ptf.q
index eea5415..d56b412 100644
--- a/ql/src/test/queries/clientpositive/ptf.q
+++ b/ql/src/test/queries/clientpositive/ptf.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
--1. test1
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientpositive/ptf_decimal.q b/ql/src/test/queries/clientpositive/ptf_decimal.q
index 7fbdcaa..9799534 100644
--- a/ql/src/test/queries/clientpositive/ptf_decimal.q
+++ b/ql/src/test/queries/clientpositive/ptf_decimal.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- 1. aggregate functions with decimal type
diff --git a/ql/src/test/queries/clientpositive/ptf_general_queries.q b/ql/src/test/queries/clientpositive/ptf_general_queries.q
index 885c3b3..4fe9710 100644
--- a/ql/src/test/queries/clientpositive/ptf_general_queries.q
+++ b/ql/src/test/queries/clientpositive/ptf_general_queries.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- 1. testNoPTFNoWindowing
select p_mfgr, p_name, p_size
diff --git a/ql/src/test/queries/clientpositive/ptf_matchpath.q b/ql/src/test/queries/clientpositive/ptf_matchpath.q
index 72eeb10..0cde350 100644
--- a/ql/src/test/queries/clientpositive/ptf_matchpath.q
+++ b/ql/src/test/queries/clientpositive/ptf_matchpath.q
@@ -10,7 +10,7 @@
FL_NUM string
);
-LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny;
+LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny;
-- 1. basic Matchpath test
select origin_city_name, fl_num, year, month, day_of_month, sz, tpath
diff --git a/ql/src/test/queries/clientpositive/ptf_rcfile.q b/ql/src/test/queries/clientpositive/ptf_rcfile.q
index 535a233..a68c578 100644
--- a/ql/src/test/queries/clientpositive/ptf_rcfile.q
+++ b/ql/src/test/queries/clientpositive/ptf_rcfile.q
@@ -12,7 +12,7 @@
p_comment STRING
) STORED AS RCFILE ;
-LOAD DATA LOCAL INPATH '../data/files/part.rc' overwrite into table part_rc;
+LOAD DATA LOCAL INPATH '../../data/files/part.rc' overwrite into table part_rc;
-- testWindowingPTFWithPartRC
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientpositive/ptf_register_tblfn.q b/ql/src/test/queries/clientpositive/ptf_register_tblfn.q
index a2140cd..4b508e9 100644
--- a/ql/src/test/queries/clientpositive/ptf_register_tblfn.q
+++ b/ql/src/test/queries/clientpositive/ptf_register_tblfn.q
@@ -10,7 +10,7 @@
FL_NUM string
);
-LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny;
+LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny;
create temporary function matchpathtest as 'org.apache.hadoop.hive.ql.udf.ptf.MatchPath$MatchPathResolver';
diff --git a/ql/src/test/queries/clientpositive/ptf_seqfile.q b/ql/src/test/queries/clientpositive/ptf_seqfile.q
index 4aa8ce1..c5d65f0 100644
--- a/ql/src/test/queries/clientpositive/ptf_seqfile.q
+++ b/ql/src/test/queries/clientpositive/ptf_seqfile.q
@@ -12,7 +12,7 @@
p_comment STRING
) STORED AS SEQUENCEFILE ;
-LOAD DATA LOCAL INPATH '../data/files/part.seq' overwrite into table part_seq;
+LOAD DATA LOCAL INPATH '../../data/files/part.seq' overwrite into table part_seq;
-- testWindowingPTFWithPartSeqFile
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q b/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
index f198baa..640e502 100644
--- a/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
+++ b/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
@@ -19,7 +19,7 @@
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '|';
-LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
+LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
CREATE INDEX lineitem_lshipdate_idx ON TABLE lineitem(l_shipdate) AS 'org.apache.hadoop.hive.ql.index.AggregateIndexHandler' WITH DEFERRED REBUILD IDXPROPERTIES("AGGREGATES"="count(l_shipdate)");
ALTER INDEX lineitem_lshipdate_idx ON lineitem REBUILD;
@@ -156,7 +156,7 @@
DROP TABLE tblpart;
CREATE TABLE tbl(key int, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|';
-LOAD DATA LOCAL INPATH '../data/files/tbl.txt' OVERWRITE INTO TABLE tbl;
+LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl;
CREATE INDEX tbl_key_idx ON TABLE tbl(key) AS 'org.apache.hadoop.hive.ql.index.AggregateIndexHandler' WITH DEFERRED REBUILD IDXPROPERTIES("AGGREGATES"="count(key)");
ALTER INDEX tbl_key_idx ON tbl REBUILD;
diff --git a/ql/src/test/queries/clientpositive/rcfile_bigdata.q b/ql/src/test/queries/clientpositive/rcfile_bigdata.q
index 3e83e66..df460c8 100644
--- a/ql/src/test/queries/clientpositive/rcfile_bigdata.q
+++ b/ql/src/test/queries/clientpositive/rcfile_bigdata.q
@@ -1,7 +1,7 @@
set hive.map.aggr.hash.percentmemory = 0.3;
set hive.mapred.local.mem = 256;
-add file ../data/scripts/dumpdata_script.py;
+add file ../../data/scripts/dumpdata_script.py;
CREATE table columnTable_Bigdata (key STRING, value STRING)
ROW FORMAT SERDE
diff --git a/ql/src/test/queries/clientpositive/remote_script.q b/ql/src/test/queries/clientpositive/remote_script.q
index 926601c..c4fcaaf 100644
--- a/ql/src/test/queries/clientpositive/remote_script.q
+++ b/ql/src/test/queries/clientpositive/remote_script.q
@@ -1,4 +1,4 @@
-dfs -put ../data/scripts/newline.py /newline.py;
+dfs -put ../../data/scripts/newline.py /newline.py;
add file hdfs:///newline.py;
set hive.transform.escape.input=true;
diff --git a/ql/src/test/queries/clientpositive/repair.q b/ql/src/test/queries/clientpositive/repair.q
index 8d04d3e..df199b0 100644
--- a/ql/src/test/queries/clientpositive/repair.q
+++ b/ql/src/test/queries/clientpositive/repair.q
@@ -1,10 +1,12 @@
+DROP TABLE IF EXISTS repairtable;
+
CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
MSCK TABLE repairtable;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
-dfs -touchz ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a/datafile;
+dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=a/p2=a;
+dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=b/p2=a;
+dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=b/p2=a/datafile;
MSCK TABLE repairtable;
@@ -12,4 +14,4 @@
MSCK TABLE repairtable;
-
+DROP TABLE repairtable;
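Besides redirecting the scratch paths, this hunk brackets the test with DROP statements so a rerun starts from a clean metastore. The body of the test relies on MSCK comparing the partitions the metastore knows about against the directories actually present under the table; a compact sketch of that workflow (the p1=c value is illustrative):

    DROP TABLE IF EXISTS repairtable;
    CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
    -- create a partition directory behind the metastore's back ...
    dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=c/p2=a;
    -- ... MSCK then reports the directory as an unregistered partition
    MSCK TABLE repairtable;
    DROP TABLE repairtable;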
diff --git a/ql/src/test/queries/clientpositive/scriptfile1.q b/ql/src/test/queries/clientpositive/scriptfile1.q
index 9d2256d..2dfb129 100644
--- a/ql/src/test/queries/clientpositive/scriptfile1.q
+++ b/ql/src/test/queries/clientpositive/scriptfile1.q
@@ -3,7 +3,7 @@
-- EXCLUDE_OS_WINDOWS
CREATE TABLE dest1(key INT, value STRING);
-ADD FILE src/test/scripts/testgrep;
+ADD FILE ../../ql/src/test/scripts/testgrep;
FROM (
FROM src
diff --git a/ql/src/test/queries/clientpositive/serde_regex.q b/ql/src/test/queries/clientpositive/serde_regex.q
index bedf9b0..b7b611a 100644
--- a/ql/src/test/queries/clientpositive/serde_regex.q
+++ b/ql/src/test/queries/clientpositive/serde_regex.q
@@ -31,8 +31,8 @@
)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
SELECT * FROM serde_regex ORDER BY time;
@@ -59,7 +59,7 @@
)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH "../data/files/kv7.txt" INTO TABLE serde_regex1;
+LOAD DATA LOCAL INPATH "../../data/files/kv7.txt" INTO TABLE serde_regex1;
SELECT key, value FROM serde_regex1 ORDER BY key, value;
diff --git a/ql/src/test/queries/clientpositive/set_processor_namespaces.q b/ql/src/test/queries/clientpositive/set_processor_namespaces.q
index 7e3d1f4..d10239c 100644
--- a/ql/src/test/queries/clientpositive/set_processor_namespaces.q
+++ b/ql/src/test/queries/clientpositive/set_processor_namespaces.q
@@ -24,7 +24,7 @@
set c=${hiveconf:${hiveconf:b}};
set c;
-set jar=${system:build.ivy.lib.dir}/default/derby-${system:derby.version}.jar;
+set jar=${system:maven.local.repository}/org/apache/derby/derby/${system:derby.version}/derby-${system:derby.version}.jar;
add file ${hiveconf:jar};
delete file ${hiveconf:jar};
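The jar path aside, this test exercises Hive's variable namespaces, and the retained context line shows the interesting construct: a nested lookup in which the inner reference resolves first and its value names the variable the outer reference reads. A hedged sketch of that two-step resolution (values are illustrative):

    set a=hello;
    set b=a;
    -- inner ${hiveconf:b} resolves to 'a', so the outer lookup yields 'hello'
    set c=${hiveconf:${hiveconf:b}};
    set c;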
diff --git a/ql/src/test/queries/clientpositive/skewjoin.q b/ql/src/test/queries/clientpositive/skewjoin.q
index ad917be..47535ea 100644
--- a/ql/src/test/queries/clientpositive/skewjoin.q
+++ b/ql/src/test/queries/clientpositive/skewjoin.q
@@ -13,10 +13,10 @@
CREATE TABLE T4(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE dest_j1(key INT, value STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4;
EXPLAIN
diff --git a/ql/src/test/queries/clientpositive/skewjoin_union_remove_1.q b/ql/src/test/queries/clientpositive/skewjoin_union_remove_1.q
index 03eab4c..fc07742 100644
--- a/ql/src/test/queries/clientpositive/skewjoin_union_remove_1.q
+++ b/ql/src/test/queries/clientpositive/skewjoin_union_remove_1.q
@@ -20,12 +20,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- a simple join query with skew on both the tables on the join key
diff --git a/ql/src/test/queries/clientpositive/skewjoin_union_remove_2.q b/ql/src/test/queries/clientpositive/skewjoin_union_remove_2.q
index 9cb9195..50cfc61 100644
--- a/ql/src/test/queries/clientpositive/skewjoin_union_remove_2.q
+++ b/ql/src/test/queries/clientpositive/skewjoin_union_remove_2.q
@@ -12,16 +12,16 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
-- This is to test the union->selectstar->filesink and skewjoin optimization
-- Union of 3 map-reduce subqueries is performed for the skew join
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt1.q b/ql/src/test/queries/clientpositive/skewjoinopt1.q
index af446bb..504ba8b 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt1.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt1.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- a simple join query with skew on both the tables on the join key
-- adding a order by at the end to make the results deterministic
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt10.q b/ql/src/test/queries/clientpositive/skewjoinopt10.q
index 199f320..f35af90 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt10.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt10.q
@@ -4,7 +4,7 @@
CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
drop table array_valued_T1;
create table array_valued_T1 (key string, value array<string>) SKEWED BY (key) ON ((8));
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt11.q b/ql/src/test/queries/clientpositive/skewjoinopt11.q
index ef61fb2..9e00bdcd 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt11.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt11.q
@@ -5,11 +5,11 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- This test is to verify the skew join compile optimization when the join is followed
-- by a union. Both sides of a union consist of a join, which should have used
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt12.q b/ql/src/test/queries/clientpositive/skewjoinopt12.q
index b5d9d9b..1719950 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt12.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt12.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- Both the join tables are skewed by 2 keys, and one of the skewed values
-- is common to both the tables. The join key matches the skewed key set.
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt13.q b/ql/src/test/queries/clientpositive/skewjoinopt13.q
index 0634c4f..5ef217c 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt13.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt13.q
@@ -4,16 +4,16 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key STRING, val STRING)
SKEWED BY (val) ON ((12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
-- This test is for skewed join compile time optimization for more than 2 tables.
-- The join key for table 3 is different from the join key used for joining
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt14.q b/ql/src/test/queries/clientpositive/skewjoinopt14.q
index 0f031dd..df1a26b 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt14.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt14.q
@@ -5,16 +5,16 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key STRING, val STRING)
SKEWED BY (val) ON ((12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
-- This test is for skewed join compile time optimization for more than 2 tables.
-- The join key for table 3 is different from the join key used for joining
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt15.q b/ql/src/test/queries/clientpositive/skewjoinopt15.q
index d5474a4..1db5472 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt15.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt15.q
@@ -4,7 +4,7 @@
CREATE TABLE tmpT1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1;
-- testing skew on other data types - int
CREATE TABLE T1(key INT, val STRING) SKEWED BY (key) ON ((2));
@@ -12,7 +12,7 @@
CREATE TABLE tmpT2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE tmpT2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE tmpT2;
CREATE TABLE T2(key INT, val STRING) SKEWED BY (key) ON ((3));
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt16.q b/ql/src/test/queries/clientpositive/skewjoinopt16.q
index 46b4f6d..915de61 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt16.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt16.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- One of the tables is skewed by 2 columns, and the other table is
-- skewed by one column. This join is performed on both the columns
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt17.q b/ql/src/test/queries/clientpositive/skewjoinopt17.q
index 0592ca8..2ee79cc 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt17.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt17.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- One of the tables is skewed by 2 columns, and the other table is
-- skewed by one column. This join is performed on the first skewed column
@@ -31,12 +31,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- One of the tables is skewed by 2 columns, and the other table is
-- skewed by one column. This join is performed on both the columns
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt18.q b/ql/src/test/queries/clientpositive/skewjoinopt18.q
index 433fea3..9d06cc0 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt18.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt18.q
@@ -4,7 +4,7 @@
CREATE TABLE tmpT1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1;
-- testing skew on other data types - int
CREATE TABLE T1(key INT, val STRING) SKEWED BY (key) ON ((2));
@@ -16,7 +16,7 @@
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- Once HIVE-3445 is fixed, the compile time skew join optimization would be
-- applicable here. Till the above jira is fixed, it would be performed as a
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt19.q b/ql/src/test/queries/clientpositive/skewjoinopt19.q
index 0b11ebe..075645f 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt19.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt19.q
@@ -6,11 +6,11 @@
CLUSTERED BY (key) INTO 4 BUCKETS
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- add a test where the skewed key is also the bucketized key
-- it should not matter, and the compile time skewed join
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt2.q b/ql/src/test/queries/clientpositive/skewjoinopt2.q
index 34fcdbf..f7acaad 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt2.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt2.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2), (7)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- a simple query with skew on both the tables on the join key
-- multiple skew values are present for the skewed keys
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt20.q b/ql/src/test/queries/clientpositive/skewjoinopt20.q
index f217052..9b908ce 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt20.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt20.q
@@ -6,11 +6,11 @@
CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- add a test where the skewed key is also the bucketized/sorted key
-- it should not matter, and the compile time skewed join
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt3.q b/ql/src/test/queries/clientpositive/skewjoinopt3.q
index f6002ad..22ea4f0 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt3.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt3.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- a simple query with skew on both the tables. One of the skewed
-- values is common to both the tables. The skewed value should not be
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt4.q b/ql/src/test/queries/clientpositive/skewjoinopt4.q
index ca83c44..8496b1a 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt4.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt4.q
@@ -5,11 +5,11 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- only one of the tables of the join (the left table of the join) is skewed
-- the skewed filter would still be applied to both the tables
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt5.q b/ql/src/test/queries/clientpositive/skewjoinopt5.q
index 3d7884c..152de5b 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt5.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt5.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- One of the tables is skewed by 2 columns, and the other table is
-- skewed by one column. This join is performed on the first skewed column
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt6.q b/ql/src/test/queries/clientpositive/skewjoinopt6.q
index 36cf8ce..2e261bd 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt6.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt6.q
@@ -5,12 +5,12 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- Both the join tables are skewed by 2 keys, and one of the skewed values
-- is common to both the tables. The join key is a subset of the skewed key set:
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt7.q b/ql/src/test/queries/clientpositive/skewjoinopt7.q
index cf84f67..e4d9605 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt7.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt7.q
@@ -5,16 +5,16 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
-- This test is for validating skewed join compile time optimization for more than
-- 2 tables. The join key is the same, and so a 3-way join would be performed.
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt8.q b/ql/src/test/queries/clientpositive/skewjoinopt8.q
index d0ac845..85746d9 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt8.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt8.q
@@ -4,16 +4,16 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING)
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
-- This test is for validating skewed join compile time optimization for more than
-- 2 tables. The join key is the same, and so a 3-way join would be performed.
diff --git a/ql/src/test/queries/clientpositive/skewjoinopt9.q b/ql/src/test/queries/clientpositive/skewjoinopt9.q
index 0483403..889ab6c 100644
--- a/ql/src/test/queries/clientpositive/skewjoinopt9.q
+++ b/ql/src/test/queries/clientpositive/skewjoinopt9.q
@@ -5,11 +5,11 @@
CREATE TABLE T1(key STRING, val STRING)
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-- no skew join compile time optimization would be performed if one of the
-- join sources is a sub-query consisting of a union all
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_1.q b/ql/src/test/queries/clientpositive/smb_mapjoin_1.q
index 359513e..9dee411 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_1.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_1.q
@@ -6,9 +6,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
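-- A short gloss, hedged and not part of the patch: the two flags above enable
-- a sort-merge bucket (SMB) map join, which needs both sides CLUSTERED and
-- SORTED BY the join key into compatible bucket counts; the smb tests also
-- typically pin the input format, roughly as sketched here:
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
EXPLAIN SELECT /*+ MAPJOIN(a) */ a.key, b.value
FROM smb_bucket_1 a JOIN smb_bucket_2 b ON a.key = b.key;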
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_10.q b/ql/src/test/queries/clientpositive/smb_mapjoin_10.q
index a79ebf6..1fbe209 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_10.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_10.q
@@ -6,11 +6,11 @@
-- add dummy files to make sure that the number of files in each partition is the same as the number of buckets
-load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
-load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
+load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
+load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
-load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
-load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
+load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
+load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_2.q b/ql/src/test/queries/clientpositive/smb_mapjoin_2.q
index 9d86314..e2b2433 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_2.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_2.q
@@ -6,9 +6,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_25.q b/ql/src/test/queries/clientpositive/smb_mapjoin_25.q
index 8b534e8..e43174b 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_25.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_25.q
@@ -10,9 +10,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
explain
select * from (select a.key from smb_bucket_1 a join smb_bucket_2 b on (a.key = b.key) where a.key = 5) t1 left outer join (select c.key from smb_bucket_2 c join smb_bucket_3 d on (c.key = d.key) where c.key=5) t2 on (t1.key=t2.key) where t2.key=5;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_3.q b/ql/src/test/queries/clientpositive/smb_mapjoin_3.q
index 73b21fa..b379706 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_3.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_3.q
@@ -6,9 +6,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_4.q b/ql/src/test/queries/clientpositive/smb_mapjoin_4.q
index 83143b1..2b3f67e 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_4.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_4.q
@@ -6,9 +6,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_5.q b/ql/src/test/queries/clientpositive/smb_mapjoin_5.q
index 61ec084..406604e 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_5.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_5.q
@@ -6,9 +6,9 @@
create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
-load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
-load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
-load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
+load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
+load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
+load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
set hive.optimize.bucketmapjoin = true;
set hive.optimize.bucketmapjoin.sortedmerge = true;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_7.q b/ql/src/test/queries/clientpositive/smb_mapjoin_7.q
index 1488b1f..ca1c749 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_7.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_7.q
@@ -15,8 +15,8 @@
create table smb_join_results_empty_bigtable(k1 int, v1 string, k2 int, v2 string);
create table normal_join_results(k1 int, v1 string, k2 int, v2 string);
-load data local inpath '../data/files/empty1.txt' into table smb_bucket4_1;
-load data local inpath '../data/files/empty2.txt' into table smb_bucket4_1;
+load data local inpath '../../data/files/empty1.txt' into table smb_bucket4_1;
+load data local inpath '../../data/files/empty2.txt' into table smb_bucket4_1;
insert overwrite table smb_bucket4_2
select * from src;
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_8.q b/ql/src/test/queries/clientpositive/smb_mapjoin_8.q
index 6f282ed..f296057 100644
--- a/ql/src/test/queries/clientpositive/smb_mapjoin_8.q
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_8.q
@@ -5,7 +5,7 @@
create table smb_bucket_input (key int, value string) stored as rcfile;
-load data local inpath '../data/files/smb_bucket_input.rc' into table smb_bucket_input;
+load data local inpath '../../data/files/smb_bucket_input.rc' into table smb_bucket_input;
CREATE TABLE smb_bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS;
diff --git a/ql/src/test/queries/clientpositive/source.q b/ql/src/test/queries/clientpositive/source.q
index 6fe3d21..76ca152 100644
--- a/ql/src/test/queries/clientpositive/source.q
+++ b/ql/src/test/queries/clientpositive/source.q
@@ -1 +1 @@
-source ../data/files/source.txt;
+source ../../data/files/source.txt;
diff --git a/ql/src/test/queries/clientpositive/split.q b/ql/src/test/queries/clientpositive/split.q
index 9960fd6..f5d7ff8 100644
--- a/ql/src/test/queries/clientpositive/split.q
+++ b/ql/src/test/queries/clientpositive/split.q
@@ -2,7 +2,7 @@
CREATE table tmp_jo_tab_test (message_line STRING)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/input.txt'
+LOAD DATA LOCAL INPATH '../../data/files/input.txt'
OVERWRITE INTO TABLE tmp_jo_tab_test;
select size(split(message_line, '\t')) from tmp_jo_tab_test;
diff --git a/ql/src/test/queries/clientpositive/stats1.q b/ql/src/test/queries/clientpositive/stats1.q
index 0b783de..359d27b 100644
--- a/ql/src/test/queries/clientpositive/stats1.q
+++ b/ql/src/test/queries/clientpositive/stats1.q
@@ -26,5 +26,5 @@
-- Load a file into an existing table
-- Some stats (numFiles, totalSize) should be updated correctly
-- Some other stats (numRows, rawDataSize) should be cleared
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE tmptable;
DESCRIBE FORMATTED tmptable;
\ No newline at end of file
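-- A hedged illustration of the stats behavior described above; the table name
-- stats_demo is illustrative, not from the patch. An INSERT populates row
-- stats via autogather, while a plain file LOAD refreshes only the
-- filesystem-derived stats:
CREATE TABLE stats_demo (key STRING, value STRING);
INSERT OVERWRITE TABLE stats_demo SELECT key, value FROM src;
LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE stats_demo;
-- numFiles and totalSize now reflect the added file; numRows and rawDataSize
-- are cleared because the load path bypasses row counting.
DESCRIBE FORMATTED stats_demo;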
diff --git a/ql/src/test/queries/clientpositive/stats11.q b/ql/src/test/queries/clientpositive/stats11.q
index 6618c91..d037c003b 100644
--- a/ql/src/test/queries/clientpositive/stats11.q
+++ b/ql/src/test/queries/clientpositive/stats11.q
@@ -2,25 +2,25 @@
set hive.stats.autogather=true;
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin;
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin;
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
explain
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
+load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint);
create table bucketmapjoin_hash_result_2 (key bigint , value1 bigint, value2 bigint);
diff --git a/ql/src/test/queries/clientpositive/stats18.q b/ql/src/test/queries/clientpositive/stats18.q
index 425de64..e773cd7 100644
--- a/ql/src/test/queries/clientpositive/stats18.q
+++ b/ql/src/test/queries/clientpositive/stats18.q
@@ -13,7 +13,7 @@
-- Some other stats (numRows, rawDataSize) should be cleared
desc formatted stats_part partition (ds='2010-04-08', hr='13');
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13');
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13');
desc formatted stats_part partition (ds='2010-04-08', hr='13');
diff --git a/ql/src/test/queries/clientpositive/stats3.q b/ql/src/test/queries/clientpositive/stats3.q
index 5962348..fd7e0ea 100644
--- a/ql/src/test/queries/clientpositive/stats3.q
+++ b/ql/src/test/queries/clientpositive/stats3.q
@@ -5,9 +5,9 @@
create table hive_test_src ( col1 string ) stored as textfile ;
explain extended
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src ;
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src ;
desc formatted hive_test_src;
diff --git a/ql/src/test/queries/clientpositive/stats_noscan_2.q b/ql/src/test/queries/clientpositive/stats_noscan_2.q
index c934fb2..a19d01b 100644
--- a/ql/src/test/queries/clientpositive/stats_noscan_2.q
+++ b/ql/src/test/queries/clientpositive/stats_noscan_2.q
@@ -1,7 +1,7 @@
-- test analyze table compute statistics [noscan] on external table
-- 1 test table
-CREATE EXTERNAL TABLE anaylyze_external (a INT) LOCATION '${system:test.src.data.dir}/files/ext_test';
+CREATE EXTERNAL TABLE anaylyze_external (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
SELECT * FROM anaylyze_external;
analyze table anaylyze_external compute statistics;
describe formatted anaylyze_external;
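-- A brief sketch of the [noscan] variant this test's name refers to: NOSCAN
-- gathers only the file-level statistics (numFiles, totalSize) without reading
-- any rows, so it stays cheap even on large external locations.
analyze table anaylyze_external compute statistics noscan;
describe formatted anaylyze_external;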
diff --git a/ql/src/test/queries/clientpositive/subq.q b/ql/src/test/queries/clientpositive/subq.q
index 3fb1558..14fa321 100644
--- a/ql/src/test/queries/clientpositive/subq.q
+++ b/ql/src/test/queries/clientpositive/subq.q
@@ -2,12 +2,12 @@
FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*;
FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*;
-dfs -cat ../build/ql/test/data/warehouse/union.out/*;
+dfs -cat ${system:test.warehouse.dir}/union.out/*;
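-- A hedged note on the substitution used above: Hive expands ${system:name}
-- from JVM system properties, so the Maven test runner is assumed to pass
-- something like -Dtest.warehouse.dir=<module>/target/warehouse. The resolved
-- value can be inspected from a qfile with:
set system:test.warehouse.dir;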
diff --git a/ql/src/test/queries/clientpositive/subquery_in.q b/ql/src/test/queries/clientpositive/subquery_in.q
index 6f83a88..37bb5d7 100644
--- a/ql/src/test/queries/clientpositive/subquery_in.q
+++ b/ql/src/test/queries/clientpositive/subquery_in.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
DROP TABLE lineitem;
CREATE TABLE lineitem (L_ORDERKEY INT,
@@ -35,7 +35,7 @@
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '|';
-LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
+LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
-- non agg, non corr
explain
diff --git a/ql/src/test/queries/clientpositive/subquery_notin.q b/ql/src/test/queries/clientpositive/subquery_notin.q
index 7bc7a1b..d7eca3e 100644
--- a/ql/src/test/queries/clientpositive/subquery_notin.q
+++ b/ql/src/test/queries/clientpositive/subquery_notin.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
DROP TABLE lineitem;
CREATE TABLE lineitem (L_ORDERKEY INT,
@@ -35,7 +35,7 @@
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '|';
-LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
+LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
-- non agg, non corr
explain
diff --git a/ql/src/test/queries/clientpositive/symlink_text_input_format.q b/ql/src/test/queries/clientpositive/symlink_text_input_format.q
index bb9d6f3..d633b97 100644
--- a/ql/src/test/queries/clientpositive/symlink_text_input_format.q
+++ b/ql/src/test/queries/clientpositive/symlink_text_input_format.q
@@ -1,12 +1,12 @@
-
+DROP TABLE IF EXISTS symlink_text_input_format;
EXPLAIN
CREATE TABLE symlink_text_input_format (key STRING, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
CREATE TABLE symlink_text_input_format (key STRING, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
-dfs -cp ../data/files/symlink1.txt ../build/ql/test/data/warehouse/symlink_text_input_format/symlink1.txt;
-dfs -cp ../data/files/symlink2.txt ../build/ql/test/data/warehouse/symlink_text_input_format/symlink2.txt;
+dfs -cp ../../data/files/symlink1.txt ${system:test.warehouse.dir}/symlink_text_input_format/symlink1.txt;
+dfs -cp ../../data/files/symlink2.txt ${system:test.warehouse.dir}/symlink_text_input_format/symlink2.txt;
EXPLAIN SELECT * FROM symlink_text_input_format order by key, value;
@@ -20,5 +20,4 @@
SELECT count(1) FROM symlink_text_input_format;
-
-
+DROP TABLE symlink_text_input_format;
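-- A hedged sketch of what SymlinkTextInputFormat consumes: each file copied
-- into the table directory above is a manifest whose lines name the real data
-- files, e.g. symlink1.txt could plausibly contain a path such as
--   ../../data/files/T1.txt
-- and the input format reads those targets rather than the manifest itself.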
diff --git a/ql/src/test/queries/clientpositive/table_access_keys_stats.q b/ql/src/test/queries/clientpositive/table_access_keys_stats.q
index 8b1a390..23209d8 100644
--- a/ql/src/test/queries/clientpositive/table_access_keys_stats.q
+++ b/ql/src/test/queries/clientpositive/table_access_keys_stats.q
@@ -4,7 +4,7 @@
-- This test is used for testing the TableAccessAnalyzer
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
diff --git a/ql/src/test/queries/clientpositive/timestamp_null.q b/ql/src/test/queries/clientpositive/timestamp_null.q
index efd5bc4..36f3541 100644
--- a/ql/src/test/queries/clientpositive/timestamp_null.q
+++ b/ql/src/test/queries/clientpositive/timestamp_null.q
@@ -1,6 +1,6 @@
DROP TABLE IF EXISTS timestamp_null;
CREATE TABLE timestamp_null (t1 TIMESTAMP);
-LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null;
+LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null;
SELECT * FROM timestamp_null LIMIT 1;
diff --git a/ql/src/test/queries/clientpositive/truncate_table.q b/ql/src/test/queries/clientpositive/truncate_table.q
index c0e81e9..975c0f1 100644
--- a/ql/src/test/queries/clientpositive/truncate_table.q
+++ b/ql/src/test/queries/clientpositive/truncate_table.q
@@ -1,5 +1,5 @@
create table src_truncate (key string, value string);
-load data local inpath '../data/files/kv1.txt' into table src_truncate;;
+load data local inpath '../../data/files/kv1.txt' into table src_truncate;
create table srcpart_truncate (key string, value string) partitioned by (ds string, hr string);
alter table srcpart_truncate add partition (ds='2008-04-08', hr='11');
@@ -7,10 +7,10 @@
alter table srcpart_truncate add partition (ds='2008-04-09', hr='11');
alter table srcpart_truncate add partition (ds='2008-04-09', hr='12');
-load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11');
-load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12');
-load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11');
-load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12');
+load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11');
+load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12');
+load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11');
+load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12');
set hive.fetch.task.conversion=more;
diff --git a/ql/src/test/queries/clientpositive/udaf_context_ngrams.q b/ql/src/test/queries/clientpositive/udaf_context_ngrams.q
index dda7aaa..f065385 100644
--- a/ql/src/test/queries/clientpositive/udaf_context_ngrams.q
+++ b/ql/src/test/queries/clientpositive/udaf_context_ngrams.q
@@ -1,5 +1,5 @@
CREATE TABLE kafka (contents STRING);
-LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka;
+LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka;
set mapred.reduce.tasks=1;
set hive.exec.reducers.max=1;
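-- A hedged usage example for the UDAF under test; the query text is
-- illustrative. context_ngrams returns the top-k n-grams that fill the null
-- "holes" in the supplied context array:
SELECT context_ngrams(sentences(lower(contents)), array(null), 10)
FROM kafka;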
diff --git a/ql/src/test/queries/clientpositive/udaf_corr.q b/ql/src/test/queries/clientpositive/udaf_corr.q
index 6cc9ce2..a2edec4 100644
--- a/ql/src/test/queries/clientpositive/udaf_corr.q
+++ b/ql/src/test/queries/clientpositive/udaf_corr.q
@@ -2,7 +2,7 @@
CREATE TABLE covar_tab (a INT, b INT, c INT)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab;
DESCRIBE FUNCTION corr;
diff --git a/ql/src/test/queries/clientpositive/udaf_covar_pop.q b/ql/src/test/queries/clientpositive/udaf_covar_pop.q
index 0f5d5f3..a9937ba 100644
--- a/ql/src/test/queries/clientpositive/udaf_covar_pop.q
+++ b/ql/src/test/queries/clientpositive/udaf_covar_pop.q
@@ -2,7 +2,7 @@
CREATE TABLE covar_tab (a INT, b INT, c INT)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab;
DESCRIBE FUNCTION covar_pop;
diff --git a/ql/src/test/queries/clientpositive/udaf_covar_samp.q b/ql/src/test/queries/clientpositive/udaf_covar_samp.q
index 72b9c4b..2b50d8f 100644
--- a/ql/src/test/queries/clientpositive/udaf_covar_samp.q
+++ b/ql/src/test/queries/clientpositive/udaf_covar_samp.q
@@ -2,7 +2,7 @@
CREATE TABLE covar_tab (a INT, b INT, c INT)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab;
DESCRIBE FUNCTION covar_samp;
diff --git a/ql/src/test/queries/clientpositive/udaf_ngrams.q b/ql/src/test/queries/clientpositive/udaf_ngrams.q
index 31ffd29..6a2fde5 100644
--- a/ql/src/test/queries/clientpositive/udaf_ngrams.q
+++ b/ql/src/test/queries/clientpositive/udaf_ngrams.q
@@ -1,5 +1,5 @@
CREATE TABLE kafka (contents STRING);
-LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka;
+LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka;
set mapred.reduce.tasks=1;
set hive.exec.reducers.max=1;
diff --git a/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q b/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q
index 66c408d..5b8ad7a 100644
--- a/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q
+++ b/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q
@@ -1,10 +1,10 @@
-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket;
create table t1 (result double);
create table t2 (result double);
diff --git a/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q b/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
index 07bfb6e..1efa295 100644
--- a/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
+++ b/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
@@ -2,10 +2,10 @@
-- 0.23 changed the input order of data in the reducer task, which affects the result of percentile_approx
CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE;
-load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket;
-load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket;
+load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket;
create table t1 (result double);
create table t2 (result double);
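-- A hedged example of the UDAF these bucket fixtures feed; the arguments shown
-- are illustrative. The optional third argument sets the histogram bin count,
-- and passing an array of percentiles returns an array of estimates:
SELECT percentile_approx(key, 0.5),
       percentile_approx(key, array(0.25, 0.5, 0.75), 100)
FROM bucket;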
diff --git a/ql/src/test/queries/clientpositive/udf_field.q b/ql/src/test/queries/clientpositive/udf_field.q
index e995f5c..71a135e 100644
--- a/ql/src/test/queries/clientpositive/udf_field.q
+++ b/ql/src/test/queries/clientpositive/udf_field.q
@@ -25,7 +25,7 @@
CREATE TABLE test_table(col1 STRING, col2 STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table;
select col1,col2,
field("66",col1),
@@ -42,7 +42,7 @@
CREATE TABLE test_table1(col1 int, col2 string) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table1;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table1;
select col1,col2,
field(66,col1),
diff --git a/ql/src/test/queries/clientpositive/udf_in_file.q b/ql/src/test/queries/clientpositive/udf_in_file.q
index 4da4789..9d9efe8 100644
--- a/ql/src/test/queries/clientpositive/udf_in_file.q
+++ b/ql/src/test/queries/clientpositive/udf_in_file.q
@@ -1,12 +1,12 @@
DESCRIBE FUNCTION in_file;
EXPLAIN
-SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1;
-SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1;
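-- in_file(str, filename) returns true when str matches a full line of the
-- given local file; a minimal hedged variant against the relocated fixture:
SELECT in_file(src.key, '../../data/files/test2.dat') FROM src LIMIT 1;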
diff --git a/ql/src/test/queries/clientpositive/udf_length.q b/ql/src/test/queries/clientpositive/udf_length.q
index b843079..b376e92 100644
--- a/ql/src/test/queries/clientpositive/udf_length.q
+++ b/ql/src/test/queries/clientpositive/udf_length.q
@@ -9,6 +9,6 @@
-- Test with non-ascii characters.
CREATE TABLE dest1(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1;
+LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1;
EXPLAIN SELECT length(dest1.name) FROM dest1;
SELECT length(dest1.name) FROM dest1;
diff --git a/ql/src/test/queries/clientpositive/udf_printf.q b/ql/src/test/queries/clientpositive/udf_printf.q
index 99e89cc..1289a79 100644
--- a/ql/src/test/queries/clientpositive/udf_printf.q
+++ b/ql/src/test/queries/clientpositive/udf_printf.q
@@ -27,7 +27,7 @@
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '9'
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf;
+LOAD DATA LOCAL INPATH '../../data/files/string.txt' INTO TABLE binay_udf;
create table dest1 (key binary, value int);
insert overwrite table dest1 select transform(*) using 'cat' as key binary, value int from binay_udf;
select value, printf("format key: %s", key) from dest1;
diff --git a/ql/src/test/queries/clientpositive/udf_reverse.q b/ql/src/test/queries/clientpositive/udf_reverse.q
index 81f765e..89aafe3 100644
--- a/ql/src/test/queries/clientpositive/udf_reverse.q
+++ b/ql/src/test/queries/clientpositive/udf_reverse.q
@@ -11,5 +11,5 @@
-- kv4.txt contains the text 0xE982B5E993AE, which should be reversed to
-- 0xE993AEE982B5
CREATE TABLE dest1(name STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1;
+LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1;
SELECT count(1) FROM dest1 WHERE reverse(dest1.name) = _UTF-8 0xE993AEE982B5;
diff --git a/ql/src/test/queries/clientpositive/udf_sort_array.q b/ql/src/test/queries/clientpositive/udf_sort_array.q
index ef09732..8443f33 100644
--- a/ql/src/test/queries/clientpositive/udf_sort_array.q
+++ b/ql/src/test/queries/clientpositive/udf_sort_array.q
@@ -30,7 +30,7 @@
timestamps ARRAY<TIMESTAMP>
) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1;
+LOAD DATA LOCAL INPATH '../../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1;
SELECT sort_array(tinyints), sort_array(smallints), sort_array(ints),
sort_array(bigints), sort_array(booleans), sort_array(floats),
diff --git a/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q b/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q
index 3024074..b2e7a15 100644
--- a/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q
+++ b/ql/src/test/queries/clientpositive/udf_to_unix_timestamp.q
@@ -2,7 +2,7 @@
DESCRIBE FUNCTION EXTENDED to_unix_timestamp;
create table oneline(key int, value string);
-load data local inpath '../data/files/things.txt' into table oneline;
+load data local inpath '../../data/files/things.txt' into table oneline;
SELECT
'2009-03-20 11:30:01',
diff --git a/ql/src/test/queries/clientpositive/udf_unix_timestamp.q b/ql/src/test/queries/clientpositive/udf_unix_timestamp.q
index 89288a1..4c0ebea 100644
--- a/ql/src/test/queries/clientpositive/udf_unix_timestamp.q
+++ b/ql/src/test/queries/clientpositive/udf_unix_timestamp.q
@@ -2,7 +2,7 @@
DESCRIBE FUNCTION EXTENDED unix_timestamp;
create table oneline(key int, value string);
-load data local inpath '../data/files/things.txt' into table oneline;
+load data local inpath '../../data/files/things.txt' into table oneline;
SELECT
'2009-03-20 11:30:01',
diff --git a/ql/src/test/queries/clientpositive/udtf_posexplode.q b/ql/src/test/queries/clientpositive/udtf_posexplode.q
index 3c0be89..343f08b 100644
--- a/ql/src/test/queries/clientpositive/udtf_posexplode.q
+++ b/ql/src/test/queries/clientpositive/udtf_posexplode.q
@@ -5,7 +5,7 @@
deductions MAP<STRING, FLOAT>,
address STRUCT<street:STRING, city:STRING, state:STRING, zip:INT>);
-LOAD DATA LOCAL INPATH '../data/files/posexplode_data.txt' INTO TABLE employees;
+LOAD DATA LOCAL INPATH '../../data/files/posexplode_data.txt' INTO TABLE employees;
SELECT
name, pos, sub
diff --git a/ql/src/test/queries/clientpositive/union.q b/ql/src/test/queries/clientpositive/union.q
index 91bbd1b..525eccb 100644
--- a/ql/src/test/queries/clientpositive/union.q
+++ b/ql/src/test/queries/clientpositive/union.q
@@ -6,13 +6,13 @@
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*;
FROM (
FROM src select src.key, src.value WHERE src.key < 100
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*;
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*;
-dfs -cat ../build/ql/test/data/warehouse/union.out/*;
+dfs -cat ${system:test.warehouse.dir}/union.out/*;
diff --git a/ql/src/test/queries/clientpositive/union_date.q b/ql/src/test/queries/clientpositive/union_date.q
index e332a8a..dd6f08e 100644
--- a/ql/src/test/queries/clientpositive/union_date.q
+++ b/ql/src/test/queries/clientpositive/union_date.q
@@ -17,8 +17,8 @@
FL_NUM int
);
-LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1;
-LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2;
+LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1;
+LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2;
select * from (
select fl_num, fl_date from union_date_1
diff --git a/ql/src/test/queries/clientpositive/union_remove_1.q b/ql/src/test/queries/clientpositive/union_remove_1.q
index c9f920c..c87b3fe 100644
--- a/ql/src/test/queries/clientpositive/union_remove_1.q
+++ b/ql/src/test/queries/clientpositive/union_remove_1.q
@@ -19,7 +19,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_10.q b/ql/src/test/queries/clientpositive/union_remove_10.q
index a072fe3..6701952 100644
--- a/ql/src/test/queries/clientpositive/union_remove_10.q
+++ b/ql/src/test/queries/clientpositive/union_remove_10.q
@@ -24,7 +24,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_11.q b/ql/src/test/queries/clientpositive/union_remove_11.q
index 6250a20..4b2fa42 100644
--- a/ql/src/test/queries/clientpositive/union_remove_11.q
+++ b/ql/src/test/queries/clientpositive/union_remove_11.q
@@ -24,7 +24,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_12.q b/ql/src/test/queries/clientpositive/union_remove_12.q
index 168eac3..69d0d0a 100644
--- a/ql/src/test/queries/clientpositive/union_remove_12.q
+++ b/ql/src/test/queries/clientpositive/union_remove_12.q
@@ -24,7 +24,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_13.q b/ql/src/test/queries/clientpositive/union_remove_13.q
index a88a83e..7605f0e 100644
--- a/ql/src/test/queries/clientpositive/union_remove_13.q
+++ b/ql/src/test/queries/clientpositive/union_remove_13.q
@@ -24,7 +24,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_14.q b/ql/src/test/queries/clientpositive/union_remove_14.q
index e588e8f..a4fdfc8 100644
--- a/ql/src/test/queries/clientpositive/union_remove_14.q
+++ b/ql/src/test/queries/clientpositive/union_remove_14.q
@@ -25,7 +25,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_15.q b/ql/src/test/queries/clientpositive/union_remove_15.q
index 237f2e7..e3c937b 100644
--- a/ql/src/test/queries/clientpositive/union_remove_15.q
+++ b/ql/src/test/queries/clientpositive/union_remove_15.q
@@ -25,7 +25,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1 partition (ds)
diff --git a/ql/src/test/queries/clientpositive/union_remove_16.q b/ql/src/test/queries/clientpositive/union_remove_16.q
index 06d5043..537078b 100644
--- a/ql/src/test/queries/clientpositive/union_remove_16.q
+++ b/ql/src/test/queries/clientpositive/union_remove_16.q
@@ -25,7 +25,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile ;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1 partition (ds)
diff --git a/ql/src/test/queries/clientpositive/union_remove_17.q b/ql/src/test/queries/clientpositive/union_remove_17.q
index 65b8255..d70f3d3 100644
--- a/ql/src/test/queries/clientpositive/union_remove_17.q
+++ b/ql/src/test/queries/clientpositive/union_remove_17.q
@@ -22,7 +22,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1 partition (ds)
diff --git a/ql/src/test/queries/clientpositive/union_remove_18.q b/ql/src/test/queries/clientpositive/union_remove_18.q
index 265acfd..4786500 100644
--- a/ql/src/test/queries/clientpositive/union_remove_18.q
+++ b/ql/src/test/queries/clientpositive/union_remove_18.q
@@ -24,7 +24,7 @@
create table inputTbl1(key string, ds string) stored as textfile;
create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1 partition (ds)
diff --git a/ql/src/test/queries/clientpositive/union_remove_19.q b/ql/src/test/queries/clientpositive/union_remove_19.q
index 1450caa..8c45953 100644
--- a/ql/src/test/queries/clientpositive/union_remove_19.q
+++ b/ql/src/test/queries/clientpositive/union_remove_19.q
@@ -19,7 +19,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_2.q b/ql/src/test/queries/clientpositive/union_remove_2.q
index 015c146..83cd288 100644
--- a/ql/src/test/queries/clientpositive/union_remove_2.q
+++ b/ql/src/test/queries/clientpositive/union_remove_2.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_20.q b/ql/src/test/queries/clientpositive/union_remove_20.q
index ac72788..f80f7c1 100644
--- a/ql/src/test/queries/clientpositive/union_remove_20.q
+++ b/ql/src/test/queries/clientpositive/union_remove_20.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(values bigint, key string) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_21.q b/ql/src/test/queries/clientpositive/union_remove_21.q
index f897446..8963c25 100644
--- a/ql/src/test/queries/clientpositive/union_remove_21.q
+++ b/ql/src/test/queries/clientpositive/union_remove_21.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_22.q b/ql/src/test/queries/clientpositive/union_remove_22.q
index f01053d..b0c1ccd 100644
--- a/ql/src/test/queries/clientpositive/union_remove_22.q
+++ b/ql/src/test/queries/clientpositive/union_remove_22.q
@@ -19,7 +19,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint, values2 bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_23.q b/ql/src/test/queries/clientpositive/union_remove_23.q
index 805dd76..a1b989a 100644
--- a/ql/src/test/queries/clientpositive/union_remove_23.q
+++ b/ql/src/test/queries/clientpositive/union_remove_23.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_24.q b/ql/src/test/queries/clientpositive/union_remove_24.q
index 36fd947..ec561e0 100644
--- a/ql/src/test/queries/clientpositive/union_remove_24.q
+++ b/ql/src/test/queries/clientpositive/union_remove_24.q
@@ -18,7 +18,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key double, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
EXPLAIN
INSERT OVERWRITE TABLE outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_3.q b/ql/src/test/queries/clientpositive/union_remove_3.q
index da0f1c0..9617f73 100644
--- a/ql/src/test/queries/clientpositive/union_remove_3.q
+++ b/ql/src/test/queries/clientpositive/union_remove_3.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_4.q b/ql/src/test/queries/clientpositive/union_remove_4.q
index 18d4730..cae323b 100644
--- a/ql/src/test/queries/clientpositive/union_remove_4.q
+++ b/ql/src/test/queries/clientpositive/union_remove_4.q
@@ -20,7 +20,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_5.q b/ql/src/test/queries/clientpositive/union_remove_5.q
index a6fbeb0..5df84e1 100644
--- a/ql/src/test/queries/clientpositive/union_remove_5.q
+++ b/ql/src/test/queries/clientpositive/union_remove_5.q
@@ -22,7 +22,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_6.q b/ql/src/test/queries/clientpositive/union_remove_6.q
index 7ae5af3..bfce26d 100644
--- a/ql/src/test/queries/clientpositive/union_remove_6.q
+++ b/ql/src/test/queries/clientpositive/union_remove_6.q
@@ -17,7 +17,7 @@
create table outputTbl1(key string, values bigint) stored as textfile;
create table outputTbl2(key string, values bigint) stored as textfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
FROM (
diff --git a/ql/src/test/queries/clientpositive/union_remove_7.q b/ql/src/test/queries/clientpositive/union_remove_7.q
index 5a639ca..3a95674 100644
--- a/ql/src/test/queries/clientpositive/union_remove_7.q
+++ b/ql/src/test/queries/clientpositive/union_remove_7.q
@@ -21,7 +21,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_8.q b/ql/src/test/queries/clientpositive/union_remove_8.q
index 79b84e8..a83a43e 100644
--- a/ql/src/test/queries/clientpositive/union_remove_8.q
+++ b/ql/src/test/queries/clientpositive/union_remove_8.q
@@ -22,7 +22,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/union_remove_9.q b/ql/src/test/queries/clientpositive/union_remove_9.q
index f603838..e71f6dd 100644
--- a/ql/src/test/queries/clientpositive/union_remove_9.q
+++ b/ql/src/test/queries/clientpositive/union_remove_9.q
@@ -22,7 +22,7 @@
create table inputTbl1(key string, val string) stored as textfile;
create table outputTbl1(key string, values bigint) stored as rcfile;
-load data local inpath '../data/files/T1.txt' into table inputTbl1;
+load data local inpath '../../data/files/T1.txt' into table inputTbl1;
explain
insert overwrite table outputTbl1
diff --git a/ql/src/test/queries/clientpositive/uniquejoin.q b/ql/src/test/queries/clientpositive/uniquejoin.q
index 51bcf22..3bc8ef9 100644
--- a/ql/src/test/queries/clientpositive/uniquejoin.q
+++ b/ql/src/test/queries/clientpositive/uniquejoin.q
@@ -2,9 +2,9 @@
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
-LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
-LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3;
FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
SELECT a.key, b.key, c.key;
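-- A hedged gloss on the construct above: UNIQUEJOIN joins on the listed key
-- expressions, and PRESERVE keeps a table's rows even when its key is absent
-- from the other inputs, much like a multi-way full outer join; omitting
-- PRESERVE on a table makes its participation inner, e.g.:
FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key)
SELECT a.key, b.key;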
diff --git a/ql/src/test/queries/clientpositive/varchar_1.q b/ql/src/test/queries/clientpositive/varchar_1.q
index 53273b3..bed61f3 100644
--- a/ql/src/test/queries/clientpositive/varchar_1.q
+++ b/ql/src/test/queries/clientpositive/varchar_1.q
@@ -5,7 +5,7 @@
create table varchar1_1 (key string, value string);
-- load from file
-load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar1;
+load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar1;
select * from varchar1 order by key, value limit 2;
-- insert overwrite, from same/different length varchar
diff --git a/ql/src/test/queries/clientpositive/varchar_join1.q b/ql/src/test/queries/clientpositive/varchar_join1.q
index 6a19efa..9422687 100644
--- a/ql/src/test/queries/clientpositive/varchar_join1.q
+++ b/ql/src/test/queries/clientpositive/varchar_join1.q
@@ -17,9 +17,9 @@
c2 string
);
-load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc1;
-load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc2;
-load data local inpath '../data/files/vc1.txt' into table varchar_join1_str;
+load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc1;
+load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc2;
+load data local inpath '../../data/files/vc1.txt' into table varchar_join1_str;
-- Join varchar with same length varchar
select * from varchar_join1_vc1 a join varchar_join1_vc1 b on (a.c2 = b.c2) order by a.c1;
diff --git a/ql/src/test/queries/clientpositive/varchar_serde.q b/ql/src/test/queries/clientpositive/varchar_serde.q
index 7351b68..ea2a022 100644
--- a/ql/src/test/queries/clientpositive/varchar_serde.q
+++ b/ql/src/test/queries/clientpositive/varchar_serde.q
@@ -18,7 +18,7 @@
)
stored as textfile;
-load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex;
+load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex;
select * from varchar_serde_regex limit 5;
select value, count(*) from varchar_serde_regex group by value limit 5;
diff --git a/ql/src/test/queries/clientpositive/varchar_union1.q b/ql/src/test/queries/clientpositive/varchar_union1.q
index cf90eab..dd3cffe 100644
--- a/ql/src/test/queries/clientpositive/varchar_union1.q
+++ b/ql/src/test/queries/clientpositive/varchar_union1.q
@@ -17,9 +17,9 @@
c2 string
);
-load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc1;
-load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc2;
-load data local inpath '../data/files/vc1.txt' into table varchar_union1_str;
+load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc1;
+load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc2;
+load data local inpath '../../data/files/vc1.txt' into table varchar_union1_str;
-- union varchar with same length varchar
select * from (
diff --git a/ql/src/test/queries/clientpositive/view.q b/ql/src/test/queries/clientpositive/view.q
index 4e3d057..bc19355 100644
--- a/ql/src/test/queries/clientpositive/view.q
+++ b/ql/src/test/queries/clientpositive/view.q
@@ -4,13 +4,13 @@
CREATE TABLE table1 (key STRING, value STRING)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table1;
CREATE TABLE table2 (key STRING, value STRING)
STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table2;
-- relative reference, no alias
diff --git a/ql/src/test/queries/clientpositive/view_cast.q b/ql/src/test/queries/clientpositive/view_cast.q
index b0b078e..95517c3 100644
--- a/ql/src/test/queries/clientpositive/view_cast.q
+++ b/ql/src/test/queries/clientpositive/view_cast.q
@@ -1,11 +1,11 @@
DROP TABLE IF EXISTS atab;
CREATE TABLE IF NOT EXISTS atab (ks_uid BIGINT, sr_uid STRING, sr_id STRING, tstamp STRING, m_id STRING, act STRING, at_sr_uid STRING, tstamp_type STRING, original_m_id STRING, original_tstamp STRING, registered_flag TINYINT, at_ks_uid BIGINT) PARTITIONED BY (dt STRING,nt STRING);
-LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw');
-LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw');
+LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw');
+LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw');
DROP TABLE IF EXISTS mstab;
CREATE TABLE mstab(ks_uid INT, csc INT) PARTITIONED BY (dt STRING);
-LOAD DATA LOCAL INPATH '../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311');
+LOAD DATA LOCAL INPATH '../../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311');
DROP VIEW IF EXISTS aa_view_tw;
CREATE VIEW aa_view_tw AS SELECT ks_uid, sr_id, act, at_ks_uid, at_sr_uid, from_unixtime(CAST(CAST( tstamp as BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS act_date, from_unixtime(CAST(CAST( original_tstamp AS BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS content_creation_date FROM atab WHERE dt='20130312' AND nt='tw' AND ks_uid != at_ks_uid;
diff --git a/ql/src/test/queries/clientpositive/windowing.q b/ql/src/test/queries/clientpositive/windowing.q
index a7297db..2f22145 100644
--- a/ql/src/test/queries/clientpositive/windowing.q
+++ b/ql/src/test/queries/clientpositive/windowing.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- 1. testWindowing
select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/queries/clientpositive/windowing_adjust_rowcontainer_sz.q b/ql/src/test/queries/clientpositive/windowing_adjust_rowcontainer_sz.q
index 9c7625d..67cab9f 100644
--- a/ql/src/test/queries/clientpositive/windowing_adjust_rowcontainer_sz.q
+++ b/ql/src/test/queries/clientpositive/windowing_adjust_rowcontainer_sz.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
set hive.join.cache.size=1;
diff --git a/ql/src/test/queries/clientpositive/windowing_columnPruning.q b/ql/src/test/queries/clientpositive/windowing_columnPruning.q
index 7c4ab38..24f9ff7 100644
--- a/ql/src/test/queries/clientpositive/windowing_columnPruning.q
+++ b/ql/src/test/queries/clientpositive/windowing_columnPruning.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-- 1. testQueryLevelPartitionColsNotInSelect
select p_size,
diff --git a/ql/src/test/queries/clientpositive/windowing_expressions.q b/ql/src/test/queries/clientpositive/windowing_expressions.q
index 1df630d..7e27c6b 100644
--- a/ql/src/test/queries/clientpositive/windowing_expressions.q
+++ b/ql/src/test/queries/clientpositive/windowing_expressions.q
@@ -13,7 +13,7 @@
p_comment STRING
);
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
drop table over10k;
@@ -32,7 +32,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select p_mfgr, p_retailprice, p_size,
round(sum(p_retailprice) over w1 , 2) = round(sum(lag(p_retailprice,1,0.0)) over w1 + last_value(p_retailprice) over w1 , 2),
diff --git a/ql/src/test/queries/clientpositive/windowing_multipartitioning.q b/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
index f023a7b..1c6e1aa 100644
--- a/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
+++ b/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select s, rank() over (partition by s order by si), sum(b) over (partition by s order by si) from over10k limit 100;
diff --git a/ql/src/test/queries/clientpositive/windowing_navfn.q b/ql/src/test/queries/clientpositive/windowing_navfn.q
index 62bfda9..05da2ba 100644
--- a/ql/src/test/queries/clientpositive/windowing_navfn.q
+++ b/ql/src/test/queries/clientpositive/windowing_navfn.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select s, row_number() over (partition by d order by dec) from over10k limit 100;
diff --git a/ql/src/test/queries/clientpositive/windowing_ntile.q b/ql/src/test/queries/clientpositive/windowing_ntile.q
index 4487bd9..73e8192 100644
--- a/ql/src/test/queries/clientpositive/windowing_ntile.q
+++ b/ql/src/test/queries/clientpositive/windowing_ntile.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select i, ntile(10) over (partition by s order by i) from over10k limit 100;
diff --git a/ql/src/test/queries/clientpositive/windowing_rank.q b/ql/src/test/queries/clientpositive/windowing_rank.q
index ea80410..4b95117 100644
--- a/ql/src/test/queries/clientpositive/windowing_rank.q
+++ b/ql/src/test/queries/clientpositive/windowing_rank.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select s, rank() over (partition by f order by t) from over10k limit 100;
diff --git a/ql/src/test/queries/clientpositive/windowing_udaf.q b/ql/src/test/queries/clientpositive/windowing_udaf.q
index f22b992..0173ab7 100644
--- a/ql/src/test/queries/clientpositive/windowing_udaf.q
+++ b/ql/src/test/queries/clientpositive/windowing_udaf.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select s, min(i) over (partition by s) from over10k limit 100;
diff --git a/ql/src/test/queries/clientpositive/windowing_windowspec.q b/ql/src/test/queries/clientpositive/windowing_windowspec.q
index 7cc1367..6d8ce67 100644
--- a/ql/src/test/queries/clientpositive/windowing_windowspec.q
+++ b/ql/src/test/queries/clientpositive/windowing_windowspec.q
@@ -15,7 +15,7 @@
row format delimited
fields terminated by '|';
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
select s, sum(b) over (partition by i order by s,b rows unbounded preceding) from over10k limit 100;
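The query-file updates end here; everything below adjusts the matching golden .q.out files. A quick sanity check on the new depth is to resolve a couple of the rewritten paths against the directory the tests are assumed to run from; ql/target as the working directory is an assumption about the Maven test harness, not something this patch states:

    import pathlib

    workdir = pathlib.Path("ql/target")  # hypothetical test working directory
    for rel in ("../../data/files/over10k", "../../data/files/part_tiny.txt"):
        resolved = (workdir / rel).resolve()
        # Both should land in <checkout root>/data/files if the depth is right.
        print(rel, "->", resolved, "exists:", resolved.exists())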
diff --git a/ql/src/test/results/beelinepositive/input37.q.out b/ql/src/test/results/beelinepositive/input37.q.out
index e211c2b..676b72d 100644
--- a/ql/src/test/results/beelinepositive/input37.q.out
+++ b/ql/src/test/results/beelinepositive/input37.q.out
@@ -12,7 +12,7 @@
(
FROM documents
MAP documents.contents
-USING 'java -cp ../build/ql/test/classes org.apache.hadoop.hive.scripts.extracturl' AS (url, count)
+USING 'java -cp ../util/target/classes/ org.apache.hadoop.hive.scripts.extracturl' AS (url, count)
) subq
group by url;
'url','_c1'
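This result file changes for a different reason than the data-path hunks: the streaming script's classpath moves from the Ant output tree (../build/ql/test/classes) to a per-module Maven output tree (../util/target/classes/). A hedged check that the helper class landed where the new classpath expects it, assuming extracturl compiles into the util module under the standard Maven layout:

    import pathlib

    # Assumed location under Maven conventions; the class and package names
    # are taken from the query text above.
    cls = pathlib.Path(
        "util/target/classes/org/apache/hadoop/hive/scripts/extracturl.class")
    print("extracturl on the new classpath:", cls.exists())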
diff --git a/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out b/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out
index 4efa8fb..1fcb213 100644
--- a/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out
+++ b/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out
@@ -3,22 +3,22 @@
POSTHOOK: query: create table src_rc_concatenate_test(key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
PREHOOK: query: show table extended like `src_rc_concatenate_test`
diff --git a/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out b/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out
index d9f6c37..d16123c 100644
--- a/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out
+++ b/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out
@@ -6,18 +6,18 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_part_invalidspec
PREHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_invalidspec
POSTHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_invalidspec
POSTHOOK: Output: default@alter_part_invalidspec@year=1996/month=10
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_invalidspec
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_invalidspec
POSTHOOK: Output: default@alter_part_invalidspec@year=1996/month=12
diff --git a/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out b/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out
index bf1c3fd..6580190 100644
--- a/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out
+++ b/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out
@@ -6,18 +6,18 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_part_nodrop_part
PREHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_nodrop_part
POSTHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_nodrop_part
POSTHOOK: Output: default@alter_part_nodrop_part@year=1996/month=10
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_nodrop_part
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_nodrop_part
POSTHOOK: Output: default@alter_part_nodrop_part@year=1996/month=12
diff --git a/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out b/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
index 1cf067a..742031e 100644
--- a/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
+++ b/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
@@ -6,18 +6,18 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_part_nodrop_table
PREHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_nodrop_table
POSTHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_nodrop_table
POSTHOOK: Output: default@alter_part_nodrop_table@year=1996/month=10
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_nodrop_table
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_nodrop_table
POSTHOOK: Output: default@alter_part_nodrop_table@year=1996/month=12
diff --git a/ql/src/test/results/clientnegative/alter_partition_offline.q.out b/ql/src/test/results/clientnegative/alter_partition_offline.q.out
index cc9816f..7a14bf2 100644
--- a/ql/src/test/results/clientnegative/alter_partition_offline.q.out
+++ b/ql/src/test/results/clientnegative/alter_partition_offline.q.out
@@ -6,18 +6,18 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_part_offline
PREHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_offline
POSTHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_offline
POSTHOOK: Output: default@alter_part_offline@year=1996/month=10
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_offline
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_offline
POSTHOOK: Output: default@alter_part_offline@year=1996/month=12
diff --git a/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out b/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
index c26234c..1ba491e 100644
--- a/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
+++ b/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_rename_partition_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_rename_partition_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_rename_partition_src
PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
diff --git a/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out b/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
index 2a5f921..743a3ea 100644
--- a/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
+++ b/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_rename_partition_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_rename_partition_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_rename_partition_src
PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
diff --git a/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out b/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
index 384fcbe..458b3d6 100644
--- a/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
+++ b/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_rename_partition_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_rename_partition_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_rename_partition_src
PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
diff --git a/ql/src/test/results/clientnegative/archive_corrupt.q.out b/ql/src/test/results/clientnegative/archive_corrupt.q.out
index de2f7dd..a3f3cb0 100644
--- a/ql/src/test/results/clientnegative/archive_corrupt.q.out
+++ b/ql/src/test/results/clientnegative/archive_corrupt.q.out
@@ -18,7 +18,7 @@
-- to be thrown during the LOAD step. This former behavior is tested
-- in clientpositive/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstsrcpart
Failed with exception Wrong file format. Please check the file's format.
diff --git a/ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out b/ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out
index 4ff4b31..6e0c382 100644
--- a/ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out
+++ b/ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out
@@ -7,28 +7,28 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt'
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt'
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt'
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt'
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt'
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt'
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
@@ -41,20 +41,20 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt'
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt'
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt'
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt'
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
diff --git a/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q.out b/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q.out
index ccb9b46..80d97f9 100644
--- a/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q.out
+++ b/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q.out
@@ -24,22 +24,22 @@
into 2 BUCKETS stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
FAILED: SemanticException [Error 10141]: Bucketed table metadata is not correct. Fix the metadata or don't use bucketed mapjoin, by setting hive.enforce.bucketmapjoin to false. The number of buckets for table table1 is 2, whereas the number of files is 1
diff --git a/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q.out b/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q.out
index 5dc7241..099b8fa 100644
--- a/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q.out
+++ b/ql/src/test/results/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q.out
@@ -24,36 +24,36 @@
into 2 BUCKETS stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='1')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='1')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
POSTHOOK: Output: default@table1@ds=1
-PREHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table1 partition (ds='1')
+PREHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table1 partition (ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@table1@ds=1
-POSTHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table1 partition (ds='1')
+POSTHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table1 partition (ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1@ds=1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='2')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='2')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
POSTHOOK: Output: default@table1@ds=2
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/T2.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/T2.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
FAILED: SemanticException [Error 10141]: Bucketed table metadata is not correct. Fix the metadata or don't use bucketed mapjoin, by setting hive.enforce.bucketmapjoin to false. The number of buckets for table table1 partition ds=1 is 2, whereas the number of files is 1
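As the hunks above show, golden files echo each statement twice (once under PREHOOK and once under POSTHOOK), so every rewritten LOAD appears as a paired change. A small linter sketch to confirm no golden file still carries the stale one-level path, assuming ql/src/test/results is the root of the expected-output tree:

    import pathlib

    results = pathlib.Path("ql/src/test/results")  # assumed golden-file root
    stale = []
    for out in results.rglob("*.q.out"):
        text = out.read_text(errors="ignore")
        if "'../data/files/" in text or '"../data/files/' in text:
            stale.append(out)
    # Any hit here means the path rewrite missed a golden file.
    print("golden files with stale paths:", len(stale))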
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out b/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out
index 715ad6b..a306dd6 100644
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out
@@ -9,45 +9,45 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out b/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out
index 444e6e3..46c74bd 100644
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out
@@ -9,45 +9,45 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
index 23f90bb..2bb6c2f 100644
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
@@ -9,45 +9,45 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out b/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out
index 1bda7fa..56a085f 100644
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out
@@ -9,45 +9,45 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
diff --git a/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out b/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out
index 4957557..071d747 100644
--- a/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out
@@ -27,10 +27,10 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@UserVisits_web_text_none
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
PREHOOK: type: LOAD
PREHOOK: Output: default@uservisits_web_text_none
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
POSTHOOK: type: LOAD
POSTHOOK: Output: default@uservisits_web_text_none
FAILED: SemanticException [Error 10002]: Invalid column reference 'destIP' (possible columns are [sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite])
diff --git a/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out b/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out
index ac82bde..5662f2a 100644
--- a/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out
@@ -17,10 +17,10 @@
) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table_complex_type
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
PREHOOK: type: LOAD
PREHOOK: Output: default@table_complex_type
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table_complex_type
FAILED: UDFArgumentTypeException Only primitive type arguments are accepted but map<string,array<string>> is passed.
diff --git a/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out b/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out
index 4957557..071d747 100644
--- a/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out
+++ b/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out
@@ -27,10 +27,10 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@UserVisits_web_text_none
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
PREHOOK: type: LOAD
PREHOOK: Output: default@uservisits_web_text_none
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
POSTHOOK: type: LOAD
POSTHOOK: Output: default@uservisits_web_text_none
FAILED: SemanticException [Error 10002]: Invalid column reference 'destIP' (possible columns are [sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite])
diff --git a/ql/src/test/results/clientnegative/compute_stats_long.q.out b/ql/src/test/results/clientnegative/compute_stats_long.q.out
index f4ad813..7d7abf3 100644
--- a/ql/src/test/results/clientnegative/compute_stats_long.q.out
+++ b/ql/src/test/results/clientnegative/compute_stats_long.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_int
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_int
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_int
PREHOOK: query: -- compute stats should raise an error since the number of bit vectors > 1024
diff --git a/ql/src/test/results/clientnegative/dynamic_partitions_with_whitelist.q.out b/ql/src/test/results/clientnegative/dynamic_partitions_with_whitelist.q.out
index 9f03796..3dd2121 100644
--- a/ql/src/test/results/clientnegative/dynamic_partitions_with_whitelist.q.out
+++ b/ql/src/test/results/clientnegative/dynamic_partitions_with_whitelist.q.out
@@ -8,10 +8,10 @@
POSTHOOK: query: create table dest_table like srcpart
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest_table
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@source_table
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@source_table
POSTHOOK: Output: default@source_table@ds=2008-04-08/hr=11
diff --git a/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out b/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
index 2c5186d..6d87826 100644
--- a/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
+++ b/ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out b/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
index d6773bd..0c49c45 100644
--- a/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
+++ b/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
@@ -47,10 +47,10 @@
tblproperties("maker"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_department
FAILED: SemanticException [Error 10119]: Table exists and contains data files
diff --git a/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out b/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
index 30013fe..ef3380f 100644
--- a/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
+++ b/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
@@ -91,11 +91,11 @@
tblproperties("maker"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
diff --git a/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out b/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
index 31eb236..10dee16 100644
--- a/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
+++ b/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out b/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
index a59e7d3..518a9a1 100644
--- a/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
+++ b/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out b/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
index b3705db..ad295c2 100644
--- a/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
+++ b/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out b/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
index 170a0f5..dc3e782 100644
--- a/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
+++ b/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out b/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
index ec297a7..2d1283e 100644
--- a/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
+++ b/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out b/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
index 01df09b..864cf24 100644
--- a/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
+++ b/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out b/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
index c9c0f13..78a6eb8 100644
--- a/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
+++ b/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out b/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
index eb837a0..2d8e411 100644
--- a/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
+++ b/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out b/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
index 1cad51f..ec65f5e 100644
--- a/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
+++ b/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
@@ -9,10 +9,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out b/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
index ba66fe3..f76e8f6 100644
--- a/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
+++ b/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out b/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
index 87767ed..8bc96e1 100644
--- a/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
+++ b/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out b/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
index f360e13..f70f1ef 100644
--- a/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
+++ b/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
@@ -9,10 +9,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
POSTHOOK: Output: default@exim_department@dep_org=hr
diff --git a/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out b/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
index 672a3ed..0049759 100644
--- a/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
+++ b/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
@@ -9,10 +9,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (dep_org="hr")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
POSTHOOK: Output: default@exim_department@dep_org=hr
diff --git a/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out b/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
index 3fe1106..b833fcf 100644
--- a/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
+++ b/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out b/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
index 3fe1106..b833fcf 100644
--- a/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
+++ b/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out b/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
index 1ac02f5..985704d 100644
--- a/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
+++ b/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out b/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
index 79534c3..9914351 100644
--- a/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
+++ b/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out b/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
index 3e4894b..11c158d 100644
--- a/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
+++ b/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out b/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
index cad6c18..6260433 100644
--- a/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
+++ b/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out b/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
index 2dcb355..e43c4d2 100644
--- a/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
+++ b/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
@@ -11,11 +11,11 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out b/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
index 48df452..db4578f 100644
--- a/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
+++ b/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out b/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out
index 72ba241..7447a28 100644
--- a/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out
+++ b/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out
@@ -7,10 +7,10 @@
VALUE STRING) STORED AS SEQUENCEFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@fetchtask_ioexception
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
PREHOOK: type: LOAD
PREHOOK: Output: default@fetchtask_ioexception
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
POSTHOOK: type: LOAD
POSTHOOK: Output: default@fetchtask_ioexception
PREHOOK: query: SELECT * FROM fetchtask_ioexception
diff --git a/ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out b/ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out
index 2b61605..3516e93 100644
--- a/ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out
+++ b/ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out
@@ -8,10 +8,10 @@
POSTHOOK: query: create table destpart_dp like srcpart
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@destpart_dp
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_dp
POSTHOOK: Output: default@srcpart_dp@ds=2008-04-08/hr=11
diff --git a/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out b/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
index c02adea..da65353 100644
--- a/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
+++ b/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
@@ -11,7 +11,7 @@
CREATE TABLE load_wrong_fileformat_T1(name STRING) STORED AS SEQUENCEFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@load_wrong_fileformat_T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE load_wrong_fileformat_T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE load_wrong_fileformat_T1
PREHOOK: type: LOAD
PREHOOK: Output: default@load_wrong_fileformat_t1
Failed with exception Wrong file format. Please check the file's format.
diff --git a/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out b/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out
index 2303bdf..1c6a759 100644
--- a/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out
+++ b/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out
@@ -11,7 +11,7 @@
CREATE TABLE T1(name STRING) STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
Failed with exception Wrong file format. Please check the file's format.
diff --git a/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out b/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out
index 25eddfc..1c455e1 100644
--- a/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out
+++ b/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out
@@ -11,7 +11,7 @@
CREATE TABLE T1(name STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
Failed with exception Wrong file format. Please check the file's format.
diff --git a/ql/src/test/results/clientnegative/load_wrong_noof_part.q.out b/ql/src/test/results/clientnegative/load_wrong_noof_part.q.out
index d05e57b..bfab09f 100644
--- a/ql/src/test/results/clientnegative/load_wrong_noof_part.q.out
+++ b/ql/src/test/results/clientnegative/load_wrong_noof_part.q.out
@@ -3,4 +3,4 @@
POSTHOOK: query: CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING,ds1 STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@loadpart1
-FAILED: SemanticException [Error 10006]: Line 2:79 Partition not found ''2009-05-05''
+FAILED: SemanticException [Error 10006]: Line 2:82 Partition not found ''2009-05-05''
diff --git a/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out b/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
index 1b9f5a6..60df1cd 100644
--- a/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
+++ b/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value)
+PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'python ../../data/scripts/cat_error.py' AS (key, value)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/protectmode_part2.q.out b/ql/src/test/results/clientnegative/protectmode_part2.q.out
index dd1940d..3865b09 100644
--- a/ql/src/test/results/clientnegative/protectmode_part2.q.out
+++ b/ql/src/test/results/clientnegative/protectmode_part2.q.out
@@ -18,10 +18,10 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@tbl_protectmode6
POSTHOOK: Output: default@tbl_protectmode6@p=p1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1')
PREHOOK: type: LOAD
PREHOOK: Output: default@tbl_protectmode6@p=p1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tbl_protectmode6@p=p1
PREHOOK: query: alter table tbl_protectmode6 partition (p='p1') enable offline
diff --git a/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out b/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out
index 8a06d82..41e83c5 100644
--- a/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out
+++ b/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out
@@ -27,10 +27,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
FAILED: SemanticException [Error 10025]: Line 4:7 Expression not in GROUP BY key 'p_mfgr'
diff --git a/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out b/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out
index cc02d3b..1e50755 100644
--- a/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out
+++ b/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out
@@ -27,10 +27,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
FAILED: SemanticException Cycle in Window references [w3, w3]
diff --git a/ql/src/test/results/clientnegative/script_error.q.out b/ql/src/test/results/clientnegative/script_error.q.out
index 5dad1cd..3d5c84b 100644
--- a/ql/src/test/results/clientnegative/script_error.q.out
+++ b/ql/src/test/results/clientnegative/script_error.q.out
@@ -1,13 +1,13 @@
PREHOOK: query: EXPLAIN
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '../data/scripts/error_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue))))))
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '../../data/scripts/error_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue))))))
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -28,7 +28,7 @@
type: string
outputColumnNames: _col0, _col1
Transform Operator
- command: ../data/scripts/error_script
+ command: ../../data/scripts/error_script
output info:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -46,7 +46,7 @@
limit: -1
-PREHOOK: query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+PREHOOK: query: SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
diff --git a/ql/src/test/results/clientnegative/serde_regex2.q.out b/ql/src/test/results/clientnegative/serde_regex2.q.out
index a19dcd8..e9e452d 100644
--- a/ql/src/test/results/clientnegative/serde_regex2.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex2.q.out
@@ -37,16 +37,16 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
PREHOOK: query: -- raise an exception
diff --git a/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out b/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
index c7de7b2..3cc2fa4 100644
--- a/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
+++ b/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
FAILED: SemanticException Line 9:8 Unsupported SubQuery Expression '1' in definition of SubQuery sq_1 [
diff --git a/ql/src/test/results/clientnegative/udfnull.q.out b/ql/src/test/results/clientnegative/udfnull.q.out
deleted file mode 100644
index 06b63e7..0000000
--- a/ql/src/test/results/clientnegative/udfnull.q.out
+++ /dev/null
@@ -1,19 +0,0 @@
-PREHOOK: query: CREATE TEMPORARY FUNCTION example_arraysum AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'
-PREHOOK: type: CREATEFUNCTION
-POSTHOOK: query: CREATE TEMPORARY FUNCTION example_arraysum AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'
-POSTHOOK: type: CREATEFUNCTION
-PREHOOK: query: SELECT example_arraysum(lint)FROM src_thrift
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-#### A masked pattern was here ####
-Execution failed with exit status: 2
-Obtaining error information
-
-Task failed!
-Task ID:
- Stage-1
-
-Logs:
-
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
diff --git a/ql/src/test/results/clientnegative/windowing_ll_no_neg.q.out b/ql/src/test/results/clientnegative/windowing_ll_no_neg.q.out
index bde437d..240b04e 100644
--- a/ql/src/test/results/clientnegative/windowing_ll_no_neg.q.out
+++ b/ql/src/test/results/clientnegative/windowing_ll_no_neg.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
FAILED: SemanticException Failed to breakup Windowing invocations into Groups. At least 1 group must only depend on input columns. Also check for circular dependencies.
diff --git a/ql/src/test/results/clientpositive/alter3.q.out b/ql/src/test/results/clientpositive/alter3.q.out
index aeb366e..96e8191 100644
--- a/ql/src/test/results/clientpositive/alter3.q.out
+++ b/ql/src/test/results/clientpositive/alter3.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table alter3_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter3_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter3_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter3_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter3_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter3_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter3_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter3_src
PREHOOK: query: create table alter3 ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
@@ -222,10 +222,10 @@
POSTHOOK: Output: alter3_db@alter3_src
POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part:,pcol2=test_part:).col1 SIMPLE [(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
POSTHOOK: Lineage: alter3_like PARTITION(pcol1=test_part:,pcol2=test_part:).col1 SIMPLE [(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter3_src
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter3_src
PREHOOK: type: LOAD
PREHOOK: Output: alter3_db@alter3_src
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter3_src
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter3_src
POSTHOOK: type: LOAD
POSTHOOK: Output: alter3_db@alter3_src
POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part:,pcol2=test_part:).col1 SIMPLE [(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/alter5.q.out b/ql/src/test/results/clientpositive/alter5.q.out
index 2af1cd3..0241a62 100644
--- a/ql/src/test/results/clientpositive/alter5.q.out
+++ b/ql/src/test/results/clientpositive/alter5.q.out
@@ -11,10 +11,10 @@
create table alter5_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter5_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter5_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter5_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter5_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter5_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter5_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter5_src
PREHOOK: query: create table alter5 ( col1 string ) partitioned by (dt string)
@@ -149,10 +149,10 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: alter5_db@alter5_src
POSTHOOK: Lineage: alter5 PARTITION(dt=a).col1 SIMPLE [(alter5_src)alter5_src.FieldSchema(name:col1, type:string, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter5_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter5_src
PREHOOK: type: LOAD
PREHOOK: Output: alter5_db@alter5_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter5_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter5_src
POSTHOOK: type: LOAD
POSTHOOK: Output: alter5_db@alter5_src
POSTHOOK: Lineage: alter5 PARTITION(dt=a).col1 SIMPLE [(alter5_src)alter5_src.FieldSchema(name:col1, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out b/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out
index 7bd5d61..570649e 100644
--- a/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out
+++ b/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out
@@ -3,22 +3,22 @@
POSTHOOK: query: create table src_rc_concatenate_test(key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test
PREHOOK: query: show table extended like `src_rc_concatenate_test`
@@ -125,22 +125,22 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@src_rc_concatenate_test_part
POSTHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_concatenate_test_part@ds=2011
PREHOOK: query: show table extended like `src_rc_concatenate_test_part` partition (ds='2011')
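The `smbbucket_*.rc` loads above (and the near-identical ones in alter_merge.q.out below) stage several small RCFiles so the tests can verify file-merging DDL; the `show table extended` calls bracket that step because their output includes the file count. The statements these tests go on to run are presumably of this form, a hedged sketch since the ALTER itself falls outside these hunks:

    ALTER TABLE src_rc_concatenate_test CONCATENATE;
    ALTER TABLE src_rc_concatenate_test_part PARTITION (ds='2011') CONCATENATE;
    SHOW TABLE EXTENDED LIKE `src_rc_concatenate_test`;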
diff --git a/ql/src/test/results/clientpositive/alter_merge.q.out b/ql/src/test/results/clientpositive/alter_merge.q.out
index 6109b45..c296b81 100644
--- a/ql/src/test/results/clientpositive/alter_merge.q.out
+++ b/ql/src/test/results/clientpositive/alter_merge.q.out
@@ -3,22 +3,22 @@
POSTHOOK: query: create table src_rc_merge_test(key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_rc_merge_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test
PREHOOK: query: show table extended like `src_rc_merge_test`
@@ -111,22 +111,22 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@src_rc_merge_test_part
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2011
PREHOOK: query: show table extended like `src_rc_merge_test_part` partition (ds='2011')
diff --git a/ql/src/test/results/clientpositive/alter_merge_2.q.out b/ql/src/test/results/clientpositive/alter_merge_2.q.out
index fe71635..1081b93 100644
--- a/ql/src/test/results/clientpositive/alter_merge_2.q.out
+++ b/ql/src/test/results/clientpositive/alter_merge_2.q.out
@@ -26,22 +26,22 @@
ts string None
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part partition (ds='2012-01-03', ts='2012-01-03+14:46:31')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part@ds=2012-01-03/ts=2012-01-03+14%3A46%3A31
PREHOOK: query: select count(1) from src_rc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31'
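Note how the partition value `ts='2012-01-03+14:46:31'` appears in the PREHOOK/POSTHOOK `Output:` lines as `ts=2012-01-03+14%3A46%3A31`: when a partition value becomes a warehouse directory name, Hive percent-encodes characters that are unsafe in a path segment (here the colons), so the query text and the on-disk partition path legitimately differ. Adding the partition by hand yields the same escaped directory:

    ALTER TABLE src_rc_merge_test_part ADD PARTITION (ds='2012-01-03', ts='2012-01-03+14:46:31');
    -- stored on disk as .../ds=2012-01-03/ts=2012-01-03+14%3A46%3A31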
diff --git a/ql/src/test/results/clientpositive/alter_merge_stats.q.out b/ql/src/test/results/clientpositive/alter_merge_stats.q.out
index 2746de2..dcdf2b1 100644
--- a/ql/src/test/results/clientpositive/alter_merge_stats.q.out
+++ b/ql/src/test/results/clientpositive/alter_merge_stats.q.out
@@ -3,22 +3,22 @@
POSTHOOK: query: create table src_rc_merge_test_stat(key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_rc_merge_test_stat
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_stat
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_stat
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_stat
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_stat
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_stat
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_stat
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_stat
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_stat
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_stat
PREHOOK: query: show table extended like `src_rc_merge_test_stat`
@@ -107,22 +107,22 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@src_rc_merge_test_part_stat
POSTHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_merge_test_part_stat partition (ds='2011')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test_part_stat@ds=2011
PREHOOK: query: show table extended like `src_rc_merge_test_part_stat` partition (ds='2011')
diff --git a/ql/src/test/results/clientpositive/alter_partition_protect_mode.q.out b/ql/src/test/results/clientpositive/alter_partition_protect_mode.q.out
index 45cfe20..fc9a67a 100644
--- a/ql/src/test/results/clientpositive/alter_partition_protect_mode.q.out
+++ b/ql/src/test/results/clientpositive/alter_partition_protect_mode.q.out
@@ -6,32 +6,32 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_part_protect_mode
PREHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_protect_mode
POSTHOOK: query: -- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10')
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='10')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_protect_mode
POSTHOOK: Output: default@alter_part_protect_mode@year=1996/month=10
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_protect_mode
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1996', month='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_protect_mode
POSTHOOK: Output: default@alter_part_protect_mode@year=1996/month=12
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_protect_mode
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1995', month='09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_protect_mode
POSTHOOK: Output: default@alter_part_protect_mode@year=1995/month=09
-PREHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07')
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07')
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_part_protect_mode
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07')
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_protect_mode partition (year='1994', month='07')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_part_protect_mode
POSTHOOK: Output: default@alter_part_protect_mode@year=1994/month=07
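These four loads populate `alter_part_protect_mode` across year/month partitions purely as setup; the protect-mode assertions sit later in the golden file. For orientation, the partition protect-mode syntax the test name refers to looks like this, a hedged sketch since those statements are outside this hunk:

    ALTER TABLE alter_part_protect_mode PARTITION (year='1996', month='10') ENABLE NO_DROP;
    ALTER TABLE alter_part_protect_mode PARTITION (year='1996', month='10') ENABLE OFFLINE;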
diff --git a/ql/src/test/results/clientpositive/alter_rename_partition.q.out b/ql/src/test/results/clientpositive/alter_rename_partition.q.out
index 35474c9..9808773 100644
--- a/ql/src/test/results/clientpositive/alter_rename_partition.q.out
+++ b/ql/src/test/results/clientpositive/alter_rename_partition.q.out
@@ -26,10 +26,10 @@
POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@alter_rename_partition_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
PREHOOK: type: LOAD
PREHOOK: Output: default@alter_rename_partition_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@alter_rename_partition_src
PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
@@ -165,10 +165,10 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: alter_rename_partition_db@alter_rename_partition_src
POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1:,pcol2=old_part2:).col1 SIMPLE [(alter_rename_partition_src)alter_rename_partition_src.FieldSchema(name:col1, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src
PREHOOK: type: LOAD
PREHOOK: Output: alter_rename_partition_db@alter_rename_partition_src
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE alter_rename_partition_src
POSTHOOK: type: LOAD
POSTHOOK: Output: alter_rename_partition_db@alter_rename_partition_src
POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1:,pcol2=old_part2:).col1 SIMPLE [(alter_rename_partition_src)alter_rename_partition_src.FieldSchema(name:col1, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/archive_corrupt.q.out b/ql/src/test/results/clientpositive/archive_corrupt.q.out
index 0a9d722..8aba24e 100644
--- a/ql/src/test/results/clientpositive/archive_corrupt.q.out
+++ b/ql/src/test/results/clientpositive/archive_corrupt.q.out
@@ -18,7 +18,7 @@
-- to be thrown during the LOAD step. This behavior is now tested in
-- clientnegative/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstsrcpart
POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
@@ -28,7 +28,7 @@
-- to be thrown during the LOAD step. This behavior is now tested in
-- clientnegative/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstsrcpart
POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
diff --git a/ql/src/test/results/clientpositive/auto_join32.q.out b/ql/src/test/results/clientpositive/auto_join32.q.out
index 0ee8d36..2d38c47 100644
--- a/ql/src/test/results/clientpositive/auto_join32.q.out
+++ b/ql/src/test/results/clientpositive/auto_join32.q.out
@@ -273,28 +273,28 @@
POSTHOOK: Input: default@studenttab10k_smb
POSTHOOK: Input: default@votertab10k_smb
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/empty1.txt' into table studenttab10k_smb
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table studenttab10k_smb
PREHOOK: type: LOAD
PREHOOK: Output: default@studenttab10k_smb
-POSTHOOK: query: load data local inpath '../data/files/empty1.txt' into table studenttab10k_smb
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table studenttab10k_smb
POSTHOOK: type: LOAD
POSTHOOK: Output: default@studenttab10k_smb
-PREHOOK: query: load data local inpath '../data/files/empty2.txt' into table studenttab10k_smb
+PREHOOK: query: load data local inpath '../../data/files/empty2.txt' into table studenttab10k_smb
PREHOOK: type: LOAD
PREHOOK: Output: default@studenttab10k_smb
-POSTHOOK: query: load data local inpath '../data/files/empty2.txt' into table studenttab10k_smb
+POSTHOOK: query: load data local inpath '../../data/files/empty2.txt' into table studenttab10k_smb
POSTHOOK: type: LOAD
POSTHOOK: Output: default@studenttab10k_smb
-PREHOOK: query: load data local inpath '../data/files/empty1.txt' into table votertab10k_smb
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table votertab10k_smb
PREHOOK: type: LOAD
PREHOOK: Output: default@votertab10k_smb
-POSTHOOK: query: load data local inpath '../data/files/empty1.txt' into table votertab10k_smb
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table votertab10k_smb
POSTHOOK: type: LOAD
POSTHOOK: Output: default@votertab10k_smb
-PREHOOK: query: load data local inpath '../data/files/empty2.txt' into table votertab10k_smb
+PREHOOK: query: load data local inpath '../../data/files/empty2.txt' into table votertab10k_smb
PREHOOK: type: LOAD
PREHOOK: Output: default@votertab10k_smb
-POSTHOOK: query: load data local inpath '../data/files/empty2.txt' into table votertab10k_smb
+POSTHOOK: query: load data local inpath '../../data/files/empty2.txt' into table votertab10k_smb
POSTHOOK: type: LOAD
POSTHOOK: Output: default@votertab10k_smb
PREHOOK: query: explain select s.name, count(distinct registration)
@@ -423,30 +423,30 @@
POSTHOOK: query: create table votertab10k_part (name string, age int, registration string, contributions float) partitioned by (p string) clustered by (name) sorted by (name) into 2 buckets
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@votertab10k_part
-PREHOOK: query: load data local inpath '../data/files/empty1.txt' into table studenttab10k_part partition (p='foo')
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table studenttab10k_part partition (p='foo')
PREHOOK: type: LOAD
PREHOOK: Output: default@studenttab10k_part
-POSTHOOK: query: load data local inpath '../data/files/empty1.txt' into table studenttab10k_part partition (p='foo')
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table studenttab10k_part partition (p='foo')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@studenttab10k_part
POSTHOOK: Output: default@studenttab10k_part@p=foo
-PREHOOK: query: load data local inpath '../data/files/empty2.txt' into table studenttab10k_part partition (p='foo')
+PREHOOK: query: load data local inpath '../../data/files/empty2.txt' into table studenttab10k_part partition (p='foo')
PREHOOK: type: LOAD
PREHOOK: Output: default@studenttab10k_part@p=foo
-POSTHOOK: query: load data local inpath '../data/files/empty2.txt' into table studenttab10k_part partition (p='foo')
+POSTHOOK: query: load data local inpath '../../data/files/empty2.txt' into table studenttab10k_part partition (p='foo')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@studenttab10k_part@p=foo
-PREHOOK: query: load data local inpath '../data/files/empty1.txt' into table votertab10k_part partition (p='foo')
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table votertab10k_part partition (p='foo')
PREHOOK: type: LOAD
PREHOOK: Output: default@votertab10k_part
-POSTHOOK: query: load data local inpath '../data/files/empty1.txt' into table votertab10k_part partition (p='foo')
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table votertab10k_part partition (p='foo')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@votertab10k_part
POSTHOOK: Output: default@votertab10k_part@p=foo
-PREHOOK: query: load data local inpath '../data/files/empty2.txt' into table votertab10k_part partition (p='foo')
+PREHOOK: query: load data local inpath '../../data/files/empty2.txt' into table votertab10k_part partition (p='foo')
PREHOOK: type: LOAD
PREHOOK: Output: default@votertab10k_part@p=foo
-POSTHOOK: query: load data local inpath '../data/files/empty2.txt' into table votertab10k_part partition (p='foo')
+POSTHOOK: query: load data local inpath '../../data/files/empty2.txt' into table votertab10k_part partition (p='foo')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@votertab10k_part@p=foo
PREHOOK: query: explain select s.name, count(distinct registration)
diff --git a/ql/src/test/results/clientpositive/auto_join_filters.q.out b/ql/src/test/results/clientpositive/auto_join_filters.q.out
index 896580a..6ad858d 100644
--- a/ql/src/test/results/clientpositive/auto_join_filters.q.out
+++ b/ql/src/test/results/clientpositive/auto_join_filters.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE myinput1(key int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@myinput1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1
PREHOOK: type: LOAD
PREHOOK: Output: default@myinput1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@myinput1
PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
@@ -253,28 +253,28 @@
POSTHOOK: query: CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
diff --git a/ql/src/test/results/clientpositive/auto_join_nulls.q.out b/ql/src/test/results/clientpositive/auto_join_nulls.q.out
index ad8123f..4880801 100644
--- a/ql/src/test/results/clientpositive/auto_join_nulls.q.out
+++ b/ql/src/test/results/clientpositive/auto_join_nulls.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE myinput1(key int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@myinput1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1
PREHOOK: type: LOAD
PREHOOK: Output: default@myinput1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@myinput1
PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b
diff --git a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
index 87b79d0..2328928 100644
--- a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
+++ b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
@@ -7,10 +7,10 @@
create table testsrc ( `key` int,`val` string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@testsrc
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table testsrc
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table testsrc
PREHOOK: type: LOAD
PREHOOK: Output: default@testsrc
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table testsrc
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table testsrc
POSTHOOK: type: LOAD
POSTHOOK: Output: default@testsrc
PREHOOK: query: drop table if exists orderpayment_small
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
index 61463ae..595c6c5 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
@@ -9,17 +9,17 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -27,54 +27,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
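The fixtures above are the standard sort-merge-bucket join setup: `bucket_small` and `bucket_big` are both CLUSTERED BY and SORTED BY the join key, into 2 and 4 buckets respectively, so each small bucket aligns with a fixed subset of the big table's buckets and the join can stream matching buckets without a shuffle. A condensed sketch of what the auto_sortmerge_join tests drive, with the SET flags hedged as the usual auto-SMB switches rather than anything these hunks show:

    CREATE TABLE bucket_small (key STRING, value STRING) PARTITIONED BY (ds STRING)
      CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE;
    CREATE TABLE bucket_big (key STRING, value STRING) PARTITIONED BY (ds STRING)
      CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE;
    SET hive.auto.convert.sortmerge.join=true;
    SET hive.optimize.bucketmapjoin=true;
    SET hive.optimize.bucketmapjoin.sortedmerge=true;
    SELECT count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key;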
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
index 6bc1e65..41ccbdf 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
@@ -7,17 +7,17 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -25,54 +25,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
index 865627b..891f225 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
@@ -9,17 +9,17 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -27,54 +27,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_medium (key string, value string) partitioned by (ds string)
@@ -84,23 +84,23 @@
CLUSTERED BY (key) SORTED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_medium
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_medium
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_medium
POSTHOOK: Output: default@bucket_medium@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_medium@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_medium@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_medium@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_medium partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_medium@ds=2008-04-08
PREHOOK: query: explain extended select count(*) FROM bucket_small a JOIN bucket_medium b ON a.key = b.key JOIN bucket_big c ON c.key = b.key JOIN bucket_medium d ON c.key = b.key
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
index 09e1479..4701a9b 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
@@ -5,29 +5,29 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -35,30 +35,30 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: -- Since the leftmost table is assumed as the big table, arrange the tables in the join accordingly
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
index b0d0009..e40d698 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
@@ -5,30 +5,30 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -36,29 +36,29 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
index 57cc1c4..bd40c2b 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
@@ -5,54 +5,54 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -60,17 +60,17 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
index 1ccbc09..b38c2ad 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out
@@ -5,28 +5,28 @@
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -34,16 +34,16 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
index 719eb04..af73581 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
@@ -5,54 +5,54 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -60,30 +60,30 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
index 7497513..0a4e2c4 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
@@ -5,30 +5,30 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/smallsrcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -36,54 +36,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: -- Since size is being used to find the big table, the order of the tables in the join does not matter
diff --git a/ql/src/test/results/clientpositive/avro_compression_enabled.q.out b/ql/src/test/results/clientpositive/avro_compression_enabled.q.out
index eefcfe7..ac7df11 100644
--- a/ql/src/test/results/clientpositive/avro_compression_enabled.q.out
+++ b/ql/src/test/results/clientpositive/avro_compression_enabled.q.out
@@ -73,10 +73,10 @@
}')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@doctors4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
PREHOOK: type: LOAD
PREHOOK: Output: default@doctors4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@doctors4
PREHOOK: query: select count(*) from src
diff --git a/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out b/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out
index 6bcad05..a34d863 100644
--- a/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out
+++ b/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out
@@ -79,10 +79,10 @@
first_name string from deserializer
last_name string from deserializer
extra_field string from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors_with_new_field
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors_with_new_field
PREHOOK: type: LOAD
PREHOOK: Output: default@doctors_with_new_field
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors_with_new_field
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors_with_new_field
POSTHOOK: type: LOAD
POSTHOOK: Output: default@doctors_with_new_field
PREHOOK: query: SELECT * FROM doctors_with_new_field ORDER BY first_name
diff --git a/ql/src/test/results/clientpositive/avro_joins.q.out b/ql/src/test/results/clientpositive/avro_joins.q.out
index 9f9a1fb..8298902 100644
--- a/ql/src/test/results/clientpositive/avro_joins.q.out
+++ b/ql/src/test/results/clientpositive/avro_joins.q.out
@@ -81,10 +81,10 @@
first_name string from deserializer
last_name string from deserializer
extra_field string from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
PREHOOK: type: LOAD
PREHOOK: Output: default@doctors4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@doctors4
PREHOOK: query: CREATE TABLE episodes
@@ -153,10 +153,10 @@
title string from deserializer
air_date string from deserializer
doctor int from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
PREHOOK: type: LOAD
PREHOOK: Output: default@episodes
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
POSTHOOK: type: LOAD
POSTHOOK: Output: default@episodes
PREHOOK: query: SELECT e.title, e.air_date, d.first_name, d.last_name, d.extra_field, e.air_date
diff --git a/ql/src/test/results/clientpositive/avro_nullable_fields.q.out b/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
index 78bf257..81950b0 100644
--- a/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
+++ b/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
@@ -37,10 +37,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@test_serializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/csv.txt' INTO TABLE test_serializer
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
PREHOOK: type: LOAD
PREHOOK: Output: default@test_serializer
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/csv.txt' INTO TABLE test_serializer
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test_serializer
PREHOOK: query: CREATE TABLE as_avro
diff --git a/ql/src/test/results/clientpositive/avro_partitioned.q.out b/ql/src/test/results/clientpositive/avro_partitioned.q.out
index 78a0f6e..d53e252 100644
--- a/ql/src/test/results/clientpositive/avro_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/avro_partitioned.q.out
@@ -59,10 +59,10 @@
}')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@episodes
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
PREHOOK: type: LOAD
PREHOOK: Output: default@episodes
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
POSTHOOK: type: LOAD
POSTHOOK: Output: default@episodes
PREHOOK: query: CREATE TABLE episodes_partitioned
diff --git a/ql/src/test/results/clientpositive/avro_sanity_test.q.out b/ql/src/test/results/clientpositive/avro_sanity_test.q.out
index 9227f8f..c6af33b 100644
--- a/ql/src/test/results/clientpositive/avro_sanity_test.q.out
+++ b/ql/src/test/results/clientpositive/avro_sanity_test.q.out
@@ -66,10 +66,10 @@
number int from deserializer
first_name string from deserializer
last_name string from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
PREHOOK: type: LOAD
PREHOOK: Output: default@doctors
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
POSTHOOK: type: LOAD
POSTHOOK: Output: default@doctors
PREHOOK: query: SELECT * FROM doctors ORDER BY number
diff --git a/ql/src/test/results/clientpositive/binarysortable_1.q.out b/ql/src/test/results/clientpositive/binarysortable_1.q.out
index f416674..4baa6fc 100644
--- a/ql/src/test/results/clientpositive/binarysortable_1.q.out
+++ b/ql/src/test/results/clientpositive/binarysortable_1.q.out
Binary files differ
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
index 0259356..e652ceb 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
@@ -20,16 +20,16 @@
sorted by (value, key) into 1 BUCKETS stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1
+PREHOOK: query: load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1
+POSTHOOK: query: load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
-PREHOOK: query: load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
PREHOOK: query: -- The tables are bucketed in same columns in different order,
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
index ccef868..4446277 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
@@ -20,16 +20,16 @@
sorted by (value desc, key desc) into 1 BUCKETS stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table2
-PREHOOK: query: load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1
+PREHOOK: query: load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: load data local inpath '../data/files/SortCol1Col2.txt' overwrite into table table1
+POSTHOOK: query: load data local inpath '../../data/files/SortCol1Col2.txt' overwrite into table table1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
-PREHOOK: query: load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2
+PREHOOK: query: load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2
PREHOOK: type: LOAD
PREHOOK: Output: default@table2
-POSTHOOK: query: load data local inpath '../data/files/SortCol2Col1.txt' overwrite into table table2
+POSTHOOK: query: load data local inpath '../../data/files/SortCol2Col1.txt' overwrite into table table2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table2
PREHOOK: query: -- The tables are bucketed in same columns in different order,
diff --git a/ql/src/test/results/clientpositive/bucketcontext_1.q.out b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
index 3d0fc91..95ee59c 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_1.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
@@ -5,17 +5,17 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -23,54 +23,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_2.q.out b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
index 2c04d6e..70b1da1 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_2.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
@@ -5,29 +5,29 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -35,30 +35,30 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_3.q.out b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
index e9b3dad..742abc9 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_3.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
@@ -5,30 +5,30 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -36,29 +36,29 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_4.q.out b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
index 368071f..b9ec65b 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_4.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
@@ -5,54 +5,54 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -60,17 +60,17 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_5.q.out b/ql/src/test/results/clientpositive/bucketcontext_5.q.out
index 2f1dd61..cf1e610 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_5.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_5.q.out
@@ -5,28 +5,28 @@
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -34,16 +34,16 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_6.q.out b/ql/src/test/results/clientpositive/bucketcontext_6.q.out
index 92a1855..b8eea23 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_6.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_6.q.out
@@ -5,28 +5,28 @@
CREATE TABLE bucket_small (key string, value string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -34,30 +34,30 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_7.q.out b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
index a9ed87b..ea66e63 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_7.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
@@ -5,54 +5,54 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -60,30 +60,30 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketcontext_8.q.out b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
index 817d0e1..1f4c955 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_8.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
@@ -5,30 +5,30 @@
CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-09
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -36,54 +36,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: explain extended select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out b/ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out
index 77e5395..4aa9fa7 100644
--- a/ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out
+++ b/ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE
@@ -281,17 +281,17 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
POSTHOOK: Lineage: t2.name SIMPLE [(t1)t1.FieldSchema(name:name, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
POSTHOOK: Lineage: t2.name SIMPLE [(t1)t1.FieldSchema(name:name, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
POSTHOOK: Lineage: t2.name SIMPLE [(t1)t1.FieldSchema(name:name, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/bucketizedhiveinputformat_auto.q.out b/ql/src/test/results/clientpositive/bucketizedhiveinputformat_auto.q.out
index cfceff8..557cd1a 100644
--- a/ql/src/test/results/clientpositive/bucketizedhiveinputformat_auto.q.out
+++ b/ql/src/test/results/clientpositive/bucketizedhiveinputformat_auto.q.out
@@ -3,17 +3,17 @@
POSTHOOK: query: CREATE TABLE bucket_small (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_small
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_small@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_small partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_small@ds=2008-04-08
PREHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -21,54 +21,54 @@
POSTHOOK: query: CREATE TABLE bucket_big (key string, value string) partitioned by (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket_big
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket_big@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket_big partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket_big@ds=2008-04-09
PREHOOK: query: select /* + MAPJOIN(a) */ count(*) FROM bucket_small a JOIN bucket_big b ON a.key = b.key
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
index 3838fda..c074c8e 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
@@ -257,54 +257,54 @@
POSTHOOK: Input: default@srcbucket_mapjoin_part
POSTHOOK: Input: default@srcbucket_mapjoin_part_2
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
index af6544a..712dfa3 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_1 CLUSTERED BY (key) INTO 3 BUCKETS
@@ -26,23 +26,23 @@
POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
POSTHOOK: Input: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
@@ -52,23 +52,23 @@
CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS
@@ -79,17 +79,17 @@
POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
POSTHOOK: Input: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 3 BUCKETS
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
index 8d2b30b..2c27c83 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_1 CLUSTERED BY (key) INTO 4 BUCKETS
@@ -26,29 +26,29 @@
POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
POSTHOOK: Input: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=2
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
@@ -58,29 +58,29 @@
CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS
@@ -91,17 +91,17 @@
POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
POSTHOOK: Input: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=2
PREHOOK: query: -- The table and partition bucketing metadata doesn't match but the bucket numbers of all partitions is
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
index eae421e..fe22c61 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
@@ -25,17 +25,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 NOT CLUSTERED
@@ -53,17 +53,17 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_3
POSTHOOK: Output: default@srcbucket_mapjoin_part_3@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_3@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_3 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_3@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_3 CLUSTERED BY (key) INTO 2 BUCKETS
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
index 842617b..f955679 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
@@ -3,29 +3,29 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -33,17 +33,17 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
@@ -1262,11 +1262,11 @@
POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
0 0 0
PREHOOK: query: -- HIVE-3210
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: query: -- HIVE-3210
-load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
@@ -1294,10 +1294,10 @@
POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
index 20dfe93..09d78d1 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -20,29 +20,29 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -50,17 +50,17 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
index c759927..9b5890a 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -20,29 +20,29 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -50,17 +50,17 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
index c92a0ab..6ebd7ae 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -20,54 +20,54 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-09
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -75,30 +75,30 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
index 92aaab8..753f236 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING)
@@ -25,17 +25,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
PREHOOK: query: -- Tests that bucket map join works with a table with more than one level of partitioning
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
index bcec2e8..6c50602 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
@@ -25,17 +25,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 3 BUCKETS
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
index fef4aea..5e8581b 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
@@ -5,17 +5,17 @@
CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_1@part=1
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (part STRING)
@@ -25,23 +25,23 @@
CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS
@@ -301,17 +301,17 @@
POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
POSTHOOK: Input: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (part='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@part=1
PREHOOK: query: ALTER TABLE srcbucket_mapjoin_part_2 CLUSTERED BY (key) INTO 2 BUCKETS
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
index 03ebcd4..177af54 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE
@@ -20,23 +20,23 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 3 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_tmp_result (key string , value1 string, value2 string)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
index acc8ae0..6f92975 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
@@ -20,30 +20,30 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-09
PREHOOK: query: create table bucketmapjoin_tmp_result (key string , value1 string, value2 string)
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
index ae48f88..1fb2f20 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
@@ -34,76 +34,76 @@
POSTHOOK: query: create table test4 (key string, value string) clustered by (value, key) sorted by (value, key) into 3 buckets
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@test4
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test1
PREHOOK: type: LOAD
PREHOOK: Output: default@test1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test1
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test1
PREHOOK: type: LOAD
PREHOOK: Output: default@test1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test1
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test1
PREHOOK: type: LOAD
PREHOOK: Output: default@test1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test1
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test2
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test2
PREHOOK: type: LOAD
PREHOOK: Output: default@test2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test2
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test2
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test2
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test2
PREHOOK: type: LOAD
PREHOOK: Output: default@test2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test2
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test2
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test2
PREHOOK: type: LOAD
PREHOOK: Output: default@test2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test2
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test2
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test3
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test3
PREHOOK: type: LOAD
PREHOOK: Output: default@test3
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test3
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test3
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test3
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test3
PREHOOK: type: LOAD
PREHOOK: Output: default@test3
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test3
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test3
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test3
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test3
PREHOOK: type: LOAD
PREHOOK: Output: default@test3
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test3
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test3
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test4
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test4
PREHOOK: type: LOAD
PREHOOK: Output: default@test4
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE test4
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE test4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test4
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test4
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test4
PREHOOK: type: LOAD
PREHOOK: Output: default@test4
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE test4
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE test4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test4
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test4
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test4
PREHOOK: type: LOAD
PREHOOK: Output: default@test4
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE test4
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE test4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test4
PREHOOK: query: -- should be allowed
diff --git a/ql/src/test/results/clientpositive/column_access_stats.q.out b/ql/src/test/results/clientpositive/column_access_stats.q.out
index c0c36b1..356fe5f 100644
--- a/ql/src/test/results/clientpositive/column_access_stats.q.out
+++ b/ql/src/test/results/clientpositive/column_access_stats.q.out
@@ -2,7 +2,7 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
diff --git a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
index fd4ba8f..3f0bfe9 100644
--- a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
@@ -9,17 +9,17 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=2000.0
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
PREHOOK: type: LOAD
PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employee_part
POSTHOOK: Output: default@employee_part@employeesalary=4000.0
diff --git a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
index 3c27096..7b0fb32 100644
--- a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
@@ -27,10 +27,10 @@
row format delimited fields terminated by '|' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@UserVisits_web_text_none
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
PREHOOK: type: LOAD
PREHOOK: Output: default@uservisits_web_text_none
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
POSTHOOK: type: LOAD
POSTHOOK: Output: default@uservisits_web_text_none
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/compute_stats_binary.q.out b/ql/src/test/results/clientpositive/compute_stats_binary.q.out
index 3982505..b3d373d 100644
--- a/ql/src/test/results/clientpositive/compute_stats_binary.q.out
+++ b/ql/src/test/results/clientpositive/compute_stats_binary.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_binary
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/binary.txt" INTO TABLE tab_binary
+LOAD DATA LOCAL INPATH "../../data/files/binary.txt" INTO TABLE tab_binary
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_binary
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/binary.txt" INTO TABLE tab_binary
+LOAD DATA LOCAL INPATH "../../data/files/binary.txt" INTO TABLE tab_binary
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_binary
PREHOOK: query: select count(*) from tab_binary
diff --git a/ql/src/test/results/clientpositive/compute_stats_boolean.q.out b/ql/src/test/results/clientpositive/compute_stats_boolean.q.out
index 88836ab..7bf7c4a 100644
--- a/ql/src/test/results/clientpositive/compute_stats_boolean.q.out
+++ b/ql/src/test/results/clientpositive/compute_stats_boolean.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_bool
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/bool.txt" INTO TABLE tab_bool
+LOAD DATA LOCAL INPATH "../../data/files/bool.txt" INTO TABLE tab_bool
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_bool
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/bool.txt" INTO TABLE tab_bool
+LOAD DATA LOCAL INPATH "../../data/files/bool.txt" INTO TABLE tab_bool
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_bool
PREHOOK: query: select count(*) from tab_bool
diff --git a/ql/src/test/results/clientpositive/compute_stats_double.q.out b/ql/src/test/results/clientpositive/compute_stats_double.q.out
index 3456d58..6ff34b7 100644
--- a/ql/src/test/results/clientpositive/compute_stats_double.q.out
+++ b/ql/src/test/results/clientpositive/compute_stats_double.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_double
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/double.txt" INTO TABLE tab_double
+LOAD DATA LOCAL INPATH "../../data/files/double.txt" INTO TABLE tab_double
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_double
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/double.txt" INTO TABLE tab_double
+LOAD DATA LOCAL INPATH "../../data/files/double.txt" INTO TABLE tab_double
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_double
PREHOOK: query: select count(*) from tab_double
diff --git a/ql/src/test/results/clientpositive/compute_stats_long.q.out b/ql/src/test/results/clientpositive/compute_stats_long.q.out
index bb1aa6d..5ad8e08 100644
--- a/ql/src/test/results/clientpositive/compute_stats_long.q.out
+++ b/ql/src/test/results/clientpositive/compute_stats_long.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_int
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_int
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+LOAD DATA LOCAL INPATH "../../data/files/int.txt" INTO TABLE tab_int
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_int
PREHOOK: query: select count(*) from tab_int
diff --git a/ql/src/test/results/clientpositive/compute_stats_string.q.out b/ql/src/test/results/clientpositive/compute_stats_string.q.out
index 814024a..c450159 100644
--- a/ql/src/test/results/clientpositive/compute_stats_string.q.out
+++ b/ql/src/test/results/clientpositive/compute_stats_string.q.out
@@ -4,11 +4,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab_string
PREHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/string.txt" INTO TABLE tab_string
+LOAD DATA LOCAL INPATH "../../data/files/string.txt" INTO TABLE tab_string
PREHOOK: type: LOAD
PREHOOK: Output: default@tab_string
POSTHOOK: query: -- insert some data
-LOAD DATA LOCAL INPATH "../data/files/string.txt" INTO TABLE tab_string
+LOAD DATA LOCAL INPATH "../../data/files/string.txt" INTO TABLE tab_string
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab_string
PREHOOK: query: select count(*) from tab_string
diff --git a/ql/src/test/results/clientpositive/correlationoptimizer4.q.out b/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
index ecc1d08..e655043 100644
--- a/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
+++ b/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key INT, val STRING)
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE T2(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key INT, val STRING)
@@ -25,10 +25,10 @@
POSTHOOK: query: CREATE TABLE T3(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- When Correlation Optimizer is turned off, this query will be evaluated
diff --git a/ql/src/test/results/clientpositive/correlationoptimizer5.q.out b/ql/src/test/results/clientpositive/correlationoptimizer5.q.out
index b1e5b86..102ffc7 100644
--- a/ql/src/test/results/clientpositive/correlationoptimizer5.q.out
+++ b/ql/src/test/results/clientpositive/correlationoptimizer5.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key INT, val STRING)
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE T2(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key INT, val STRING)
@@ -25,10 +25,10 @@
POSTHOOK: query: CREATE TABLE T3(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: CREATE TABLE T4(key INT, val STRING)
@@ -36,10 +36,10 @@
POSTHOOK: query: CREATE TABLE T4(key INT, val STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE T4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4
PREHOOK: type: LOAD
PREHOOK: Output: default@t4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE T4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t4
PREHOOK: query: CREATE TABLE dest_co1(key INT, val STRING)
diff --git a/ql/src/test/results/clientpositive/count.q.out b/ql/src/test/results/clientpositive/count.q.out
index 296f727..b8ed7bd 100644
--- a/ql/src/test/results/clientpositive/count.q.out
+++ b/ql/src/test/results/clientpositive/count.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table abcd (a int, b int, c int, d int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@abcd
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in4.txt' INTO TABLE abcd
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd
PREHOOK: type: LOAD
PREHOOK: Output: default@abcd
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in4.txt' INTO TABLE abcd
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd
POSTHOOK: type: LOAD
POSTHOOK: Output: default@abcd
PREHOOK: query: select * from abcd
diff --git a/ql/src/test/results/clientpositive/create_merge_compressed.q.out b/ql/src/test/results/clientpositive/create_merge_compressed.q.out
index 9e21240..4340878 100644
--- a/ql/src/test/results/clientpositive/create_merge_compressed.q.out
+++ b/ql/src/test/results/clientpositive/create_merge_compressed.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table src_rc_merge_test(key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_rc_merge_test
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test
PREHOOK: type: LOAD
PREHOOK: Output: default@src_rc_merge_test
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_merge_test
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_merge_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_rc_merge_test
PREHOOK: query: create table tgt_rc_merge_test(key int, value string) stored as rcfile
diff --git a/ql/src/test/results/clientpositive/create_nested_type.q.out b/ql/src/test/results/clientpositive/create_nested_type.q.out
index 12d0da5..6a7b533 100644
--- a/ql/src/test/results/clientpositive/create_nested_type.q.out
+++ b/ql/src/test/results/clientpositive/create_nested_type.q.out
@@ -31,10 +31,10 @@
d map<string,array<string>> None
#### A masked pattern was here ####
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
PREHOOK: type: LOAD
PREHOOK: Output: default@table1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table1
PREHOOK: query: SELECT * from table1
diff --git a/ql/src/test/results/clientpositive/create_struct_table.q.out b/ql/src/test/results/clientpositive/create_struct_table.q.out
index 8b15e9e..a6bc49e 100644
--- a/ql/src/test/results/clientpositive/create_struct_table.q.out
+++ b/ql/src/test/results/clientpositive/create_struct_table.q.out
@@ -9,11 +9,11 @@
collection items terminated by '\001'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@abc
-PREHOOK: query: load data local inpath '../data/files/kv1.txt'
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt'
overwrite into table abc
PREHOOK: type: LOAD
PREHOOK: Output: default@abc
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt'
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt'
overwrite into table abc
POSTHOOK: type: LOAD
POSTHOOK: Output: default@abc
diff --git a/ql/src/test/results/clientpositive/create_union_table.q.out b/ql/src/test/results/clientpositive/create_union_table.q.out
index b7e3359..cde6bfc 100644
--- a/ql/src/test/results/clientpositive/create_union_table.q.out
+++ b/ql/src/test/results/clientpositive/create_union_table.q.out
@@ -30,11 +30,11 @@
strct struct<a:int, b:string, c:string>)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@abc
-PREHOOK: query: load data local inpath '../data/files/union_input.txt'
+PREHOOK: query: load data local inpath '../../data/files/union_input.txt'
overwrite into table abc
PREHOOK: type: LOAD
PREHOOK: Output: default@abc
-POSTHOOK: query: load data local inpath '../data/files/union_input.txt'
+POSTHOOK: query: load data local inpath '../../data/files/union_input.txt'
overwrite into table abc
POSTHOOK: type: LOAD
POSTHOOK: Output: default@abc
diff --git a/ql/src/test/results/clientpositive/custom_input_output_format.q.out b/ql/src/test/results/clientpositive/custom_input_output_format.q.out
index dd5f0fc..8f54c96 100644
--- a/ql/src/test/results/clientpositive/custom_input_output_format.q.out
+++ b/ql/src/test/results/clientpositive/custom_input_output_format.q.out
@@ -7,6 +7,47 @@
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.udf.Rot13OutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src1_rot13_iof
+PREHOOK: query: DESCRIBE EXTENDED src1_rot13_iof
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED src1_rot13_iof
+POSTHOOK: type: DESCTABLE
+key string None
+value string None
+
+#### A masked pattern was here ####
+PREHOOK: query: SELECT * FROM src1 ORDER BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM src1 ORDER BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+#### A masked pattern was here ####
+
+
+
+
+ val_165
+ val_193
+ val_265
+ val_27
+ val_409
+ val_484
+128
+146 val_146
+150 val_150
+213 val_213
+224
+238 val_238
+255 val_255
+273 val_273
+278 val_278
+311 val_311
+369
+401 val_401
+406 val_406
+66 val_66
+98 val_98
PREHOOK: query: INSERT OVERWRITE TABLE src1_rot13_iof SELECT * FROM src1
PREHOOK: type: QUERY
PREHOOK: Input: default@src1
@@ -17,38 +58,38 @@
POSTHOOK: Output: default@src1_rot13_iof
POSTHOOK: Lineage: src1_rot13_iof.key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: src1_rot13_iof.value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT * FROM src1_rot13_iof
+PREHOOK: query: SELECT * FROM src1_rot13_iof ORDER BY key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@src1_rot13_iof
#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM src1_rot13_iof
+POSTHOOK: query: SELECT * FROM src1_rot13_iof ORDER BY key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src1_rot13_iof
#### A masked pattern was here ####
POSTHOOK: Lineage: src1_rot13_iof.key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: src1_rot13_iof.value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-238 val_238
-311 val_311
- val_27
+
+
+
val_165
- val_409
-255 val_255
-278 val_278
-98 val_98
- val_484
- val_265
val_193
-401 val_401
-150 val_150
-273 val_273
-224
-369
-66 val_66
+ val_265
+ val_27
+ val_409
+ val_484
128
-213 val_213
146 val_146
+150 val_150
+213 val_213
+224
+238 val_238
+255 val_255
+273 val_273
+278 val_278
+311 val_311
+369
+401 val_401
406 val_406
-
-
-
+66 val_66
+98 val_98
diff --git a/ql/src/test/results/clientpositive/database.q.out b/ql/src/test/results/clientpositive/database.q.out
index 364aa20..2c67815 100644
--- a/ql/src/test/results/clientpositive/database.q.out
+++ b/ql/src/test/results/clientpositive/database.q.out
@@ -160,12 +160,12 @@
#### A masked pattern was here ####
PREHOOK: query: -- LOAD and SELECT
-LOAD DATA LOCAL INPATH '../data/files/test.dat'
+LOAD DATA LOCAL INPATH '../../data/files/test.dat'
OVERWRITE INTO TABLE test_table
PREHOOK: type: LOAD
PREHOOK: Output: test_db@test_table
POSTHOOK: query: -- LOAD and SELECT
-LOAD DATA LOCAL INPATH '../data/files/test.dat'
+LOAD DATA LOCAL INPATH '../../data/files/test.dat'
OVERWRITE INTO TABLE test_table
POSTHOOK: type: LOAD
POSTHOOK: Output: test_db@test_table
@@ -503,12 +503,12 @@
POSTHOOK: Output: db1@src
POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- LOAD into foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.src
PREHOOK: type: LOAD
PREHOOK: Output: db1@src
POSTHOOK: query: -- LOAD into foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.src
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@src
@@ -1037,13 +1037,13 @@
POSTHOOK: Output: db1@srcpart
POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ]
PREHOOK: query: -- LOAD data into Partitioned foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.srcpart
PARTITION (ds='2008-04-08', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: db1@srcpart
POSTHOOK: query: -- LOAD data into Partitioned foreign table
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE db1.srcpart
PARTITION (ds='2008-04-08', hr='11')
POSTHOOK: type: LOAD
diff --git a/ql/src/test/results/clientpositive/database_drop.q.out b/ql/src/test/results/clientpositive/database_drop.q.out
index 6c4440f..37b0d31 100644
--- a/ql/src/test/results/clientpositive/database_drop.q.out
+++ b/ql/src/test/results/clientpositive/database_drop.q.out
@@ -32,10 +32,10 @@
CREATE TABLE temp_tbl (id INT, name STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db5@temp_tbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE temp_tbl
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE temp_tbl
PREHOOK: type: LOAD
PREHOOK: Output: db5@temp_tbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE temp_tbl
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE temp_tbl
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@temp_tbl
PREHOOK: query: CREATE VIEW temp_tbl_view AS SELECT * FROM temp_tbl
@@ -69,10 +69,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' into table temp_tbl2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' into table temp_tbl2
PREHOOK: type: LOAD
PREHOOK: Output: db5@temp_tbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' into table temp_tbl2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' into table temp_tbl2
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@temp_tbl2
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
@@ -121,10 +121,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab
POSTHOOK: Output: db5@part_tab@ds=2008-04-09
@@ -134,10 +134,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab PARTITION (ds='2009-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab
POSTHOOK: Output: db5@part_tab@ds=2009-04-09
@@ -220,10 +220,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab2
POSTHOOK: Output: db5@part_tab2@ds=2008-04-09
@@ -239,10 +239,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab2 PARTITION (ds='2009-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab2
POSTHOOK: Output: db5@part_tab2@ds=2009-04-09
@@ -382,10 +382,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab3
POSTHOOK: Output: db5@part_tab3@ds=2008-04-09
@@ -407,10 +407,10 @@
POSTHOOK: Lineage: db5__temp_tbl_idx1__._bucketname SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__._offsets EXPRESSION [(temp_tbl)temp_tbl.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchema(name:id, type:int, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: db5@part_tab3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE part_tab3 PARTITION (ds='2009-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: db5@part_tab3
POSTHOOK: Output: db5@part_tab3@ds=2009-04-09
diff --git a/ql/src/test/results/clientpositive/date_2.q.out b/ql/src/test/results/clientpositive/date_2.q.out
index eee4ef1..0bbf0bb 100644
--- a/ql/src/test/results/clientpositive/date_2.q.out
+++ b/ql/src/test/results/clientpositive/date_2.q.out
@@ -19,10 +19,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@date_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2
PREHOOK: type: LOAD
PREHOOK: Output: default@date_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@date_2
PREHOOK: query: select fl_date, fl_num from date_2 order by fl_date asc, fl_num desc
diff --git a/ql/src/test/results/clientpositive/date_join1.q.out b/ql/src/test/results/clientpositive/date_join1.q.out
index 0ba749c..cc45cf2 100644
--- a/ql/src/test/results/clientpositive/date_join1.q.out
+++ b/ql/src/test/results/clientpositive/date_join1.q.out
@@ -19,10 +19,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@date_join1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1
PREHOOK: type: LOAD
PREHOOK: Output: default@date_join1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE date_join1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@date_join1
PREHOOK: query: -- Note that there are 2 rows with date 2000-11-28, so we should expect 4 rows with that date in the join results
diff --git a/ql/src/test/results/clientpositive/date_serde.q.out b/ql/src/test/results/clientpositive/date_serde.q.out
index 1624e5e..60f8919 100644
--- a/ql/src/test/results/clientpositive/date_serde.q.out
+++ b/ql/src/test/results/clientpositive/date_serde.q.out
@@ -55,10 +55,10 @@
stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@date_serde_regex
-PREHOOK: query: load data local inpath '../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex
+PREHOOK: query: load data local inpath '../../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@date_serde_regex
-POSTHOOK: query: load data local inpath '../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex
+POSTHOOK: query: load data local inpath '../../data/files/flights_tiny.txt.1' overwrite into table date_serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@date_serde_regex
PREHOOK: query: select * from date_serde_regex
diff --git a/ql/src/test/results/clientpositive/date_udf.q.out b/ql/src/test/results/clientpositive/date_udf.q.out
index 33af984..28e4f08 100644
--- a/ql/src/test/results/clientpositive/date_udf.q.out
+++ b/ql/src/test/results/clientpositive/date_udf.q.out
@@ -59,10 +59,10 @@
POSTHOOK: Output: default@date_udf_flight
POSTHOOK: Lineage: date_udf.d EXPRESSION []
POSTHOOK: Lineage: date_udf_string.d SIMPLE []
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight
PREHOOK: type: LOAD
PREHOOK: Output: default@date_udf_flight
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight
POSTHOOK: type: LOAD
POSTHOOK: Output: default@date_udf_flight
POSTHOOK: Lineage: date_udf.d EXPRESSION []
diff --git a/ql/src/test/results/clientpositive/decimal_3.q.out b/ql/src/test/results/clientpositive/decimal_3.q.out
index 49cdc63..3fc814b 100644
--- a/ql/src/test/results/clientpositive/decimal_3.q.out
+++ b/ql/src/test/results/clientpositive/decimal_3.q.out
@@ -13,10 +13,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_3
PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
diff --git a/ql/src/test/results/clientpositive/decimal_4.q.out b/ql/src/test/results/clientpositive/decimal_4.q.out
index ef5ef7f..898b8ec 100644
--- a/ql/src/test/results/clientpositive/decimal_4.q.out
+++ b/ql/src/test/results/clientpositive/decimal_4.q.out
@@ -24,10 +24,10 @@
STORED AS ORC
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_4_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_4_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_4_1
PREHOOK: query: INSERT OVERWRITE TABLE DECIMAL_4_2 SELECT key, key * 3 FROM DECIMAL_4_1
diff --git a/ql/src/test/results/clientpositive/decimal_5.q.out b/ql/src/test/results/clientpositive/decimal_5.q.out
index 13f4117..77c3724 100644
--- a/ql/src/test/results/clientpositive/decimal_5.q.out
+++ b/ql/src/test/results/clientpositive/decimal_5.q.out
@@ -13,10 +13,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_5
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_5
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_5
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_5
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_5
PREHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key
diff --git a/ql/src/test/results/clientpositive/decimal_6.q.out b/ql/src/test/results/clientpositive/decimal_6.q.out
index d0e2f5a..043321f 100644
--- a/ql/src/test/results/clientpositive/decimal_6.q.out
+++ b/ql/src/test/results/clientpositive/decimal_6.q.out
@@ -32,16 +32,16 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_6_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_6_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_6_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_6_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv9.txt' INTO TABLE DECIMAL_6_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_6_2
PREHOOK: query: SELECT T.key from (
diff --git a/ql/src/test/results/clientpositive/decimal_join.q.out b/ql/src/test/results/clientpositive/decimal_join.q.out
index 0c93878..c47eefa 100644
--- a/ql/src/test/results/clientpositive/decimal_join.q.out
+++ b/ql/src/test/results/clientpositive/decimal_join.q.out
@@ -7,10 +7,10 @@
create table src_dec (key decimal(3,0), value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_dec
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_dec
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table src_dec
PREHOOK: type: LOAD
PREHOOK: Output: default@src_dec
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_dec
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table src_dec
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_dec
PREHOOK: query: select * from src_dec a join src_dec b on a.key=b.key+450
diff --git a/ql/src/test/results/clientpositive/decimal_precision.q.out b/ql/src/test/results/clientpositive/decimal_precision.q.out
index df35fba..8af799b 100644
--- a/ql/src/test/results/clientpositive/decimal_precision.q.out
+++ b/ql/src/test/results/clientpositive/decimal_precision.q.out
@@ -13,10 +13,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_PRECISION
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_precision
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_precision
PREHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY dec
diff --git a/ql/src/test/results/clientpositive/decimal_serde.q.out b/ql/src/test/results/clientpositive/decimal_serde.q.out
index bb670b2..9ab0b5e 100644
--- a/ql/src/test/results/clientpositive/decimal_serde.q.out
+++ b/ql/src/test/results/clientpositive/decimal_serde.q.out
@@ -25,10 +25,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_TEXT
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_text
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_text
PREHOOK: query: SELECT * FROM DECIMAL_TEXT ORDER BY key, value
diff --git a/ql/src/test/results/clientpositive/decimal_udf.q.out b/ql/src/test/results/clientpositive/decimal_udf.q.out
index 652d1a8..242cbbd 100644
--- a/ql/src/test/results/clientpositive/decimal_udf.q.out
+++ b/ql/src/test/results/clientpositive/decimal_udf.q.out
@@ -13,10 +13,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DECIMAL_UDF
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_UDF
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF
PREHOOK: type: LOAD
PREHOOK: Output: default@decimal_udf
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv7.txt' INTO TABLE DECIMAL_UDF
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF
POSTHOOK: type: LOAD
POSTHOOK: Output: default@decimal_udf
PREHOOK: query: -- addition
diff --git a/ql/src/test/results/clientpositive/delimiter.q.out b/ql/src/test/results/clientpositive/delimiter.q.out
index 848bbdc..5e84890 100644
--- a/ql/src/test/results/clientpositive/delimiter.q.out
+++ b/ql/src/test/results/clientpositive/delimiter.q.out
@@ -11,10 +11,10 @@
stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@impressions
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in7.txt' INTO TABLE impressions
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in7.txt' INTO TABLE impressions
PREHOOK: type: LOAD
PREHOOK: Output: default@impressions
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in7.txt' INTO TABLE impressions
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in7.txt' INTO TABLE impressions
POSTHOOK: type: LOAD
POSTHOOK: Output: default@impressions
PREHOOK: query: select * from impressions
diff --git a/ql/src/test/results/clientpositive/disable_file_format_check.q.out b/ql/src/test/results/clientpositive/disable_file_format_check.q.out
index 1d39417..8182dfb 100644
--- a/ql/src/test/results/clientpositive/disable_file_format_check.q.out
+++ b/ql/src/test/results/clientpositive/disable_file_format_check.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table kv_fileformat_check_txt (key string, value string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@kv_fileformat_check_txt
-PREHOOK: query: load data local inpath '../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt
+PREHOOK: query: load data local inpath '../../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt
PREHOOK: type: LOAD
PREHOOK: Output: default@kv_fileformat_check_txt
-POSTHOOK: query: load data local inpath '../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt
+POSTHOOK: query: load data local inpath '../../data/files/kv1.seq' overwrite into table kv_fileformat_check_txt
POSTHOOK: type: LOAD
POSTHOOK: Output: default@kv_fileformat_check_txt
PREHOOK: query: create table kv_fileformat_check_seq (key string, value string) stored as sequencefile
@@ -14,9 +14,9 @@
POSTHOOK: query: create table kv_fileformat_check_seq (key string, value string) stored as sequencefile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@kv_fileformat_check_seq
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq
PREHOOK: type: LOAD
PREHOOK: Output: default@kv_fileformat_check_seq
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq
POSTHOOK: type: LOAD
POSTHOOK: Output: default@kv_fileformat_check_seq
diff --git a/ql/src/test/results/clientpositive/escape1.q.out b/ql/src/test/results/clientpositive/escape1.q.out
index 6c6ea1c..48ac8d4 100644
--- a/ql/src/test/results/clientpositive/escape1.q.out
+++ b/ql/src/test/results/clientpositive/escape1.q.out
Binary files differ
diff --git a/ql/src/test/results/clientpositive/escape2.q.out b/ql/src/test/results/clientpositive/escape2.q.out
index df54dd7..a554ceb 100644
--- a/ql/src/test/results/clientpositive/escape2.q.out
+++ b/ql/src/test/results/clientpositive/escape2.q.out
Binary files differ
diff --git a/ql/src/test/results/clientpositive/exim_01_nonpart.q.out b/ql/src/test/results/clientpositive/exim_01_nonpart.q.out
index a48c776..a13f6e1 100644
--- a/ql/src/test/results/clientpositive/exim_01_nonpart.q.out
+++ b/ql/src/test/results/clientpositive/exim_01_nonpart.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_02_part.q.out b/ql/src/test/results/clientpositive/exim_02_part.q.out
index 6efd684..667a22f 100644
--- a/ql/src/test/results/clientpositive/exim_02_part.q.out
+++ b/ql/src/test/results/clientpositive/exim_02_part.q.out
@@ -11,11 +11,11 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out b/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
index 656a690..22c06d8 100644
--- a/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
+++ b/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_04_all_part.q.out b/ql/src/test/results/clientpositive/exim_04_all_part.q.out
index f776333..612fde0 100644
--- a/ql/src/test/results/clientpositive/exim_04_all_part.q.out
+++ b/ql/src/test/results/clientpositive/exim_04_all_part.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_05_some_part.q.out b/ql/src/test/results/clientpositive/exim_05_some_part.q.out
index b7661bc..0796180 100644
--- a/ql/src/test/results/clientpositive/exim_05_some_part.q.out
+++ b/ql/src/test/results/clientpositive/exim_05_some_part.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_06_one_part.q.out b/ql/src/test/results/clientpositive/exim_06_one_part.q.out
index 1ba7dde..c3269fe 100644
--- a/ql/src/test/results/clientpositive/exim_06_one_part.q.out
+++ b/ql/src/test/results/clientpositive/exim_06_one_part.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out b/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
index e677702..e3682b9 100644
--- a/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
+++ b/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
@@ -91,11 +91,11 @@
tblproperties("maker"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="al")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="al")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out b/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
index 1506edd..c873133 100644
--- a/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
+++ b/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
@@ -49,10 +49,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (emp_org="hr")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department partition (emp_org="hr")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_department
POSTHOOK: Output: importer@exim_department@emp_org=hr
diff --git a/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out b/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
index b4ea0e3..9c3ff9c 100644
--- a/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
+++ b/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
@@ -91,20 +91,20 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_10_external_managed.q.out b/ql/src/test/results/clientpositive/exim_10_external_managed.q.out
index 5f755a5..27bf602 100644
--- a/ql/src/test/results/clientpositive/exim_10_external_managed.q.out
+++ b/ql/src/test/results/clientpositive/exim_10_external_managed.q.out
@@ -10,10 +10,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_11_managed_external.q.out b/ql/src/test/results/clientpositive/exim_11_managed_external.q.out
index c2baa82..cf6b66a 100644
--- a/ql/src/test/results/clientpositive/exim_11_managed_external.q.out
+++ b/ql/src/test/results/clientpositive/exim_11_managed_external.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_12_external_location.q.out b/ql/src/test/results/clientpositive/exim_12_external_location.q.out
index 1668448..36618b4 100644
--- a/ql/src/test/results/clientpositive/exim_12_external_location.q.out
+++ b/ql/src/test/results/clientpositive/exim_12_external_location.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_13_managed_location.q.out b/ql/src/test/results/clientpositive/exim_13_managed_location.q.out
index 2f6bcfe..89a0958 100644
--- a/ql/src/test/results/clientpositive/exim_13_managed_location.q.out
+++ b/ql/src/test/results/clientpositive/exim_13_managed_location.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out b/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
index c185325..6e8eda5 100644
--- a/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
+++ b/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
@@ -7,10 +7,10 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_15_external_part.q.out b/ql/src/test/results/clientpositive/exim_15_external_part.q.out
index a895bb9..7685af4 100644
--- a/ql/src/test/results/clientpositive/exim_15_external_part.q.out
+++ b/ql/src/test/results/clientpositive/exim_15_external_part.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
@@ -94,20 +94,20 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: importer@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: importer@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: importer@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_16_part_external.q.out b/ql/src/test/results/clientpositive/exim_16_part_external.q.out
index 7d328cd..f0898e5 100644
--- a/ql/src/test/results/clientpositive/exim_16_part_external.q.out
+++ b/ql/src/test/results/clientpositive/exim_16_part_external.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_17_part_managed.q.out b/ql/src/test/results/clientpositive/exim_17_part_managed.q.out
index 3f1cc25..badd68f 100644
--- a/ql/src/test/results/clientpositive/exim_17_part_managed.q.out
+++ b/ql/src/test/results/clientpositive/exim_17_part_managed.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_18_part_external.q.out b/ql/src/test/results/clientpositive/exim_18_part_external.q.out
index ffcc606..5a79ac7 100644
--- a/ql/src/test/results/clientpositive/exim_18_part_external.q.out
+++ b/ql/src/test/results/clientpositive/exim_18_part_external.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out b/ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out
index 49033c4..f711b8f 100644
--- a/ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out
+++ b/ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out
@@ -11,20 +11,20 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test2.dat"
+PREHOOK: query: load data local inpath "../../data/files/test2.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test2.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test2.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out b/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
index d78aa0d..6b7c5f9 100644
--- a/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
+++ b/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out b/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
index 846f9dc..0c15654 100644
--- a/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
+++ b/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
@@ -11,38 +11,38 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out b/ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out
index efbf8c0..5b9b81c 100644
--- a/ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
PREHOOK: query: grant Select on table exim_department to user hive_test_user
diff --git a/ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out b/ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
index 263ccb2..dc48f4d 100644
--- a/ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out b/ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
index f8dd286..07b6ebe 100644
--- a/ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
@@ -11,11 +11,11 @@
tblproperties("creator"="krishna")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_employee
-PREHOOK: query: load data local inpath "../data/files/test.dat"
+PREHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_employee
-POSTHOOK: query: load data local inpath "../data/files/test.dat"
+POSTHOOK: query: load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn")
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_employee
diff --git a/ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out b/ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
index 0975f29..980e6a2 100644
--- a/ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@exim_department
-PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
PREHOOK: type: LOAD
PREHOOK: Output: default@exim_department
-POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: query: load data local inpath "../../data/files/test.dat" into table exim_department
POSTHOOK: type: LOAD
POSTHOOK: Output: default@exim_department
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/global_limit.q.out b/ql/src/test/results/clientpositive/global_limit.q.out
index 395ef93..cdb8526 100644
--- a/ql/src/test/results/clientpositive/global_limit.q.out
+++ b/ql/src/test/results/clientpositive/global_limit.q.out
@@ -19,22 +19,22 @@
POSTHOOK: query: create table gl_src1 (key int, value string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@gl_src1
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src1
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src1
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src1
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src1
PREHOOK: query: -- need one file
@@ -1134,33 +1134,33 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@gl_src_part1
POSTHOOK: Lineage: gl_tgt.key EXPRESSION [(gl_src1)gl_src1.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src_part1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE gl_src_part1 partition(p='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src_part1
POSTHOOK: Output: default@gl_src_part1@p=11
POSTHOOK: Lineage: gl_tgt.key EXPRESSION [(gl_src1)gl_src1.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src_part1
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src_part1
POSTHOOK: Output: default@gl_src_part1@p=12
POSTHOOK: Lineage: gl_tgt.key EXPRESSION [(gl_src1)gl_src1.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src_part1@p=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src_part1@p=12
POSTHOOK: Lineage: gl_tgt.key EXPRESSION [(gl_src1)gl_src1.FieldSchema(name:key, type:int, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@gl_src_part1@p=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE gl_src_part1 partition(p='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@gl_src_part1@p=12
POSTHOOK: Lineage: gl_tgt.key EXPRESSION [(gl_src1)gl_src1.FieldSchema(name:key, type:int, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/groupby10.q.out b/ql/src/test/results/clientpositive/groupby10.q.out
index 7ae4b95..3f53ac7 100644
--- a/ql/src/test/results/clientpositive/groupby10.q.out
+++ b/ql/src/test/results/clientpositive/groupby10.q.out
@@ -13,10 +13,10 @@
POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE INPUT
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
PREHOOK: type: LOAD
PREHOOK: Output: default@input
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv5.txt' INTO TABLE INPUT
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/groupby_cube1.q.out b/ql/src/test/results/clientpositive/groupby_cube1.q.out
index 34fcd90..583cd06 100644
--- a/ql/src/test/results/clientpositive/groupby_cube1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_cube1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_id1.q.out b/ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
index 8ea5b44..b972173 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: SELECT key, val, GROUPING__ID from T1 group by key, val with cube
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_id2.q.out b/ql/src/test/results/clientpositive/groupby_grouping_id2.q.out
index a009e69..077a63f 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_id2.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_id2.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key INT, value INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/groupby_groupingid.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/groupby_groupingid.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: SELECT key, value, GROUPING__ID, count(*) from T1 GROUP BY key, value WITH ROLLUP
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
index 41c47f5..d2934ba 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: SELECT * FROM T1
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
index 7186083..670d726 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- Since 4 grouping sets would be generated for the query below, an additional MR job should be created
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
index cd5194e..68ce31b 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
@@ -11,16 +11,16 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets2.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets2.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets2.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets2.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- The query below will execute in a single MR job, since 4 rows are generated per input row
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
index 1b321e3..a34967b 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
@@ -7,10 +7,10 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- This tests that cubes and rollups work fine inside sub-queries.
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
index e0b52b4..501a84d 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
@@ -7,10 +7,10 @@
CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/grouping_sets.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- This tests that cubes and rollups work fine where the source is a sub-query
diff --git a/ql/src/test/results/clientpositive/groupby_rollup1.q.out b/ql/src/test/results/clientpositive/groupby_rollup1.q.out
index f5d5cf0..90ca2a8 100644
--- a/ql/src/test/results/clientpositive/groupby_rollup1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_rollup1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/groupby_sort_1.q.out b/ql/src/test/results/clientpositive/groupby_sort_1.q.out
index ebe5e67..7ecac4c 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_1.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/groupby_sort_2.q.out b/ql/src/test/results/clientpositive/groupby_sort_2.q.out
index c20b757..69f0513 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_2.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_2.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/groupby_sort_3.q.out b/ql/src/test/results/clientpositive/groupby_sort_3.q.out
index 700df0a..aa205d2 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_3.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_3.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/groupby_sort_4.q.out b/ql/src/test/results/clientpositive/groupby_sort_4.q.out
index 95ff90a..b2f6895 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_4.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_4.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/groupby_sort_5.q.out b/ql/src/test/results/clientpositive/groupby_sort_5.q.out
index 06afe99..8c1ba8e 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_5.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_5.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
@@ -218,10 +218,10 @@
POSTHOOK: Lineage: outputtbl1.val SIMPLE [(t1)t1.FieldSchema(name:val, type:string, comment:null), ]
POSTHOOK: Lineage: t1.key SIMPLE [(t1)t1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t1.val SIMPLE [(t1)t1.FieldSchema(name:val, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Lineage: outputtbl1.cnt EXPRESSION [(t1)t1.null, ]
@@ -459,10 +459,10 @@
POSTHOOK: Lineage: t1.key SIMPLE [(t1)t1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t1.val SIMPLE [(t1)t1.FieldSchema(name:val, type:string, comment:null), ]
POSTHOOK: Lineage: t1.val SIMPLE [(t1)t1.FieldSchema(name:val, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Lineage: outputtbl1.cnt EXPRESSION [(t1)t1.null, ]
diff --git a/ql/src/test/results/clientpositive/groupby_sort_6.q.out b/ql/src/test/results/clientpositive/groupby_sort_6.q.out
index a4e8707..7ff8685 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_6.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_6.q.out
@@ -157,10 +157,10 @@
#### A masked pattern was here ####
POSTHOOK: Lineage: outputtbl1.cnt EXPRESSION [(t1)t1.null, ]
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Output: default@t1@ds=2
diff --git a/ql/src/test/results/clientpositive/groupby_sort_7.q.out b/ql/src/test/results/clientpositive/groupby_sort_7.q.out
index 489178c..7563ed0 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_7.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_7.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Output: default@t1@ds=1
diff --git a/ql/src/test/results/clientpositive/groupby_sort_8.q.out b/ql/src/test/results/clientpositive/groupby_sort_8.q.out
index 22435d9..e58127c 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_8.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_8.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Output: default@t1@ds=1
diff --git a/ql/src/test/results/clientpositive/groupby_sort_9.q.out b/ql/src/test/results/clientpositive/groupby_sort_9.q.out
index c8610e9..590721a 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_9.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_9.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1 PARTITION (ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
POSTHOOK: Output: default@t1@ds=1
diff --git a/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out b/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
index 28baf55..59b51c6 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/groupby_sort_test_1.q.out b/ql/src/test/results/clientpositive/groupby_sort_test_1.q.out
index 9a36371..e951ac0 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_test_1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_test_1.q.out
@@ -5,10 +5,10 @@
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: -- perform an insert to make sure there are 2 files
diff --git a/ql/src/test/results/clientpositive/index_serde.q.out b/ql/src/test/results/clientpositive/index_serde.q.out
index a034352..fcb6ae5 100644
--- a/ql/src/test/results/clientpositive/index_serde.q.out
+++ b/ql/src/test/results/clientpositive/index_serde.q.out
@@ -68,10 +68,10 @@
number int from deserializer
first_name string from deserializer
last_name string from deserializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
PREHOOK: type: LOAD
PREHOOK: Output: default@doctors
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
POSTHOOK: type: LOAD
POSTHOOK: Output: default@doctors
PREHOOK: query: -- Create and build an index
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
index 15380f5..b832564 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
@@ -450,10 +450,10 @@
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
@@ -469,10 +469,10 @@
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -487,10 +487,10 @@
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -505,10 +505,10 @@
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -523,10 +523,10 @@
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
diff --git a/ql/src/test/results/clientpositive/infer_const_type.q.out b/ql/src/test/results/clientpositive/infer_const_type.q.out
index 7439680..1d7530a 100644
--- a/ql/src/test/results/clientpositive/infer_const_type.q.out
+++ b/ql/src/test/results/clientpositive/infer_const_type.q.out
@@ -7,10 +7,10 @@
POSTHOOK: query: CREATE TABLE infertypes(ti TINYINT, si SMALLINT, i INT, bi BIGINT, fl FLOAT, db DOUBLE, str STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@infertypes
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes
PREHOOK: type: LOAD
PREHOOK: Output: default@infertypes
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/infer_const_type.txt' OVERWRITE INTO TABLE infertypes
POSTHOOK: type: LOAD
POSTHOOK: Output: default@infertypes
PREHOOK: query: SELECT * FROM infertypes
diff --git a/ql/src/test/results/clientpositive/input13.q.out b/ql/src/test/results/clientpositive/input13.q.out
index 2fbf51f..d03c93e 100644
--- a/ql/src/test/results/clientpositive/input13.q.out
+++ b/ql/src/test/results/clientpositive/input13.q.out
@@ -18,17 +18,17 @@
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 300))))
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 'target/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 300))))
STAGE DEPENDENCIES:
Stage-4 is a root stage
@@ -323,7 +323,7 @@
Move Operator
files:
hdfs directory: true
- destination: ../build/ql/test/data/warehouse/dest4.out
+ destination: target/warehouse/dest4.out
Stage: Stage-23
Map Reduce
@@ -362,24 +362,24 @@
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/warehouse/dest4.out
PREHOOK: Output: default@dest1
PREHOOK: Output: default@dest2
PREHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+PREHOOK: Output: target/warehouse/dest4.out
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/warehouse/dest4.out
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+POSTHOOK: Output: target/warehouse/dest4.out
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/input16.q.out b/ql/src/test/results/clientpositive/input16.q.out
index 98524b0..abd691e 100644
--- a/ql/src/test/results/clientpositive/input16.q.out
+++ b/ql/src/test/results/clientpositive/input16.q.out
@@ -9,10 +9,10 @@
POSTHOOK: query: CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT16
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT16
PREHOOK: type: LOAD
PREHOOK: Output: default@input16
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT16
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input16
PREHOOK: query: SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16
diff --git a/ql/src/test/results/clientpositive/input16_cc.q.out b/ql/src/test/results/clientpositive/input16_cc.q.out
index bbcc625..cd35507 100644
--- a/ql/src/test/results/clientpositive/input16_cc.q.out
+++ b/ql/src/test/results/clientpositive/input16_cc.q.out
@@ -13,10 +13,10 @@
POSTHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT16_CC
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
PREHOOK: type: LOAD
PREHOOK: Output: default@input16_cc
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input16_cc
PREHOOK: query: SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC
diff --git a/ql/src/test/results/clientpositive/input19.q.out b/ql/src/test/results/clientpositive/input19.q.out
index 554f5e2..fb57bc7 100644
--- a/ql/src/test/results/clientpositive/input19.q.out
+++ b/ql/src/test/results/clientpositive/input19.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES ( 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol', 'quote.delim'= '("|\\[|\\])', 'field.delim'=' ', 'serialization.null.format'='-' ) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@apachelog
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/apache.access.log' INTO TABLE apachelog
PREHOOK: type: LOAD
PREHOOK: Output: default@apachelog
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/apache.access.log' INTO TABLE apachelog
POSTHOOK: type: LOAD
POSTHOOK: Output: default@apachelog
PREHOOK: query: SELECT a.* FROM apachelog a
diff --git a/ql/src/test/results/clientpositive/input21.q.out b/ql/src/test/results/clientpositive/input21.q.out
index 0c9058d..75f7567 100644
--- a/ql/src/test/results/clientpositive/input21.q.out
+++ b/ql/src/test/results/clientpositive/input21.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE src_null(a STRING, b STRING, c STRING, d STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_null
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE src_null
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/null.txt' INTO TABLE src_null
PREHOOK: type: LOAD
PREHOOK: Output: default@src_null
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE src_null
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/null.txt' INTO TABLE src_null
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_null
PREHOOK: query: EXPLAIN SELECT * FROM src_null DISTRIBUTE BY c SORT BY d
diff --git a/ql/src/test/results/clientpositive/input22.q.out b/ql/src/test/results/clientpositive/input22.q.out
index 0de36ef..fc55639 100644
--- a/ql/src/test/results/clientpositive/input22.q.out
+++ b/ql/src/test/results/clientpositive/input22.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
PREHOOK: type: LOAD
PREHOOK: Output: default@input4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input4
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/input37.q.out b/ql/src/test/results/clientpositive/input37.q.out
index ddb341e..9c5e70d 100644
--- a/ql/src/test/results/clientpositive/input37.q.out
+++ b/ql/src/test/results/clientpositive/input37.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table documents(contents string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@documents
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE documents
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE documents
PREHOOK: type: LOAD
PREHOOK: Output: default@documents
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE documents
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE documents
POSTHOOK: type: LOAD
POSTHOOK: Output: default@documents
PREHOOK: query: select url, count(1)
diff --git a/ql/src/test/results/clientpositive/input3_limit.q.out b/ql/src/test/results/clientpositive/input3_limit.q.out
index 57e0f28..c8d5ab1 100644
--- a/ql/src/test/results/clientpositive/input3_limit.q.out
+++ b/ql/src/test/results/clientpositive/input3_limit.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, value STRING)
diff --git a/ql/src/test/results/clientpositive/input4.q.out b/ql/src/test/results/clientpositive/input4.q.out
index 70c918e..23893cc 100644
--- a/ql/src/test/results/clientpositive/input4.q.out
+++ b/ql/src/test/results/clientpositive/input4.q.out
@@ -4,13 +4,13 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT4
PREHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
PREHOOK: type: LOAD
POSTHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
POSTHOOK: type: LOAD
ABSTRACT SYNTAX TREE:
- (TOK_LOAD '../data/files/kv1.txt' (TOK_TAB (TOK_TABNAME INPUT4)) LOCAL)
+ (TOK_LOAD '../../data/files/kv1.txt' (TOK_TAB (TOK_TABNAME INPUT4)) LOCAL)
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -36,10 +36,10 @@
Stats-Aggr Operator
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
PREHOOK: type: LOAD
PREHOOK: Output: default@input4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input4
PREHOOK: query: EXPLAIN FORMATTED
diff --git a/ql/src/test/results/clientpositive/input40.q.out b/ql/src/test/results/clientpositive/input40.q.out
index 17b3315..f6213ec 100644
--- a/ql/src/test/results/clientpositive/input40.q.out
+++ b/ql/src/test/results/clientpositive/input40.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table tmp_insert_test (key string, value string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tmp_insert_test
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_insert_test
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_insert_test
PREHOOK: query: select * from tmp_insert_test
@@ -522,10 +522,10 @@
POSTHOOK: query: create table tmp_insert_test_p (key string, value string) partitioned by (ds string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tmp_insert_test_p
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_insert_test_p
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_insert_test_p
POSTHOOK: Output: default@tmp_insert_test_p@ds=2009-08-01
@@ -1041,10 +1041,10 @@
97 val_97 2009-08-01
98 val_98 2009-08-01
98 val_98 2009-08-01
-PREHOOK: query: load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_insert_test_p@ds=2009-08-01
-POSTHOOK: query: load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_insert_test_p@ds=2009-08-01
PREHOOK: query: select * from tmp_insert_test_p where ds= '2009-08-01'
diff --git a/ql/src/test/results/clientpositive/input43.q.out b/ql/src/test/results/clientpositive/input43.q.out
index 6d5449d..c7d2c5a 100644
--- a/ql/src/test/results/clientpositive/input43.q.out
+++ b/ql/src/test/results/clientpositive/input43.q.out
@@ -7,10 +7,10 @@
POSTHOOK: query: create table tst_src1 like src1
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tst_src1
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tst_src1
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tst_src1
PREHOOK: type: LOAD
PREHOOK: Output: default@tst_src1
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tst_src1
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tst_src1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tst_src1
PREHOOK: query: select count(1) from tst_src1
@@ -22,10 +22,10 @@
POSTHOOK: Input: default@tst_src1
#### A masked pattern was here ####
500
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tst_src1
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tst_src1
PREHOOK: type: LOAD
PREHOOK: Output: default@tst_src1
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tst_src1
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table tst_src1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tst_src1
PREHOOK: query: select count(1) from tst_src1
diff --git a/ql/src/test/results/clientpositive/input45.q.out b/ql/src/test/results/clientpositive/input45.q.out
index b23d86a..728df2d 100644
--- a/ql/src/test/results/clientpositive/input45.q.out
+++ b/ql/src/test/results/clientpositive/input45.q.out
@@ -1,11 +1,11 @@
-PREHOOK: query: INSERT OVERWRITE DIRECTORY '../build/ql/test/data/x/y/z/' SELECT src.* FROM src
+PREHOOK: query: INSERT OVERWRITE DIRECTORY 'target/data/x/y/z/' SELECT src.* FROM src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/x/y/z
-POSTHOOK: query: INSERT OVERWRITE DIRECTORY '../build/ql/test/data/x/y/z/' SELECT src.* FROM src
+PREHOOK: Output: target/data/x/y/z
+POSTHOOK: query: INSERT OVERWRITE DIRECTORY 'target/data/x/y/z/' SELECT src.* FROM src
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/x/y/z
+POSTHOOK: Output: target/data/x/y/z
238val_238
86val_86
311val_311
diff --git a/ql/src/test/results/clientpositive/input4_cb_delim.q.out b/ql/src/test/results/clientpositive/input4_cb_delim.q.out
index 6e2c58a..4bec55c 100644
--- a/ql/src/test/results/clientpositive/input4_cb_delim.q.out
+++ b/ql/src/test/results/clientpositive/input4_cb_delim.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUT4_CB
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
PREHOOK: type: LOAD
PREHOOK: Output: default@input4_cb
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
POSTHOOK: type: LOAD
POSTHOOK: Output: default@input4_cb
PREHOOK: query: SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB
diff --git a/ql/src/test/results/clientpositive/inputddl5.q.out b/ql/src/test/results/clientpositive/inputddl5.q.out
index 8093347..d02d72d 100644
--- a/ql/src/test/results/clientpositive/inputddl5.q.out
+++ b/ql/src/test/results/clientpositive/inputddl5.q.out
@@ -7,10 +7,10 @@
CREATE TABLE INPUTDDL5(name STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUTDDL5
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE INPUTDDL5
PREHOOK: type: LOAD
PREHOOK: Output: default@inputddl5
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE INPUTDDL5
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputddl5
PREHOOK: query: DESCRIBE INPUTDDL5
diff --git a/ql/src/test/results/clientpositive/inputddl6.q.out b/ql/src/test/results/clientpositive/inputddl6.q.out
index f81c985..ca341bb 100644
--- a/ql/src/test/results/clientpositive/inputddl6.q.out
+++ b/ql/src/test/results/clientpositive/inputddl6.q.out
@@ -9,17 +9,17 @@
CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@INPUTDDL6
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@inputddl6
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputddl6
POSTHOOK: Output: default@inputddl6@ds=2008-04-09
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@inputddl6
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputddl6
POSTHOOK: Output: default@inputddl6@ds=2008-04-08
diff --git a/ql/src/test/results/clientpositive/inputddl7.q.out b/ql/src/test/results/clientpositive/inputddl7.q.out
index 2afb7df..91fab24 100644
--- a/ql/src/test/results/clientpositive/inputddl7.q.out
+++ b/ql/src/test/results/clientpositive/inputddl7.q.out
@@ -11,10 +11,10 @@
CREATE TABLE T1(name STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: SELECT COUNT(1) FROM T1
@@ -31,10 +31,10 @@
POSTHOOK: query: CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: SELECT COUNT(1) FROM T2
@@ -51,10 +51,10 @@
POSTHOOK: query: CREATE TABLE T3(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
POSTHOOK: Output: default@t3@ds=2008-04-09
@@ -74,10 +74,10 @@
POSTHOOK: query: CREATE TABLE T4(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@t4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t4
POSTHOOK: Output: default@t4@ds=2008-04-09
diff --git a/ql/src/test/results/clientpositive/insert1_overwrite_partitions.q.out b/ql/src/test/results/clientpositive/insert1_overwrite_partitions.q.out
index 10901bb..5d79ac2 100644
--- a/ql/src/test/results/clientpositive/insert1_overwrite_partitions.q.out
+++ b/ql/src/test/results/clientpositive/insert1_overwrite_partitions.q.out
@@ -3,17 +3,17 @@
POSTHOOK: query: CREATE TABLE sourceTable (one string,two string) PARTITIONED BY (ds string,hr string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@sourceTable
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@sourcetable
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@sourcetable
POSTHOOK: Output: default@sourcetable@ds=2011-11-11/hr=11
-PREHOOK: query: load data local inpath '../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12')
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@sourcetable
-POSTHOOK: query: load data local inpath '../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12')
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' INTO TABLE sourceTable partition(ds='2011-11-11', hr='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@sourcetable
POSTHOOK: Output: default@sourcetable@ds=2011-11-11/hr=12
diff --git a/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out b/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
index 2c5d56e..685629e 100644
--- a/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
+++ b/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
@@ -11,17 +11,17 @@
POSTHOOK: query: CREATE TABLE db1.sourceTable (one string,two string) PARTITIONED BY (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db1@sourceTable
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
PREHOOK: type: LOAD
PREHOOK: Output: db1@sourcetable
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@sourcetable
POSTHOOK: Output: db1@sourcetable@ds=2011-11-11
-PREHOOK: query: load data local inpath '../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
PREHOOK: type: LOAD
PREHOOK: Output: db1@sourcetable@ds=2011-11-11
-POSTHOOK: query: load data local inpath '../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' INTO TABLE db1.sourceTable partition(ds='2011-11-11')
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@sourcetable@ds=2011-11-11
PREHOOK: query: CREATE TABLE db2.destinTable (one string,two string) PARTITIONED BY (ds string)
diff --git a/ql/src/test/results/clientpositive/insert_overwrite_local_directory_1.q.out b/ql/src/test/results/clientpositive/insert_overwrite_local_directory_1.q.out
index 765911a..ab63e54 100644
--- a/ql/src/test/results/clientpositive/insert_overwrite_local_directory_1.q.out
+++ b/ql/src/test/results/clientpositive/insert_overwrite_local_directory_1.q.out
@@ -1,13 +1,13 @@
-PREHOOK: query: insert overwrite local directory '../data/files/local_src_table_1'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_src_table_1'
select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../data/files/local_src_table_1
-POSTHOOK: query: insert overwrite local directory '../data/files/local_src_table_1'
+PREHOOK: Output: ../../data/files/local_src_table_1
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_src_table_1'
select * from src
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../data/files/local_src_table_1
+POSTHOOK: Output: ../../data/files/local_src_table_1
238val_238
86val_86
311val_311
@@ -508,20 +508,20 @@
400val_400
200val_200
97val_97
-PREHOOK: query: insert overwrite local directory '../data/files/local_src_table_2'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_src_table_2'
row format delimited
FIELDS TERMINATED BY ':'
select * from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../data/files/local_src_table_2
-POSTHOOK: query: insert overwrite local directory '../data/files/local_src_table_2'
+PREHOOK: Output: ../../data/files/local_src_table_2
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_src_table_2'
row format delimited
FIELDS TERMINATED BY ':'
select * from src
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../data/files/local_src_table_2
+POSTHOOK: Output: ../../data/files/local_src_table_2
238:val_238
86:val_86
311:val_311
@@ -1033,58 +1033,58 @@
COLLECTION ITEMS TERMINATED BY ','
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@array_table
-PREHOOK: query: load data local inpath "../data/files/array_table.txt" overwrite into table array_table
+PREHOOK: query: load data local inpath "../../data/files/array_table.txt" overwrite into table array_table
PREHOOK: type: LOAD
PREHOOK: Output: default@array_table
-POSTHOOK: query: load data local inpath "../data/files/array_table.txt" overwrite into table array_table
+POSTHOOK: query: load data local inpath "../../data/files/array_table.txt" overwrite into table array_table
POSTHOOK: type: LOAD
POSTHOOK: Output: default@array_table
-PREHOOK: query: insert overwrite local directory '../data/files/local_array_table_1'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_array_table_1'
select * from array_table
PREHOOK: type: QUERY
PREHOOK: Input: default@array_table
-PREHOOK: Output: ../data/files/local_array_table_1
-POSTHOOK: query: insert overwrite local directory '../data/files/local_array_table_1'
+PREHOOK: Output: ../../data/files/local_array_table_1
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_array_table_1'
select * from array_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@array_table
-POSTHOOK: Output: ../data/files/local_array_table_1
+POSTHOOK: Output: ../../data/files/local_array_table_1
a1a2a3b1b2b3b4
a21a22a23b21b22b23b24
-PREHOOK: query: insert overwrite local directory '../data/files/local_array_table_2'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_array_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select * from array_table
PREHOOK: type: QUERY
PREHOOK: Input: default@array_table
-PREHOOK: Output: ../data/files/local_array_table_2
-POSTHOOK: query: insert overwrite local directory '../data/files/local_array_table_2'
+PREHOOK: Output: ../../data/files/local_array_table_2
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_array_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select * from array_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@array_table
-POSTHOOK: Output: ../data/files/local_array_table_2
+POSTHOOK: Output: ../../data/files/local_array_table_2
a1#a2#a3:b1#b2#b3#b4
a21#a22#a23:b21#b22#b23#b24
-PREHOOK: query: insert overwrite local directory '../data/files/local_array_table_2_withfields'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_array_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select b,a from array_table
PREHOOK: type: QUERY
PREHOOK: Input: default@array_table
-PREHOOK: Output: ../data/files/local_array_table_2_withfields
-POSTHOOK: query: insert overwrite local directory '../data/files/local_array_table_2_withfields'
+PREHOOK: Output: ../../data/files/local_array_table_2_withfields
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_array_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
select b,a from array_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@array_table
-POSTHOOK: Output: ../data/files/local_array_table_2_withfields
+POSTHOOK: Output: ../../data/files/local_array_table_2_withfields
b1#b2#b3#b4:a1#a2#a3
b21#b22#b23#b24:a21#a22#a23
PREHOOK: query: create table map_table (foo STRING , bar MAP<STRING, STRING>)
@@ -1102,25 +1102,25 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@map_table
-PREHOOK: query: load data local inpath "../data/files/map_table.txt" overwrite into table map_table
+PREHOOK: query: load data local inpath "../../data/files/map_table.txt" overwrite into table map_table
PREHOOK: type: LOAD
PREHOOK: Output: default@map_table
-POSTHOOK: query: load data local inpath "../data/files/map_table.txt" overwrite into table map_table
+POSTHOOK: query: load data local inpath "../../data/files/map_table.txt" overwrite into table map_table
POSTHOOK: type: LOAD
POSTHOOK: Output: default@map_table
-PREHOOK: query: insert overwrite local directory '../data/files/local_map_table_1'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_map_table_1'
select * from map_table
PREHOOK: type: QUERY
PREHOOK: Input: default@map_table
-PREHOOK: Output: ../data/files/local_map_table_1
-POSTHOOK: query: insert overwrite local directory '../data/files/local_map_table_1'
+PREHOOK: Output: ../../data/files/local_map_table_1
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_map_table_1'
select * from map_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@map_table
-POSTHOOK: Output: ../data/files/local_map_table_1
+POSTHOOK: Output: ../../data/files/local_map_table_1
foo1k1v1k2v2k3v3
foo2k21v21k22v22k31v31
-PREHOOK: query: insert overwrite local directory '../data/files/local_map_table_2'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_map_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
@@ -1128,8 +1128,8 @@
select * from map_table
PREHOOK: type: QUERY
PREHOOK: Input: default@map_table
-PREHOOK: Output: ../data/files/local_map_table_2
-POSTHOOK: query: insert overwrite local directory '../data/files/local_map_table_2'
+PREHOOK: Output: ../../data/files/local_map_table_2
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_map_table_2'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
@@ -1137,10 +1137,10 @@
select * from map_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@map_table
-POSTHOOK: Output: ../data/files/local_map_table_2
+POSTHOOK: Output: ../../data/files/local_map_table_2
foo1:k1=v1#k2=v2#k3=v3
foo2:k21=v21#k22=v22#k31=v31
-PREHOOK: query: insert overwrite local directory '../data/files/local_map_table_2_withfields'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_map_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
@@ -1148,8 +1148,8 @@
select bar,foo from map_table
PREHOOK: type: QUERY
PREHOOK: Input: default@map_table
-PREHOOK: Output: ../data/files/local_map_table_2_withfields
-POSTHOOK: query: insert overwrite local directory '../data/files/local_map_table_2_withfields'
+PREHOOK: Output: ../../data/files/local_map_table_2_withfields
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_map_table_2_withfields'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ':'
COLLECTION ITEMS TERMINATED BY '#'
@@ -1157,53 +1157,53 @@
select bar,foo from map_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@map_table
-POSTHOOK: Output: ../data/files/local_map_table_2_withfields
+POSTHOOK: Output: ../../data/files/local_map_table_2_withfields
k1=v1#k2=v2#k3=v3:foo1
k21=v21#k22=v22#k31=v31:foo2
-PREHOOK: query: insert overwrite local directory '../data/files/local_array_table_3'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_array_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from array_table
PREHOOK: type: QUERY
PREHOOK: Input: default@array_table
-PREHOOK: Output: ../data/files/local_array_table_3
-POSTHOOK: query: insert overwrite local directory '../data/files/local_array_table_3'
+PREHOOK: Output: ../../data/files/local_array_table_3
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_array_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from array_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@array_table
-POSTHOOK: Output: ../data/files/local_array_table_3
+POSTHOOK: Output: ../../data/files/local_array_table_3
["a1","a2","a3"]["b1","b2","b3","b4"]
["a21","a22","a23"]["b21","b22","b23","b24"]
-PREHOOK: query: insert overwrite local directory '../data/files/local_map_table_3'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_map_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from map_table
PREHOOK: type: QUERY
PREHOOK: Input: default@map_table
-PREHOOK: Output: ../data/files/local_map_table_3
-POSTHOOK: query: insert overwrite local directory '../data/files/local_map_table_3'
+PREHOOK: Output: ../../data/files/local_map_table_3
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_map_table_3'
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.DelimitedJSONSerDe'
STORED AS TEXTFILE
select * from map_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@map_table
-POSTHOOK: Output: ../data/files/local_map_table_3
+POSTHOOK: Output: ../../data/files/local_map_table_3
foo1{"k1":"v1","k2":"v2","k3":"v3"}
foo2{"k21":"v21","k22":"v22","k31":"v31"}
-PREHOOK: query: insert overwrite local directory '../data/files/local_rctable'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_rctable'
STORED AS RCFILE
select value,key from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../data/files/local_rctable
-POSTHOOK: query: insert overwrite local directory '../data/files/local_rctable'
+PREHOOK: Output: ../../data/files/local_rctable
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_rctable'
STORED AS RCFILE
select value,key from src
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../data/files/local_rctable
+POSTHOOK: Output: ../../data/files/local_rctable
#### A masked pattern was here ####
PREHOOK: query: create external table local_rctable(value string, key string)
STORED AS RCFILE
@@ -1214,20 +1214,20 @@
#### A masked pattern was here ####
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@local_rctable
-PREHOOK: query: insert overwrite local directory '../data/files/local_rctable_out'
+PREHOOK: query: insert overwrite local directory '../../data/files/local_rctable_out'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
select key,value from local_rctable
PREHOOK: type: QUERY
PREHOOK: Input: default@local_rctable
-PREHOOK: Output: ../data/files/local_rctable_out
-POSTHOOK: query: insert overwrite local directory '../data/files/local_rctable_out'
+PREHOOK: Output: ../../data/files/local_rctable_out
+POSTHOOK: query: insert overwrite local directory '../../data/files/local_rctable_out'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
select key,value from local_rctable
POSTHOOK: type: QUERY
POSTHOOK: Input: default@local_rctable
-POSTHOOK: Output: ../data/files/local_rctable_out
+POSTHOOK: Output: ../../data/files/local_rctable_out
238 val_238
86 val_86
311 val_311
diff --git a/ql/src/test/results/clientpositive/join_1to1.q.out b/ql/src/test/results/clientpositive/join_1to1.q.out
index 636c8b1..08d8482 100644
--- a/ql/src/test/results/clientpositive/join_1to1.q.out
+++ b/ql/src/test/results/clientpositive/join_1to1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE join_1to1_1(key1 int, key2 int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@join_1to1_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE join_1to1_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE join_1to1_1
PREHOOK: type: LOAD
PREHOOK: Output: default@join_1to1_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE join_1to1_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE join_1to1_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@join_1to1_1
PREHOOK: query: CREATE TABLE join_1to1_2(key1 int, key2 int, value int)
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE join_1to1_2(key1 int, key2 int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@join_1to1_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE join_1to1_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE join_1to1_2
PREHOOK: type: LOAD
PREHOOK: Output: default@join_1to1_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE join_1to1_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE join_1to1_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@join_1to1_2
PREHOOK: query: SELECT * FROM join_1to1_1 a join join_1to1_2 b on a.key1 = b.key1 ORDER BY a.key1 ASC, a.key2 ASC, a.value ASC, b.key1 ASC, b.key2 ASC, b.value ASC
diff --git a/ql/src/test/results/clientpositive/join_array.q.out b/ql/src/test/results/clientpositive/join_array.q.out
index 2bf2c89..102753d 100644
--- a/ql/src/test/results/clientpositive/join_array.q.out
+++ b/ql/src/test/results/clientpositive/join_array.q.out
@@ -8,16 +8,16 @@
POSTHOOK: query: create table tinyB(a bigint, bList array<int>) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tinyB
-PREHOOK: query: load data local inpath '../data/files/tiny_a.txt' into table tinyA
+PREHOOK: query: load data local inpath '../../data/files/tiny_a.txt' into table tinyA
PREHOOK: type: LOAD
PREHOOK: Output: default@tinya
-POSTHOOK: query: load data local inpath '../data/files/tiny_a.txt' into table tinyA
+POSTHOOK: query: load data local inpath '../../data/files/tiny_a.txt' into table tinyA
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tinya
-PREHOOK: query: load data local inpath '../data/files/tiny_b.txt' into table tinyB
+PREHOOK: query: load data local inpath '../../data/files/tiny_b.txt' into table tinyB
PREHOOK: type: LOAD
PREHOOK: Output: default@tinyb
-POSTHOOK: query: load data local inpath '../data/files/tiny_b.txt' into table tinyB
+POSTHOOK: query: load data local inpath '../../data/files/tiny_b.txt' into table tinyB
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tinyb
PREHOOK: query: select * from tinyA
diff --git a/ql/src/test/results/clientpositive/join_casesensitive.q.out b/ql/src/test/results/clientpositive/join_casesensitive.q.out
index 94b0eb6..8e0534c 100644
--- a/ql/src/test/results/clientpositive/join_casesensitive.q.out
+++ b/ql/src/test/results/clientpositive/join_casesensitive.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE joinone(key1 int, key2 int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@joinone
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE joinone
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE joinone
PREHOOK: type: LOAD
PREHOOK: Output: default@joinone
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in5.txt' INTO TABLE joinone
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in5.txt' INTO TABLE joinone
POSTHOOK: type: LOAD
POSTHOOK: Output: default@joinone
PREHOOK: query: CREATE TABLE joinTwo(key1 int, key2 int, value int)
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE joinTwo(key1 int, key2 int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@joinTwo
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE joinTwo
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE joinTwo
PREHOOK: type: LOAD
PREHOOK: Output: default@jointwo
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in6.txt' INTO TABLE joinTwo
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in6.txt' INTO TABLE joinTwo
POSTHOOK: type: LOAD
POSTHOOK: Output: default@jointwo
PREHOOK: query: SELECT * FROM joinone JOIN joinTwo ON(joinone.key2=joinTwo.key2) ORDER BY joinone.key1 ASC, joinone.key2 ASC, joinone.value ASC, joinTwo.key1 ASC, joinTwo.key2 ASC, joinTwo.value ASC
diff --git a/ql/src/test/results/clientpositive/join_filters.q.out b/ql/src/test/results/clientpositive/join_filters.q.out
index 4c324a0..471eb45 100644
--- a/ql/src/test/results/clientpositive/join_filters.q.out
+++ b/ql/src/test/results/clientpositive/join_filters.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE myinput1(key int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@myinput1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1
PREHOOK: type: LOAD
PREHOOK: Output: default@myinput1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@myinput1
PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
@@ -493,28 +493,28 @@
POSTHOOK: query: CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
diff --git a/ql/src/test/results/clientpositive/join_hive_626.q.out b/ql/src/test/results/clientpositive/join_hive_626.q.out
index f33c3e6..2bf9980 100644
--- a/ql/src/test/results/clientpositive/join_hive_626.q.out
+++ b/ql/src/test/results/clientpositive/join_hive_626.q.out
@@ -23,22 +23,22 @@
terminated by ',' stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hive_count
-PREHOOK: query: load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo
+PREHOOK: query: load data local inpath '../../data/files/hive_626_foo.txt' overwrite into table hive_foo
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_foo
-POSTHOOK: query: load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo
+POSTHOOK: query: load data local inpath '../../data/files/hive_626_foo.txt' overwrite into table hive_foo
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_foo
-PREHOOK: query: load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar
+PREHOOK: query: load data local inpath '../../data/files/hive_626_bar.txt' overwrite into table hive_bar
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_bar
-POSTHOOK: query: load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar
+POSTHOOK: query: load data local inpath '../../data/files/hive_626_bar.txt' overwrite into table hive_bar
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_bar
-PREHOOK: query: load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count
+PREHOOK: query: load data local inpath '../../data/files/hive_626_count.txt' overwrite into table hive_count
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_count
-POSTHOOK: query: load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count
+POSTHOOK: query: load data local inpath '../../data/files/hive_626_count.txt' overwrite into table hive_count
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_count
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/join_nulls.q.out b/ql/src/test/results/clientpositive/join_nulls.q.out
index b095266..10e4c9b 100644
--- a/ql/src/test/results/clientpositive/join_nulls.q.out
+++ b/ql/src/test/results/clientpositive/join_nulls.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE myinput1(key int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@myinput1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1
PREHOOK: type: LOAD
PREHOOK: Output: default@myinput1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' INTO TABLE myinput1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@myinput1
PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
@@ -429,28 +429,28 @@
POSTHOOK: query: CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in1.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in2.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key = b.key ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
diff --git a/ql/src/test/results/clientpositive/join_nullsafe.q.out b/ql/src/test/results/clientpositive/join_nullsafe.q.out
index 62f96ec..9aa18e0 100644
--- a/ql/src/test/results/clientpositive/join_nullsafe.q.out
+++ b/ql/src/test/results/clientpositive/join_nullsafe.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE myinput1(key int, value int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@myinput1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' INTO TABLE myinput1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' INTO TABLE myinput1
PREHOOK: type: LOAD
PREHOOK: Output: default@myinput1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' INTO TABLE myinput1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' INTO TABLE myinput1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@myinput1
PREHOOK: query: -- merging
@@ -961,28 +961,28 @@
POSTHOOK: query: CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in8.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in8.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_input2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in9.txt' into table smb_input2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/in9.txt' into table smb_input2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_input2
PREHOOK: query: SELECT /*+ MAPJOIN(a) */ * FROM smb_input1 a JOIN smb_input1 b ON a.key <=> b.key ORDER BY a.key, a.value, b.key, b.value
diff --git a/ql/src/test/results/clientpositive/join_reorder.q.out b/ql/src/test/results/clientpositive/join_reorder.q.out
index 544de54..bbc820d 100644
--- a/ql/src/test/results/clientpositive/join_reorder.q.out
+++ b/ql/src/test/results/clientpositive/join_reorder.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: EXPLAIN FROM T1 a JOIN src c ON c.key+1=a.key
diff --git a/ql/src/test/results/clientpositive/join_reorder2.q.out b/ql/src/test/results/clientpositive/join_reorder2.q.out
index ca990ee..a582f5f 100644
--- a/ql/src/test/results/clientpositive/join_reorder2.q.out
+++ b/ql/src/test/results/clientpositive/join_reorder2.q.out
@@ -18,28 +18,28 @@
POSTHOOK: query: CREATE TABLE T4(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
PREHOOK: type: LOAD
PREHOOK: Output: default@t4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t4
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/join_reorder3.q.out b/ql/src/test/results/clientpositive/join_reorder3.q.out
index b5ab7ff..e9ad430 100644
--- a/ql/src/test/results/clientpositive/join_reorder3.q.out
+++ b/ql/src/test/results/clientpositive/join_reorder3.q.out
@@ -18,28 +18,28 @@
POSTHOOK: query: CREATE TABLE T4(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
PREHOOK: type: LOAD
PREHOOK: Output: default@t4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t4
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/join_reorder4.q.out b/ql/src/test/results/clientpositive/join_reorder4.q.out
index 7ced058..9c1a39e 100644
--- a/ql/src/test/results/clientpositive/join_reorder4.q.out
+++ b/ql/src/test/results/clientpositive/join_reorder4.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: CREATE TABLE T3(key3 STRING, val3 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: explain select /*+ STREAMTABLE(a) */ a.*, b.*, c.* from T1 a join T2 b on a.key1=b.key2 join T3 c on a.key1=c.key3
diff --git a/ql/src/test/results/clientpositive/join_star.q.out b/ql/src/test/results/clientpositive/join_star.q.out
index 67f4f26..ebc2704 100644
--- a/ql/src/test/results/clientpositive/join_star.q.out
+++ b/ql/src/test/results/clientpositive/join_star.q.out
@@ -38,52 +38,52 @@
POSTHOOK: query: create table dim7(f13 int, f14 int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dim7
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/fact-data.txt' INTO TABLE fact
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fact-data.txt' INTO TABLE fact
PREHOOK: type: LOAD
PREHOOK: Output: default@fact
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/fact-data.txt' INTO TABLE fact
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fact-data.txt' INTO TABLE fact
POSTHOOK: type: LOAD
POSTHOOK: Output: default@fact
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim1
PREHOOK: type: LOAD
PREHOOK: Output: default@dim1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim2
PREHOOK: type: LOAD
PREHOOK: Output: default@dim2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim3
PREHOOK: type: LOAD
PREHOOK: Output: default@dim3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim4
PREHOOK: type: LOAD
PREHOOK: Output: default@dim4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim5
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim5
PREHOOK: type: LOAD
PREHOOK: Output: default@dim5
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim5
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim5
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim5
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim6
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim6
PREHOOK: type: LOAD
PREHOOK: Output: default@dim6
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim6
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim6
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim6
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim7
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim7
PREHOOK: type: LOAD
PREHOOK: Output: default@dim7
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/dim-data.txt' INTO TABLE dim7
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dim-data.txt' INTO TABLE dim7
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dim7
PREHOOK: query: explain select m1, m2, f2 from fact join dim1 on fact.d1=dim1.f1
diff --git a/ql/src/test/results/clientpositive/leadlag.q.out b/ql/src/test/results/clientpositive/leadlag.q.out
index 523a95b..58dba32 100644
--- a/ql/src/test/results/clientpositive/leadlag.q.out
+++ b/ql/src/test/results/clientpositive/leadlag.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: --1. testLagWithPTFWindowing
diff --git a/ql/src/test/results/clientpositive/leadlag_queries.q.out b/ql/src/test/results/clientpositive/leadlag_queries.q.out
index 75bee1e..93238f5 100644
--- a/ql/src/test/results/clientpositive/leadlag_queries.q.out
+++ b/ql/src/test/results/clientpositive/leadlag_queries.q.out
@@ -25,10 +25,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: -- 1. testLeadUDAF
diff --git a/ql/src/test/results/clientpositive/leftsemijoin.q.out b/ql/src/test/results/clientpositive/leftsemijoin.q.out
index 07c5a4d..d8ecfbf 100644
--- a/ql/src/test/results/clientpositive/leftsemijoin.q.out
+++ b/ql/src/test/results/clientpositive/leftsemijoin.q.out
@@ -20,23 +20,23 @@
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@things
-PREHOOK: query: load data local inpath '../data/files/sales.txt' INTO TABLE sales
+PREHOOK: query: load data local inpath '../../data/files/sales.txt' INTO TABLE sales
PREHOOK: type: LOAD
PREHOOK: Output: default@sales
-POSTHOOK: query: load data local inpath '../data/files/sales.txt' INTO TABLE sales
+POSTHOOK: query: load data local inpath '../../data/files/sales.txt' INTO TABLE sales
POSTHOOK: type: LOAD
POSTHOOK: Output: default@sales
-PREHOOK: query: load data local inpath '../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23')
+PREHOOK: query: load data local inpath '../../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23')
PREHOOK: type: LOAD
PREHOOK: Output: default@things
-POSTHOOK: query: load data local inpath '../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23')
+POSTHOOK: query: load data local inpath '../../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@things
POSTHOOK: Output: default@things@ds=2011-10-23
-PREHOOK: query: load data local inpath '../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24')
+PREHOOK: query: load data local inpath '../../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24')
PREHOOK: type: LOAD
PREHOOK: Output: default@things
-POSTHOOK: query: load data local inpath '../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24')
+POSTHOOK: query: load data local inpath '../../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@things
POSTHOOK: Output: default@things@ds=2011-10-24
diff --git a/ql/src/test/results/clientpositive/leftsemijoin_mr.q.out b/ql/src/test/results/clientpositive/leftsemijoin_mr.q.out
index 6d8a468..3ca0e15 100644
--- a/ql/src/test/results/clientpositive/leftsemijoin_mr.q.out
+++ b/ql/src/test/results/clientpositive/leftsemijoin_mr.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key INT)
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE T2(key INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/leftsemijoin_mr_t2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- Run this query using TestMinimrCliDriver
diff --git a/ql/src/test/results/clientpositive/load_binary_data.q.out b/ql/src/test/results/clientpositive/load_binary_data.q.out
index 5e8a1db..76fbe74 100644
--- a/ql/src/test/results/clientpositive/load_binary_data.q.out
+++ b/ql/src/test/results/clientpositive/load_binary_data.q.out
Binary files differ
diff --git a/ql/src/test/results/clientpositive/load_exist_part_authsuccess.q.out b/ql/src/test/results/clientpositive/load_exist_part_authsuccess.q.out
index bc7aaa1..f674f2f 100644
--- a/ql/src/test/results/clientpositive/load_exist_part_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/load_exist_part_authsuccess.q.out
@@ -16,9 +16,9 @@
POSTHOOK: query: grant Update on table hive_test_src to user hive_test_user
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@hive_test_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src@pcol1=test_part
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src@pcol1=test_part
diff --git a/ql/src/test/results/clientpositive/load_file_with_space_in_the_name.q.out b/ql/src/test/results/clientpositive/load_file_with_space_in_the_name.q.out
index b159114..af6fd10 100644
--- a/ql/src/test/results/clientpositive/load_file_with_space_in_the_name.q.out
+++ b/ql/src/test/results/clientpositive/load_file_with_space_in_the_name.q.out
@@ -9,9 +9,9 @@
CREATE TABLE load_file_with_space_in_the_name(name STRING, age INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@load_file_with_space_in_the_name
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name
PREHOOK: type: LOAD
PREHOOK: Output: default@load_file_with_space_in_the_name
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/person age.txt' INTO TABLE load_file_with_space_in_the_name
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_file_with_space_in_the_name
diff --git a/ql/src/test/results/clientpositive/load_fs.q.out b/ql/src/test/results/clientpositive/load_fs.q.out
index bcacdf3..563863d 100644
--- a/ql/src/test/results/clientpositive/load_fs.q.out
+++ b/ql/src/test/results/clientpositive/load_fs.q.out
@@ -8,22 +8,22 @@
#### A masked pattern was here ####
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@load_overwrite2
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load_overwrite
PREHOOK: type: LOAD
PREHOOK: Output: default@load_overwrite
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load_overwrite
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_overwrite
-PREHOOK: query: load data local inpath '../data/files/kv2.txt' into table load_overwrite
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load_overwrite
PREHOOK: type: LOAD
PREHOOK: Output: default@load_overwrite
-POSTHOOK: query: load data local inpath '../data/files/kv2.txt' into table load_overwrite
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' into table load_overwrite
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_overwrite
-PREHOOK: query: load data local inpath '../data/files/kv3.txt' into table load_overwrite
+PREHOOK: query: load data local inpath '../../data/files/kv3.txt' into table load_overwrite
PREHOOK: type: LOAD
PREHOOK: Output: default@load_overwrite
-POSTHOOK: query: load data local inpath '../data/files/kv3.txt' into table load_overwrite
+POSTHOOK: query: load data local inpath '../../data/files/kv3.txt' into table load_overwrite
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_overwrite
PREHOOK: query: show table extended like load_overwrite
diff --git a/ql/src/test/results/clientpositive/load_fs2.q.out b/ql/src/test/results/clientpositive/load_fs2.q.out
index 914ca14..2340dba 100644
--- a/ql/src/test/results/clientpositive/load_fs2.q.out
+++ b/ql/src/test/results/clientpositive/load_fs2.q.out
@@ -14,10 +14,10 @@
POSTHOOK: query: create table loader (key string, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@loader
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
PREHOOK: type: LOAD
PREHOOK: Output: default@loader
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
POSTHOOK: type: LOAD
POSTHOOK: Output: default@loader
PREHOOK: query: load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result
@@ -43,10 +43,10 @@
minFileSize:5812
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
PREHOOK: type: LOAD
PREHOOK: Output: default@loader
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
POSTHOOK: type: LOAD
POSTHOOK: Output: default@loader
PREHOOK: query: load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result
@@ -72,10 +72,10 @@
minFileSize:5812
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
PREHOOK: type: LOAD
PREHOOK: Output: default@loader
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table loader
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table loader
POSTHOOK: type: LOAD
POSTHOOK: Output: default@loader
PREHOOK: query: load data inpath '/build/ql/test/data/warehouse/loader/kv1.txt' into table result
diff --git a/ql/src/test/results/clientpositive/load_nonpart_authsuccess.q.out b/ql/src/test/results/clientpositive/load_nonpart_authsuccess.q.out
index 5131f12..ca96d95 100644
--- a/ql/src/test/results/clientpositive/load_nonpart_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/load_nonpart_authsuccess.q.out
@@ -9,9 +9,9 @@
POSTHOOK: query: grant Update on table hive_test_src to user hive_test_user
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@hive_test_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src
diff --git a/ql/src/test/results/clientpositive/load_overwrite.q.out b/ql/src/test/results/clientpositive/load_overwrite.q.out
index 8c2b959..39c7e7c 100644
--- a/ql/src/test/results/clientpositive/load_overwrite.q.out
+++ b/ql/src/test/results/clientpositive/load_overwrite.q.out
@@ -43,10 +43,10 @@
POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
500
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load_overwrite
PREHOOK: type: LOAD
PREHOOK: Output: default@load_overwrite
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table load_overwrite
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_overwrite
POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -81,10 +81,10 @@
POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
1000
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table load_overwrite
PREHOOK: type: LOAD
PREHOOK: Output: default@load_overwrite
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table load_overwrite
POSTHOOK: type: LOAD
POSTHOOK: Output: default@load_overwrite
POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/load_part_authsuccess.q.out b/ql/src/test/results/clientpositive/load_part_authsuccess.q.out
index 9ebc333..560c582 100644
--- a/ql/src/test/results/clientpositive/load_part_authsuccess.q.out
+++ b/ql/src/test/results/clientpositive/load_part_authsuccess.q.out
@@ -9,10 +9,10 @@
POSTHOOK: query: grant Update on table hive_test_src to user hive_test_user
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@hive_test_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src partition (pcol1 = 'test_part')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src
POSTHOOK: Output: default@hive_test_src@pcol1=test_part
diff --git a/ql/src/test/results/clientpositive/loadpart1.q.out b/ql/src/test/results/clientpositive/loadpart1.q.out
index a1fd2f3..84f9631 100644
--- a/ql/src/test/results/clientpositive/loadpart1.q.out
+++ b/ql/src/test/results/clientpositive/loadpart1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hive_test_src
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src
PREHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
diff --git a/ql/src/test/results/clientpositive/loadpart_err.q.out b/ql/src/test/results/clientpositive/loadpart_err.q.out
index db0bfa5..7e035ff 100644
--- a/ql/src/test/results/clientpositive/loadpart_err.q.out
+++ b/ql/src/test/results/clientpositive/loadpart_err.q.out
@@ -4,7 +4,7 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@loadpart1
PREHOOK: query: INSERT OVERWRITE TABLE loadpart1 PARTITION (ds='2009-01-01')
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING '../../data/scripts/error_script' AS (tkey, tvalue)
FROM src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
diff --git a/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out b/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out
index 60a5eab..73a5d9a 100644
--- a/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out
+++ b/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out
@@ -31,22 +31,22 @@
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@z
-PREHOOK: query: load data local inpath '../data/files/x.txt' INTO TABLE x
+PREHOOK: query: load data local inpath '../../data/files/x.txt' INTO TABLE x
PREHOOK: type: LOAD
PREHOOK: Output: default@x
-POSTHOOK: query: load data local inpath '../data/files/x.txt' INTO TABLE x
+POSTHOOK: query: load data local inpath '../../data/files/x.txt' INTO TABLE x
POSTHOOK: type: LOAD
POSTHOOK: Output: default@x
-PREHOOK: query: load data local inpath '../data/files/y.txt' INTO TABLE y
+PREHOOK: query: load data local inpath '../../data/files/y.txt' INTO TABLE y
PREHOOK: type: LOAD
PREHOOK: Output: default@y
-POSTHOOK: query: load data local inpath '../data/files/y.txt' INTO TABLE y
+POSTHOOK: query: load data local inpath '../../data/files/y.txt' INTO TABLE y
POSTHOOK: type: LOAD
POSTHOOK: Output: default@y
-PREHOOK: query: load data local inpath '../data/files/z.txt' INTO TABLE z
+PREHOOK: query: load data local inpath '../../data/files/z.txt' INTO TABLE z
PREHOOK: type: LOAD
PREHOOK: Output: default@z
-POSTHOOK: query: load data local inpath '../data/files/z.txt' INTO TABLE z
+POSTHOOK: query: load data local inpath '../../data/files/z.txt' INTO TABLE z
POSTHOOK: type: LOAD
POSTHOOK: Output: default@z
PREHOOK: query: -- Since the inputs are small, it should be automatically converted to mapjoin
diff --git a/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out b/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out
index 974a443..2e2fb40 100644
--- a/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out
+++ b/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out
@@ -8,29 +8,29 @@
POSTHOOK: query: create table merge_dynamic_part like srcpart
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@merge_dynamic_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out b/ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out
index 77695c2..3ec87b5 100644
--- a/ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out
+++ b/ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out
@@ -8,42 +8,42 @@
POSTHOOK: query: create table merge_dynamic_part like srcpart
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@merge_dynamic_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket0.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out b/ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out
index 7b636ff..e44b360 100644
--- a/ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out
+++ b/ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out
@@ -8,80 +8,80 @@
POSTHOOK: query: create table merge_dynamic_part like srcpart
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@merge_dynamic_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=11
-PREHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=11
-POSTHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=11
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=12
-PREHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=12
-POSTHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=12
PREHOOK: query: show partitions srcpart_merge_dp
diff --git a/ql/src/test/results/clientpositive/merge_dynamic_partition4.q.out b/ql/src/test/results/clientpositive/merge_dynamic_partition4.q.out
index 1f8ce49..32b0e56 100644
--- a/ql/src/test/results/clientpositive/merge_dynamic_partition4.q.out
+++ b/ql/src/test/results/clientpositive/merge_dynamic_partition4.q.out
@@ -35,54 +35,54 @@
POSTHOOK: type: ALTERTABLE_FILEFORMAT
POSTHOOK: Input: default@merge_dynamic_part
POSTHOOK: Output: default@merge_dynamic_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
PREHOOK: query: insert overwrite table srcpart_merge_dp_rc partition (ds = '2008-04-08', hr)
diff --git a/ql/src/test/results/clientpositive/merge_dynamic_partition5.q.out b/ql/src/test/results/clientpositive/merge_dynamic_partition5.q.out
index f3489f6..57f0004 100644
--- a/ql/src/test/results/clientpositive/merge_dynamic_partition5.q.out
+++ b/ql/src/test/results/clientpositive/merge_dynamic_partition5.q.out
@@ -33,35 +33,35 @@
POSTHOOK: type: ALTERTABLE_FILEFORMAT
POSTHOOK: Input: default@merge_dynamic_part
POSTHOOK: Output: default@merge_dynamic_part
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_merge_dp
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_merge_dp
POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
diff --git a/ql/src/test/results/clientpositive/nested_complex.q.out b/ql/src/test/results/clientpositive/nested_complex.q.out
index 0988ccc..1849afc 100644
--- a/ql/src/test/results/clientpositive/nested_complex.q.out
+++ b/ql/src/test/results/clientpositive/nested_complex.q.out
@@ -45,10 +45,10 @@
simple_string string None
#### A masked pattern was here ####
-PREHOOK: query: load data local inpath '../data/files/nested_complex.txt' overwrite into table nestedcomplex
+PREHOOK: query: load data local inpath '../../data/files/nested_complex.txt' overwrite into table nestedcomplex
PREHOOK: type: LOAD
PREHOOK: Output: default@nestedcomplex
-POSTHOOK: query: load data local inpath '../data/files/nested_complex.txt' overwrite into table nestedcomplex
+POSTHOOK: query: load data local inpath '../../data/files/nested_complex.txt' overwrite into table nestedcomplex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@nestedcomplex
PREHOOK: query: select * from nestedcomplex sort by simple_int
diff --git a/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out b/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out
index 5320ebc..1d1ce79 100644
--- a/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out
+++ b/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE table(string string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE table
PREHOOK: type: LOAD
PREHOOK: Output: default@table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/docurl.txt' INTO TABLE table
POSTHOOK: type: LOAD
POSTHOOK: Output: default@table
PREHOOK: query: SELECT table, count(1)
diff --git a/ql/src/test/results/clientpositive/null_column.q.out b/ql/src/test/results/clientpositive/null_column.q.out
index 3c57a36..ae88ae7 100644
--- a/ql/src/test/results/clientpositive/null_column.q.out
+++ b/ql/src/test/results/clientpositive/null_column.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table temp_null(a int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@temp_null
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table temp_null
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table temp_null
PREHOOK: type: LOAD
PREHOOK: Output: default@temp_null
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table temp_null
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table temp_null
POSTHOOK: type: LOAD
POSTHOOK: Output: default@temp_null
PREHOOK: query: select null, null from temp_null
@@ -91,14 +91,14 @@
NULL NULL
NULL NULL
NULL NULL
-PREHOOK: query: insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null
+PREHOOK: query: insert overwrite directory "target/warehouse/null_columns.out" select null, null from temp_null
PREHOOK: type: QUERY
PREHOOK: Input: default@temp_null
-PREHOOK: Output: ../build/ql/test/data/warehouse/null_columns.out
-POSTHOOK: query: insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null
+PREHOOK: Output: target/warehouse/null_columns.out
+POSTHOOK: query: insert overwrite directory "target/warehouse/null_columns.out" select null, null from temp_null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@temp_null
-POSTHOOK: Output: ../build/ql/test/data/warehouse/null_columns.out
+POSTHOOK: Output: target/warehouse/null_columns.out
POSTHOOK: Lineage: tt.a EXPRESSION []
POSTHOOK: Lineage: tt.b SIMPLE []
POSTHOOK: Lineage: tt_b.a EXPRESSION []
diff --git a/ql/src/test/results/clientpositive/nullgroup3.q.out b/ql/src/test/results/clientpositive/nullgroup3.q.out
index 6fa6e22..c4792ba 100644
--- a/ql/src/test/results/clientpositive/nullgroup3.q.out
+++ b/ql/src/test/results/clientpositive/nullgroup3.q.out
@@ -3,17 +3,17 @@
POSTHOOK: query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl
POSTHOOK: Output: default@tstparttbl@ds=2008-04-09
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl
POSTHOOK: Output: default@tstparttbl@ds=2008-04-08
@@ -93,17 +93,17 @@
POSTHOOK: query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl2
POSTHOOK: Output: default@tstparttbl2@ds=2008-04-09
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl2
POSTHOOK: Output: default@tstparttbl2@ds=2008-04-08
@@ -191,17 +191,17 @@
POSTHOOK: query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl
POSTHOOK: Output: default@tstparttbl@ds=2008-04-09
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl
POSTHOOK: Output: default@tstparttbl@ds=2008-04-08
@@ -289,17 +289,17 @@
POSTHOOK: query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl2
POSTHOOK: Output: default@tstparttbl2@ds=2008-04-09
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl2
POSTHOOK: Output: default@tstparttbl2@ds=2008-04-08
diff --git a/ql/src/test/results/clientpositive/nullgroup5.q.out b/ql/src/test/results/clientpositive/nullgroup5.q.out
index 4cb8148..44bf4ae 100644
--- a/ql/src/test/results/clientpositive/nullgroup5.q.out
+++ b/ql/src/test/results/clientpositive/nullgroup5.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl
POSTHOOK: Output: default@tstparttbl@ds=2009-04-09
@@ -15,10 +15,10 @@
POSTHOOK: query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tstparttbl2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09')
PREHOOK: type: LOAD
PREHOOK: Output: default@tstparttbl2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tstparttbl2
POSTHOOK: Output: default@tstparttbl2@ds=2009-04-09
diff --git a/ql/src/test/results/clientpositive/nullscript.q.out b/ql/src/test/results/clientpositive/nullscript.q.out
index b3b0eb1..4671220 100644
--- a/ql/src/test/results/clientpositive/nullscript.q.out
+++ b/ql/src/test/results/clientpositive/nullscript.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE nullscript(KEY STRING, VALUE STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@nullscript
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE nullscript
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript
PREHOOK: type: LOAD
PREHOOK: Output: default@nullscript
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE nullscript
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript
POSTHOOK: type: LOAD
POSTHOOK: Output: default@nullscript
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE nullscript
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE nullscript
PREHOOK: type: LOAD
PREHOOK: Output: default@nullscript
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE nullscript
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE nullscript
POSTHOOK: type: LOAD
POSTHOOK: Output: default@nullscript
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/orc_create.q.out b/ql/src/test/results/clientpositive/orc_create.q.out
index 03e1fb3..3527917 100644
--- a/ql/src/test/results/clientpositive/orc_create.q.out
+++ b/ql/src/test/results/clientpositive/orc_create.q.out
@@ -302,10 +302,10 @@
Sort Columns: []
Storage Desc Params:
serialization.format 1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
PREHOOK: type: LOAD
PREHOOK: Output: default@orc_create_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
POSTHOOK: type: LOAD
POSTHOOK: Output: default@orc_create_staging
PREHOOK: query: SELECT * from orc_create_staging
@@ -425,11 +425,11 @@
POSTHOOK: Lineage: orc_create_complex.mp SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:mp, type:map<string,string>, comment:null), ]
POSTHOOK: Lineage: orc_create_complex.str SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:str, type:string, comment:null), ]
POSTHOOK: Lineage: orc_create_complex.strct SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:strct, type:struct<A:string,B:string>, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create_people.txt'
OVERWRITE INTO TABLE orc_create_people_staging
PREHOOK: type: LOAD
PREHOOK: Output: default@orc_create_people_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create_people.txt'
OVERWRITE INTO TABLE orc_create_people_staging
POSTHOOK: type: LOAD
POSTHOOK: Output: default@orc_create_people_staging
diff --git a/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out b/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out
index 976abd4..b8187ef 100644
--- a/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out
+++ b/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out
@@ -66,11 +66,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_thousand
POSTHOOK: Lineage: test_orc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1kv2.cogroup.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1kv2.cogroup.txt'
INTO TABLE src_thousand
PREHOOK: type: LOAD
PREHOOK: Output: default@src_thousand
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1kv2.cogroup.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1kv2.cogroup.txt'
INTO TABLE src_thousand
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_thousand
diff --git a/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out b/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out
index 3b8aa64..81d6138 100644
--- a/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out
+++ b/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out
@@ -30,10 +30,10 @@
CREATE TABLE src_null(a STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_null
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nulls.txt' INTO TABLE src_null
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null
PREHOOK: type: LOAD
PREHOOK: Output: default@src_null
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nulls.txt' INTO TABLE src_null
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_null
PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT a FROM src_null
diff --git a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
index 94f3ff8..b7a14b8 100644
--- a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
@@ -62,10 +62,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/over1k' OVERWRITE INTO TABLE staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE staging
PREHOOK: type: LOAD
PREHOOK: Output: default@staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/over1k' OVERWRITE INTO TABLE staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE staging
POSTHOOK: type: LOAD
POSTHOOK: Output: default@staging
PREHOOK: query: INSERT INTO TABLE orc_pred select * from staging
diff --git a/ql/src/test/results/clientpositive/parallel_orderby.q.out b/ql/src/test/results/clientpositive/parallel_orderby.q.out
index eecebe0..44c2c28 100644
--- a/ql/src/test/results/clientpositive/parallel_orderby.q.out
+++ b/ql/src/test/results/clientpositive/parallel_orderby.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: create table src5 (key string, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src5
-PREHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+PREHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
PREHOOK: type: LOAD
PREHOOK: Output: default@src5
-POSTHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+POSTHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src5
-PREHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+PREHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
PREHOOK: type: LOAD
PREHOOK: Output: default@src5
-POSTHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+POSTHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src5
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/partition_type_check.q.out b/ql/src/test/results/clientpositive/partition_type_check.q.out
index 8aed925..b166587 100644
--- a/ql/src/test/results/clientpositive/partition_type_check.q.out
+++ b/ql/src/test/results/clientpositive/partition_type_check.q.out
@@ -5,10 +5,10 @@
CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
PREHOOK: type: LOAD
PREHOOK: Output: default@tab1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab1
POSTHOOK: Output: default@tab1@month=June/day=2
@@ -43,10 +43,10 @@
CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tab1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@tab1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab1
POSTHOOK: Output: default@tab1@month=June/day=2
@@ -88,10 +88,10 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@tab1
POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
PREHOOK: type: LOAD
PREHOOK: Output: default@tab1@month=June/day=2008-01-01
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
PREHOOK: query: select id1, id2, day from tab1 where day='2008-01-01'
diff --git a/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out b/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
index e9caf69..1b13a0e 100644
--- a/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
+++ b/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
@@ -15,10 +15,10 @@
POSTHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW17
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@pw17
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw17
POSTHOOK: Output: default@pw17@year=1
@@ -66,10 +66,10 @@
POSTHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW17_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2
PREHOOK: type: LOAD
PREHOOK: Output: default@pw17_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw17_2
PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator
@@ -92,10 +92,10 @@
POSTHOOK: query: CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW17_3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@pw17_3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw17_3
POSTHOOK: Output: default@pw17_3@year=1
@@ -141,10 +141,10 @@
POSTHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW17_4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4
PREHOOK: type: LOAD
PREHOOK: Output: default@pw17_4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw17_4
PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator
diff --git a/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out b/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out
index 72dd6c5..fce16a0 100644
--- a/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out
+++ b/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out
@@ -17,10 +17,10 @@
POSTHOOK: query: CREATE TABLE PW18(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW18
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@pw18
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw18
POSTHOOK: Output: default@pw18@year=1
@@ -60,10 +60,10 @@
POSTHOOK: query: CREATE TABLE PW18_2(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PW18_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2
PREHOOK: type: LOAD
PREHOOK: Output: default@pw18_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@pw18_2
PREHOOK: query: -- Without the fix HIVE-5202, will throw unsupported data type exception
diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out
index 4e0a406..4b060b8 100644
--- a/ql/src/test/results/clientpositive/pcr.q.out
+++ b/ql/src/test/results/clientpositive/pcr.q.out
@@ -5896,11 +5896,11 @@
POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: load data local inpath '../data/files/kv1.txt'
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt'
overwrite into table ab
PREHOOK: type: LOAD
PREHOOK: Output: default@ab
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt'
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt'
overwrite into table ab
POSTHOOK: type: LOAD
POSTHOOK: Output: default@ab
diff --git a/ql/src/test/results/clientpositive/ppd_multi_insert.q.out b/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
index ca82bca..352db1a 100644
--- a/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
+++ b/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
@@ -18,17 +18,17 @@
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) key) 300))))
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 'target/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) key) 300))))
STAGE DEPENDENCIES:
Stage-4 is a root stage
@@ -200,31 +200,31 @@
Move Operator
files:
hdfs directory: true
- destination: ../build/ql/test/data/warehouse/mi4.out
+ destination: target/warehouse/mi4.out
PREHOOK: query: FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/warehouse/mi4.out
PREHOOK: Output: default@mi1
PREHOOK: Output: default@mi2
PREHOOK: Output: default@mi3@ds=2008-04-08/hr=12
+PREHOOK: Output: target/warehouse/mi4.out
POSTHOOK: query: FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out
POSTHOOK: Output: default@mi1
POSTHOOK: Output: default@mi2
POSTHOOK: Output: default@mi3@ds=2008-04-08/hr=12
+POSTHOOK: Output: target/warehouse/mi4.out
POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
@@ -1304,14 +1304,14 @@
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
POSTHOOK: type: QUERY
POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ]
@@ -1319,7 +1319,7 @@
POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) key) 300))))
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 'target/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL a) key) 300))))
STAGE DEPENDENCIES:
Stage-4 is a root stage
@@ -1491,31 +1491,31 @@
Move Operator
files:
hdfs directory: true
- destination: ../build/ql/test/data/warehouse/mi4.out
+ destination: target/warehouse/mi4.out
PREHOOK: query: FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/warehouse/mi4.out
PREHOOK: Output: default@mi1
PREHOOK: Output: default@mi2
PREHOOK: Output: default@mi3@ds=2008-04-08/hr=12
+PREHOOK: Output: target/warehouse/mi4.out
POSTHOOK: query: FROM src a JOIN src b ON (a.key = b.key)
INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100
INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200
INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
+INSERT OVERWRITE DIRECTORY 'target/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out
POSTHOOK: Output: default@mi1
POSTHOOK: Output: default@mi2
POSTHOOK: Output: default@mi3@ds=2008-04-08/hr=12
+POSTHOOK: Output: target/warehouse/mi4.out
POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/progress_1.q.out b/ql/src/test/results/clientpositive/progress_1.q.out
index 00b498f..05415b9 100644
--- a/ql/src/test/results/clientpositive/progress_1.q.out
+++ b/ql/src/test/results/clientpositive/progress_1.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE PROGRESS_1(key int, value string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@PROGRESS_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv6.txt' INTO TABLE PROGRESS_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv6.txt' INTO TABLE PROGRESS_1
PREHOOK: type: LOAD
PREHOOK: Output: default@progress_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv6.txt' INTO TABLE PROGRESS_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv6.txt' INTO TABLE PROGRESS_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@progress_1
PREHOOK: query: select count(1) from PROGRESS_1 t1 join PROGRESS_1 t2 on t1.key=t2.key
diff --git a/ql/src/test/results/clientpositive/ptf.q.out b/ql/src/test/results/clientpositive/ptf.q.out
index 2b7db57..d86b705 100644
--- a/ql/src/test/results/clientpositive/ptf.q.out
+++ b/ql/src/test/results/clientpositive/ptf.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: --1. test1
diff --git a/ql/src/test/results/clientpositive/ptf_decimal.q.out b/ql/src/test/results/clientpositive/ptf_decimal.q.out
index 14393b4..490ef39 100644
--- a/ql/src/test/results/clientpositive/ptf_decimal.q.out
+++ b/ql/src/test/results/clientpositive/ptf_decimal.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: -- 1. aggregate functions with decimal type
diff --git a/ql/src/test/results/clientpositive/ptf_general_queries.q.out b/ql/src/test/results/clientpositive/ptf_general_queries.q.out
index 4914b8f..d1ac044 100644
--- a/ql/src/test/results/clientpositive/ptf_general_queries.q.out
+++ b/ql/src/test/results/clientpositive/ptf_general_queries.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: -- 1. testNoPTFNoWindowing
diff --git a/ql/src/test/results/clientpositive/ptf_matchpath.q.out b/ql/src/test/results/clientpositive/ptf_matchpath.q.out
index ed57dcb..cf3f0ae 100644
--- a/ql/src/test/results/clientpositive/ptf_matchpath.q.out
+++ b/ql/src/test/results/clientpositive/ptf_matchpath.q.out
@@ -23,10 +23,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@flights_tiny
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
PREHOOK: type: LOAD
PREHOOK: Output: default@flights_tiny
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
POSTHOOK: type: LOAD
POSTHOOK: Output: default@flights_tiny
PREHOOK: query: -- 1. basic Matchpath test
diff --git a/ql/src/test/results/clientpositive/ptf_rcfile.q.out b/ql/src/test/results/clientpositive/ptf_rcfile.q.out
index 8d83f44..ac0f02b 100644
--- a/ql/src/test/results/clientpositive/ptf_rcfile.q.out
+++ b/ql/src/test/results/clientpositive/ptf_rcfile.q.out
@@ -27,10 +27,10 @@
) STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part_rc
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part.rc' overwrite into table part_rc
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part.rc' overwrite into table part_rc
PREHOOK: type: LOAD
PREHOOK: Output: default@part_rc
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part.rc' overwrite into table part_rc
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part.rc' overwrite into table part_rc
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part_rc
PREHOOK: query: -- testWindowingPTFWithPartRC
diff --git a/ql/src/test/results/clientpositive/ptf_register_tblfn.q.out b/ql/src/test/results/clientpositive/ptf_register_tblfn.q.out
index 0e99d6d..e10779a 100644
--- a/ql/src/test/results/clientpositive/ptf_register_tblfn.q.out
+++ b/ql/src/test/results/clientpositive/ptf_register_tblfn.q.out
@@ -23,10 +23,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@flights_tiny
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
PREHOOK: type: LOAD
PREHOOK: Output: default@flights_tiny
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt' OVERWRITE INTO TABLE flights_tiny
POSTHOOK: type: LOAD
POSTHOOK: Output: default@flights_tiny
PREHOOK: query: create temporary function matchpathtest as 'org.apache.hadoop.hive.ql.udf.ptf.MatchPath$MatchPathResolver'
diff --git a/ql/src/test/results/clientpositive/ptf_seqfile.q.out b/ql/src/test/results/clientpositive/ptf_seqfile.q.out
index 7d36c8f..9926591 100644
--- a/ql/src/test/results/clientpositive/ptf_seqfile.q.out
+++ b/ql/src/test/results/clientpositive/ptf_seqfile.q.out
@@ -27,10 +27,10 @@
) STORED AS SEQUENCEFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part_seq
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part.seq' overwrite into table part_seq
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part.seq' overwrite into table part_seq
PREHOOK: type: LOAD
PREHOOK: Output: default@part_seq
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part.seq' overwrite into table part_seq
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part.seq' overwrite into table part_seq
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part_seq
PREHOOK: query: -- testWindowingPTFWithPartSeqFile
diff --git a/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out b/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
index 8926546..4463387 100644
--- a/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
+++ b/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
@@ -41,10 +41,10 @@
FIELDS TERMINATED BY '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@lineitem
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
PREHOOK: type: LOAD
PREHOOK: Output: default@lineitem
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
POSTHOOK: type: LOAD
POSTHOOK: Output: default@lineitem
PREHOOK: query: CREATE INDEX lineitem_lshipdate_idx ON TABLE lineitem(l_shipdate) AS 'org.apache.hadoop.hive.ql.index.AggregateIndexHandler' WITH DEFERRED REBUILD IDXPROPERTIES("AGGREGATES"="count(l_shipdate)")
@@ -3589,10 +3589,10 @@
POSTHOOK: Lineage: tblpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: tblpart PARTITION(ds=2008-04-09,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: tblpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
PREHOOK: type: LOAD
PREHOOK: Output: default@tbl
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tbl.txt' OVERWRITE INTO TABLE tbl
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tbl
POSTHOOK: Lineage: default__lineitem_lineitem_lshipdate_idx__._bucketname SIMPLE [(lineitem)lineitem.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
diff --git a/ql/src/test/results/clientpositive/repair.q.out b/ql/src/test/results/clientpositive/repair.q.out
index a05726a..553c4c1 100644
--- a/ql/src/test/results/clientpositive/repair.q.out
+++ b/ql/src/test/results/clientpositive/repair.q.out
@@ -1,3 +1,7 @@
+PREHOOK: query: DROP TABLE IF EXISTS repairtable
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS repairtable
+POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING)
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING)
@@ -23,3 +27,11 @@
PREHOOK: type: MSCK
POSTHOOK: query: MSCK TABLE repairtable
POSTHOOK: type: MSCK
+PREHOOK: query: DROP TABLE repairtable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@repairtable
+PREHOOK: Output: default@repairtable
+POSTHOOK: query: DROP TABLE repairtable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@repairtable
+POSTHOOK: Output: default@repairtable
diff --git a/ql/src/test/results/clientpositive/serde_regex.q.out b/ql/src/test/results/clientpositive/serde_regex.q.out
index e4f983d..64207f1 100644
--- a/ql/src/test/results/clientpositive/serde_regex.q.out
+++ b/ql/src/test/results/clientpositive/serde_regex.q.out
@@ -87,16 +87,16 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex
PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
@@ -188,10 +188,10 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@serde_regex1
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/kv7.txt" INTO TABLE serde_regex1
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv7.txt" INTO TABLE serde_regex1
PREHOOK: type: LOAD
PREHOOK: Output: default@serde_regex1
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/kv7.txt" INTO TABLE serde_regex1
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv7.txt" INTO TABLE serde_regex1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@serde_regex1
PREHOOK: query: SELECT key, value FROM serde_regex1 ORDER BY key, value
diff --git a/ql/src/test/results/clientpositive/skewjoin.q.out b/ql/src/test/results/clientpositive/skewjoin.q.out
index 584e515..c5d57da 100644
--- a/ql/src/test/results/clientpositive/skewjoin.q.out
+++ b/ql/src/test/results/clientpositive/skewjoin.q.out
@@ -23,28 +23,28 @@
POSTHOOK: query: CREATE TABLE dest_j1(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest_j1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
PREHOOK: type: LOAD
PREHOOK: Output: default@t4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T4
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t4
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out b/ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
index c5e4fd8..12bb869 100644
--- a/ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
+++ b/ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out
@@ -21,10 +21,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -34,10 +34,10 @@
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- a simple join query with skew on both the tables on the join key
diff --git a/ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out b/ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
index f056a86..9800c54 100644
--- a/ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
+++ b/ql/src/test/results/clientpositive/skewjoin_union_remove_2.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
@@ -29,10 +29,10 @@
POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- This is to test the union->selectstar->filesink and skewjoin optimization
diff --git a/ql/src/test/results/clientpositive/skewjoinopt1.q.out b/ql/src/test/results/clientpositive/skewjoinopt1.q.out
index cdac231..8688687 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt1.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt1.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- a simple join query with skew on both the tables on the join key
diff --git a/ql/src/test/results/clientpositive/skewjoinopt10.q.out b/ql/src/test/results/clientpositive/skewjoinopt10.q.out
index 00ad9cd..a976bc9 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt10.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt10.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: drop table array_valued_T1
diff --git a/ql/src/test/results/clientpositive/skewjoinopt11.q.out b/ql/src/test/results/clientpositive/skewjoinopt11.q.out
index 6bc0e7d..ef7c8e1 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt11.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt11.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -16,10 +16,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- This test is to verify the skew join compile optimization when the join is followed
diff --git a/ql/src/test/results/clientpositive/skewjoinopt12.q.out b/ql/src/test/results/clientpositive/skewjoinopt12.q.out
index 337e8d1..325256f 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt12.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt12.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- Both the join tables are skewed by 2 keys, and one of the skewed values
diff --git a/ql/src/test/results/clientpositive/skewjoinopt13.q.out b/ql/src/test/results/clientpositive/skewjoinopt13.q.out
index b29457d..a9692d4 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt13.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt13.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -14,10 +14,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key STRING, val STRING)
@@ -27,10 +27,10 @@
SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- This test is for skewed join compile time optimization for more than 2 tables.
diff --git a/ql/src/test/results/clientpositive/skewjoinopt14.q.out b/ql/src/test/results/clientpositive/skewjoinopt14.q.out
index 8b3dcbc..801dd7c 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt14.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt14.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -16,10 +16,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key STRING, val STRING)
@@ -29,10 +29,10 @@
SKEWED BY (val) ON ((12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- This test is for skewed join compile time optimization for more than 2 tables.
diff --git a/ql/src/test/results/clientpositive/skewjoinopt15.q.out b/ql/src/test/results/clientpositive/skewjoinopt15.q.out
index 930253d..3cfe11a 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt15.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt15.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE tmpT1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tmpT1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1
PREHOOK: type: LOAD
PREHOOK: Output: default@tmpt1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmpt1
PREHOOK: query: -- testing skew on other data types - int
@@ -33,10 +33,10 @@
POSTHOOK: Output: default@tmpT2
POSTHOOK: Lineage: t1.key EXPRESSION [(tmpt1)tmpt1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t1.val SIMPLE [(tmpt1)tmpt1.FieldSchema(name:val, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE tmpT2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE tmpT2
PREHOOK: type: LOAD
PREHOOK: Output: default@tmpt2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE tmpT2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE tmpT2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmpt2
POSTHOOK: Lineage: t1.key EXPRESSION [(tmpt1)tmpt1.FieldSchema(name:key, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/skewjoinopt16.q.out b/ql/src/test/results/clientpositive/skewjoinopt16.q.out
index 8797494..2f3a5d6 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt16.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt16.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- One of the tables is skewed by 2 columns, and the other table is
diff --git a/ql/src/test/results/clientpositive/skewjoinopt17.q.out b/ql/src/test/results/clientpositive/skewjoinopt17.q.out
index 44ae05c..0b30a3e 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt17.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt17.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- One of the tables is skewed by 2 columns, and the other table is
@@ -272,10 +272,10 @@
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -285,10 +285,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- One of the tables is skewed by 2 columns, and the other table is
diff --git a/ql/src/test/results/clientpositive/skewjoinopt18.q.out b/ql/src/test/results/clientpositive/skewjoinopt18.q.out
index 724b20e..3097045 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt18.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt18.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE tmpT1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tmpT1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1
PREHOOK: type: LOAD
PREHOOK: Output: default@tmpt1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE tmpT1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE tmpT1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmpt1
PREHOOK: query: -- testing skew on other data types - int
@@ -41,10 +41,10 @@
POSTHOOK: Output: default@T2
POSTHOOK: Lineage: t1.key EXPRESSION [(tmpt1)tmpt1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t1.val SIMPLE [(tmpt1)tmpt1.FieldSchema(name:val, type:string, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
POSTHOOK: Lineage: t1.key EXPRESSION [(tmpt1)tmpt1.FieldSchema(name:key, type:string, comment:null), ]
diff --git a/ql/src/test/results/clientpositive/skewjoinopt19.q.out b/ql/src/test/results/clientpositive/skewjoinopt19.q.out
index 09a8137..d78f72f 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt19.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt19.q.out
@@ -7,10 +7,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -18,10 +18,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- add a test where the skewed key is also the bucketized key
diff --git a/ql/src/test/results/clientpositive/skewjoinopt2.q.out b/ql/src/test/results/clientpositive/skewjoinopt2.q.out
index 36aeb8d..edf2504 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt2.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt2.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2), (7)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- a simple query with skew on both the tables on the join key
diff --git a/ql/src/test/results/clientpositive/skewjoinopt20.q.out b/ql/src/test/results/clientpositive/skewjoinopt20.q.out
index f65d241..3dce48a 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt20.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt20.q.out
@@ -7,10 +7,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -18,10 +18,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- add a test where the skewed key is also the bucketized/sorted key
diff --git a/ql/src/test/results/clientpositive/skewjoinopt3.q.out b/ql/src/test/results/clientpositive/skewjoinopt3.q.out
index bd81936..b56c023 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt3.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt3.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- a simple query with skew on both the tables. One of the skewed
diff --git a/ql/src/test/results/clientpositive/skewjoinopt4.q.out b/ql/src/test/results/clientpositive/skewjoinopt4.q.out
index 8b35cad..327add7 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt4.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt4.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -16,10 +16,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- only of the tables of the join (the left table of the join) is skewed
diff --git a/ql/src/test/results/clientpositive/skewjoinopt5.q.out b/ql/src/test/results/clientpositive/skewjoinopt5.q.out
index 9149ccaa..b0a3778 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt5.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt5.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key, val) ON ((2, 12)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- One of the tables is skewed by 2 columns, and the other table is
diff --git a/ql/src/test/results/clientpositive/skewjoinopt6.q.out b/ql/src/test/results/clientpositive/skewjoinopt6.q.out
index eac542d..4cef0be 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt6.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt6.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key, val) ON ((2, 12), (8, 18)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key, val) ON ((3, 13), (8, 18)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- Both the join tables are skewed by 2 keys, and one of the skewed values
diff --git a/ql/src/test/results/clientpositive/skewjoinopt7.q.out b/ql/src/test/results/clientpositive/skewjoinopt7.q.out
index 9905264..1977587 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt7.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt7.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -18,10 +18,10 @@
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
@@ -29,10 +29,10 @@
POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- This test is for validating skewed join compile time optimization for more than
diff --git a/ql/src/test/results/clientpositive/skewjoinopt8.q.out b/ql/src/test/results/clientpositive/skewjoinopt8.q.out
index 7520579..d63b27c 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt8.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt8.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING)
@@ -16,10 +16,10 @@
SKEWED BY (key) ON ((3), (8)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
@@ -27,10 +27,10 @@
POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: -- This test is for validating skewed join compile time optimization for more than
diff --git a/ql/src/test/results/clientpositive/skewjoinopt9.q.out b/ql/src/test/results/clientpositive/skewjoinopt9.q.out
index a3c5f90..624f74e 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt9.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt9.q.out
@@ -5,10 +5,10 @@
SKEWED BY (key) ON ((2)) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -16,10 +16,10 @@
POSTHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
PREHOOK: query: -- no skew join compile time optimization would be performed if one of the
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_1.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_1.q.out
index 9cbaac8..35c6474 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_1.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_1.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_10.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_10.q.out
index f473e61..a3423e7 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_10.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_10.q.out
@@ -19,30 +19,30 @@
POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
PREHOOK: query: -- add dummy files to make sure that the number of files in each partition is same as number of buckets
-load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_smb_bucket_10@ds=1
POSTHOOK: query: -- add dummy files to make sure that the number of files in each partition is same as number of buckets
-load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_smb_bucket_10@ds=1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_smb_bucket_10@ds=1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_smb_bucket_10@ds=1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_smb_bucket_10@ds=2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_smb_bucket_10@ds=2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_2.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_2.q.out
index 3c5c2c9..20a9c933 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_2.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_2.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_25.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
index 7100e73..2826c58 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_3.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_3.q.out
index e089f7c..6c17a6b 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_3.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_3.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_4.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_4.q.out
index 9b80324..3bfcfd7 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_4.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_4.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_5.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_5.q.out
index 185f6c7..49265d5 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_5.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_5.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_3
-PREHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_1
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_1
-PREHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_2
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_2
-PREHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_3
-POSTHOOK: query: load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_3
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_7.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_7.q.out
index 36c1fea..38a138b 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_7.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_7.q.out
@@ -23,16 +23,16 @@
POSTHOOK: query: create table normal_join_results(k1 int, v1 string, k2 int, v2 string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@normal_join_results
-PREHOOK: query: load data local inpath '../data/files/empty1.txt' into table smb_bucket4_1
+PREHOOK: query: load data local inpath '../../data/files/empty1.txt' into table smb_bucket4_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket4_1
-POSTHOOK: query: load data local inpath '../data/files/empty1.txt' into table smb_bucket4_1
+POSTHOOK: query: load data local inpath '../../data/files/empty1.txt' into table smb_bucket4_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket4_1
-PREHOOK: query: load data local inpath '../data/files/empty2.txt' into table smb_bucket4_1
+PREHOOK: query: load data local inpath '../../data/files/empty2.txt' into table smb_bucket4_1
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket4_1
-POSTHOOK: query: load data local inpath '../data/files/empty2.txt' into table smb_bucket4_1
+POSTHOOK: query: load data local inpath '../../data/files/empty2.txt' into table smb_bucket4_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket4_1
PREHOOK: query: insert overwrite table smb_bucket4_2
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_8.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_8.q.out
index a33babc..c393aa8 100644
--- a/ql/src/test/results/clientpositive/smb_mapjoin_8.q.out
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_8.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table smb_bucket_input (key int, value string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@smb_bucket_input
-PREHOOK: query: load data local inpath '../data/files/smb_bucket_input.rc' into table smb_bucket_input
+PREHOOK: query: load data local inpath '../../data/files/smb_bucket_input.rc' into table smb_bucket_input
PREHOOK: type: LOAD
PREHOOK: Output: default@smb_bucket_input
-POSTHOOK: query: load data local inpath '../data/files/smb_bucket_input.rc' into table smb_bucket_input
+POSTHOOK: query: load data local inpath '../../data/files/smb_bucket_input.rc' into table smb_bucket_input
POSTHOOK: type: LOAD
POSTHOOK: Output: default@smb_bucket_input
PREHOOK: query: CREATE TABLE smb_bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS
diff --git a/ql/src/test/results/clientpositive/split.q.out b/ql/src/test/results/clientpositive/split.q.out
index e9b53f1..8bd206a 100644
--- a/ql/src/test/results/clientpositive/split.q.out
+++ b/ql/src/test/results/clientpositive/split.q.out
@@ -9,11 +9,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@tmp_jo_tab_test
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/input.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/input.txt'
OVERWRITE INTO TABLE tmp_jo_tab_test
PREHOOK: type: LOAD
PREHOOK: Output: default@tmp_jo_tab_test
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/input.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/input.txt'
OVERWRITE INTO TABLE tmp_jo_tab_test
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmp_jo_tab_test
diff --git a/ql/src/test/results/clientpositive/stats1.q.out b/ql/src/test/results/clientpositive/stats1.q.out
index 32b7944..69f207d 100644
--- a/ql/src/test/results/clientpositive/stats1.q.out
+++ b/ql/src/test/results/clientpositive/stats1.q.out
@@ -225,13 +225,13 @@
PREHOOK: query: -- Load a file into a existing table
-- Some stats (numFiles, totalSize) should be updated correctly
-- Some other stats (numRows, rawDataSize) should be cleared
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE tmptable
PREHOOK: type: LOAD
PREHOOK: Output: default@tmptable
POSTHOOK: query: -- Load a file into a existing table
-- Some stats (numFiles, totalSize) should be updated correctly
-- Some other stats (numRows, rawDataSize) should be cleared
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE tmptable
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE tmptable
POSTHOOK: type: LOAD
POSTHOOK: Output: default@tmptable
POSTHOOK: Lineage: tmptable.key EXPRESSION [(src1)s2.FieldSchema(name:key, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/stats11.q.out b/ql/src/test/results/clientpositive/stats11.q.out
index 671cbcd..ac06448 100644
--- a/ql/src/test/results/clientpositive/stats11.q.out
+++ b/ql/src/test/results/clientpositive/stats11.q.out
@@ -3,16 +3,16 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
@@ -21,13 +21,13 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part
PREHOOK: query: explain
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
POSTHOOK: query: explain
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
ABSTRACT SYNTAX TREE:
- (TOK_LOAD '../data/files/srcbucket20.txt' (TOK_TAB (TOK_TABNAME srcbucket_mapjoin_part) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08'))) local)
+ (TOK_LOAD '../../data/files/srcbucket20.txt' (TOK_TAB (TOK_TABNAME srcbucket_mapjoin_part) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08'))) local)
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -55,10 +55,10 @@
Stats-Aggr Operator
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
@@ -100,10 +100,10 @@
Sort Columns: []
Storage Desc Params:
serialization.format 1
-PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
@@ -144,10 +144,10 @@
Sort Columns: []
Storage Desc Params:
serialization.format 1
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
@@ -188,10 +188,10 @@
Sort Columns: []
Storage Desc Params:
serialization.format 1
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
PREHOOK: query: desc formatted srcbucket_mapjoin_part partition(ds='2008-04-08')
@@ -237,17 +237,17 @@
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
-PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2
-POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
-POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
diff --git a/ql/src/test/results/clientpositive/stats18.q.out b/ql/src/test/results/clientpositive/stats18.q.out
index 4deff84..73c4848 100644
--- a/ql/src/test/results/clientpositive/stats18.q.out
+++ b/ql/src/test/results/clientpositive/stats18.q.out
@@ -60,10 +60,10 @@
Sort Columns: []
Storage Desc Params:
serialization.format 1
-PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
+PREHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
PREHOOK: type: LOAD
PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
-POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE stats_part partition (ds='2010-04-08', hr='13')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
POSTHOOK: Lineage: stats_part PARTITION(ds=2010-04-08,hr=13).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/stats3.q.out b/ql/src/test/results/clientpositive/stats3.q.out
index 6636415..85fc994 100644
--- a/ql/src/test/results/clientpositive/stats3.q.out
+++ b/ql/src/test/results/clientpositive/stats3.q.out
@@ -12,13 +12,13 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hive_test_src
PREHOOK: query: explain extended
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
PREHOOK: type: LOAD
POSTHOOK: query: explain extended
-load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
POSTHOOK: type: LOAD
ABSTRACT SYNTAX TREE:
- (TOK_LOAD '../data/files/test.dat' (TOK_TAB (TOK_TABNAME hive_test_src)) local overwrite)
+ (TOK_LOAD '../../data/files/test.dat' (TOK_TAB (TOK_TABNAME hive_test_src)) local overwrite)
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -56,10 +56,10 @@
Stats-Aggr Operator
-PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+PREHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
PREHOOK: type: LOAD
PREHOOK: Output: default@hive_test_src
-POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+POSTHOOK: query: load data local inpath '../../data/files/test.dat' overwrite into table hive_test_src
POSTHOOK: type: LOAD
POSTHOOK: Output: default@hive_test_src
PREHOOK: query: desc formatted hive_test_src
diff --git a/ql/src/test/results/clientpositive/subq.q.out b/ql/src/test/results/clientpositive/subq.q.out
index 7989b5d..68fe5ad 100644
--- a/ql/src/test/results/clientpositive/subq.q.out
+++ b/ql/src/test/results/clientpositive/subq.q.out
@@ -2,16 +2,16 @@
FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME unioninput))))))
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 'target/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME unioninput))))))
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -61,7 +61,7 @@
Move Operator
files:
hdfs directory: true
- destination: ../build/ql/test/data/warehouse/union.out
+ destination: target/warehouse/union.out
Stage: Stage-2
Map Reduce
@@ -99,17 +99,17 @@
PREHOOK: query: FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/warehouse/union.out
+PREHOOK: Output: target/warehouse/union.out
POSTHOOK: query: FROM (
FROM src select src.* WHERE src.key < 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/warehouse/union.out
+POSTHOOK: Output: target/warehouse/union.out
86val_86
27val_27
98val_98
diff --git a/ql/src/test/results/clientpositive/subquery_in.q.out b/ql/src/test/results/clientpositive/subquery_in.q.out
index 32a248a..b04b508 100644
--- a/ql/src/test/results/clientpositive/subquery_in.q.out
+++ b/ql/src/test/results/clientpositive/subquery_in.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: DROP TABLE lineitem
@@ -78,10 +78,10 @@
FIELDS TERMINATED BY '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@lineitem
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
PREHOOK: type: LOAD
PREHOOK: Output: default@lineitem
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
POSTHOOK: type: LOAD
POSTHOOK: Output: default@lineitem
PREHOOK: query: -- non agg, non corr
diff --git a/ql/src/test/results/clientpositive/subquery_notin.q.out b/ql/src/test/results/clientpositive/subquery_notin.q.out
index f80af93..bf87e3b 100644
--- a/ql/src/test/results/clientpositive/subquery_notin.q.out
+++ b/ql/src/test/results/clientpositive/subquery_notin.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: DROP TABLE lineitem
@@ -78,10 +78,10 @@
FIELDS TERMINATED BY '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@lineitem
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
PREHOOK: type: LOAD
PREHOOK: Output: default@lineitem
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem
POSTHOOK: type: LOAD
POSTHOOK: Output: default@lineitem
PREHOOK: query: -- non agg, non corr
diff --git a/ql/src/test/results/clientpositive/symlink_text_input_format.q.out b/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
index eb15d88..849d915 100644
--- a/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
+++ b/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
@@ -1,3 +1,7 @@
+PREHOOK: query: DROP TABLE IF EXISTS symlink_text_input_format
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS symlink_text_input_format
+POSTHOOK: type: DROPTABLE
PREHOOK: query: EXPLAIN
CREATE TABLE symlink_text_input_format (key STRING, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
PREHOOK: type: CREATETABLE
@@ -241,3 +245,11 @@
POSTHOOK: Input: default@symlink_text_input_format
#### A masked pattern was here ####
16
+PREHOOK: query: DROP TABLE symlink_text_input_format
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@symlink_text_input_format
+PREHOOK: Output: default@symlink_text_input_format
+POSTHOOK: query: DROP TABLE symlink_text_input_format
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@symlink_text_input_format
+POSTHOOK: Output: default@symlink_text_input_format
diff --git a/ql/src/test/results/clientpositive/table_access_keys_stats.q.out b/ql/src/test/results/clientpositive/table_access_keys_stats.q.out
index 15ff151..0521251 100644
--- a/ql/src/test/results/clientpositive/table_access_keys_stats.q.out
+++ b/ql/src/test/results/clientpositive/table_access_keys_stats.q.out
@@ -2,7 +2,7 @@
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
diff --git a/ql/src/test/results/clientpositive/timestamp_null.q.out b/ql/src/test/results/clientpositive/timestamp_null.q.out
index d21b880..57269d7 100644
--- a/ql/src/test/results/clientpositive/timestamp_null.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_null.q.out
@@ -7,10 +7,10 @@
POSTHOOK: query: CREATE TABLE timestamp_null (t1 TIMESTAMP)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@timestamp_null
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null
PREHOOK: type: LOAD
PREHOOK: Output: default@timestamp_null
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/test.dat' OVERWRITE INTO TABLE timestamp_null
POSTHOOK: type: LOAD
POSTHOOK: Output: default@timestamp_null
PREHOOK: query: SELECT * FROM timestamp_null LIMIT 1
diff --git a/ql/src/test/results/clientpositive/truncate_table.q.out b/ql/src/test/results/clientpositive/truncate_table.q.out
index 8ef2dd5..d64ac18 100644
--- a/ql/src/test/results/clientpositive/truncate_table.q.out
+++ b/ql/src/test/results/clientpositive/truncate_table.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: create table src_truncate (key string, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@src_truncate
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_truncate
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table src_truncate
PREHOOK: type: LOAD
PREHOOK: Output: default@src_truncate
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_truncate
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table src_truncate
POSTHOOK: type: LOAD
POSTHOOK: Output: default@src_truncate
PREHOOK: query: create table srcpart_truncate (key string, value string) partitioned by (ds string, hr string)
@@ -42,28 +42,28 @@
POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@srcpart_truncate
POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
-PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
PREHOOK: type: LOAD
PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
-POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
PREHOOK: query: -- truncate non-partitioned table
diff --git a/ql/src/test/results/clientpositive/udaf_context_ngrams.q.out b/ql/src/test/results/clientpositive/udaf_context_ngrams.q.out
index 2759582..3ee8509 100644
--- a/ql/src/test/results/clientpositive/udaf_context_ngrams.q.out
+++ b/ql/src/test/results/clientpositive/udaf_context_ngrams.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE kafka (contents STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@kafka
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka
PREHOOK: type: LOAD
PREHOOK: Output: default@kafka
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka
POSTHOOK: type: LOAD
POSTHOOK: Output: default@kafka
PREHOOK: query: SELECT context_ngrams(sentences(lower(contents)), array(null), 100, 1000).estfrequency FROM kafka
diff --git a/ql/src/test/results/clientpositive/udaf_corr.q.out b/ql/src/test/results/clientpositive/udaf_corr.q.out
index 89dbfa9..5a73f2b 100644
--- a/ql/src/test/results/clientpositive/udaf_corr.q.out
+++ b/ql/src/test/results/clientpositive/udaf_corr.q.out
@@ -11,11 +11,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@covar_tab
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
PREHOOK: type: LOAD
PREHOOK: Output: default@covar_tab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
POSTHOOK: type: LOAD
POSTHOOK: Output: default@covar_tab
diff --git a/ql/src/test/results/clientpositive/udaf_covar_pop.q.out b/ql/src/test/results/clientpositive/udaf_covar_pop.q.out
index 6f02138..9b1f9ca 100644
--- a/ql/src/test/results/clientpositive/udaf_covar_pop.q.out
+++ b/ql/src/test/results/clientpositive/udaf_covar_pop.q.out
@@ -11,11 +11,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@covar_tab
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
PREHOOK: type: LOAD
PREHOOK: Output: default@covar_tab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
POSTHOOK: type: LOAD
POSTHOOK: Output: default@covar_tab
diff --git a/ql/src/test/results/clientpositive/udaf_covar_samp.q.out b/ql/src/test/results/clientpositive/udaf_covar_samp.q.out
index bfb425b..f053289 100644
--- a/ql/src/test/results/clientpositive/udaf_covar_samp.q.out
+++ b/ql/src/test/results/clientpositive/udaf_covar_samp.q.out
@@ -11,11 +11,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@covar_tab
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
PREHOOK: type: LOAD
PREHOOK: Output: default@covar_tab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/covar_tab.txt' OVERWRITE
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/covar_tab.txt' OVERWRITE
INTO TABLE covar_tab
POSTHOOK: type: LOAD
POSTHOOK: Output: default@covar_tab
diff --git a/ql/src/test/results/clientpositive/udaf_ngrams.q.out b/ql/src/test/results/clientpositive/udaf_ngrams.q.out
index 87b1f0b..238a2e1 100644
--- a/ql/src/test/results/clientpositive/udaf_ngrams.q.out
+++ b/ql/src/test/results/clientpositive/udaf_ngrams.q.out
@@ -3,10 +3,10 @@
POSTHOOK: query: CREATE TABLE kafka (contents STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@kafka
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka
PREHOOK: type: LOAD
PREHOOK: Output: default@kafka
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/text-en.txt' INTO TABLE kafka
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/text-en.txt' INTO TABLE kafka
POSTHOOK: type: LOAD
POSTHOOK: Output: default@kafka
PREHOOK: query: SELECT ngrams(sentences(lower(contents)), 1, 100, 1000).estfrequency FROM kafka
diff --git a/ql/src/test/results/clientpositive/udaf_percentile_approx_20.q.out b/ql/src/test/results/clientpositive/udaf_percentile_approx_20.q.out
index c4135d3..0d14e5e 100644
--- a/ql/src/test/results/clientpositive/udaf_percentile_approx_20.q.out
+++ b/ql/src/test/results/clientpositive/udaf_percentile_approx_20.q.out
@@ -7,28 +7,28 @@
CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
PREHOOK: query: create table t1 (result double)
diff --git a/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out b/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
index e638ffe..a63846a 100644
--- a/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
+++ b/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
@@ -9,28 +9,28 @@
CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket2outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket3outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
-PREHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
+PREHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
PREHOOK: type: LOAD
PREHOOK: Output: default@bucket
-POSTHOOK: query: load data local inpath '../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
+POSTHOOK: query: load data local inpath '../../data/files/srcsortbucket4outof4.txt' INTO TABLE bucket
POSTHOOK: type: LOAD
POSTHOOK: Output: default@bucket
PREHOOK: query: create table t1 (result double)
diff --git a/ql/src/test/results/clientpositive/udf_field.q.out b/ql/src/test/results/clientpositive/udf_field.q.out
index de6f817..09ab1f9 100644
--- a/ql/src/test/results/clientpositive/udf_field.q.out
+++ b/ql/src/test/results/clientpositive/udf_field.q.out
@@ -73,10 +73,10 @@
POSTHOOK: query: CREATE TABLE test_table(col1 STRING, col2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@test_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table
PREHOOK: type: LOAD
PREHOOK: Output: default@test_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test_table
PREHOOK: query: select col1,col2,
@@ -116,10 +116,10 @@
POSTHOOK: query: CREATE TABLE test_table1(col1 int, col2 string) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@test_table1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table1
PREHOOK: type: LOAD
PREHOOK: Output: default@test_table1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@test_table1
PREHOOK: query: select col1,col2,
diff --git a/ql/src/test/results/clientpositive/udf_in_file.q.out b/ql/src/test/results/clientpositive/udf_in_file.q.out
index aff0cfe..abd6adc 100644
--- a/ql/src/test/results/clientpositive/udf_in_file.q.out
+++ b/ql/src/test/results/clientpositive/udf_in_file.q.out
@@ -4,19 +4,19 @@
POSTHOOK: type: DESCFUNCTION
in_file(str, filename) - Returns true if str appears in the file
PREHOOK: query: EXPLAIN
-SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
-SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION in_file "303" "../data/files/test2.dat")) (TOK_SELEXPR (TOK_FUNCTION in_file "304" "../data/files/test2.dat")) (TOK_SELEXPR (TOK_FUNCTION in_file (TOK_FUNCTION TOK_STRING TOK_NULL) "../data/files/test2.dat"))) (TOK_LIMIT 1)))
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION in_file "303" "../../data/files/test2.dat")) (TOK_SELEXPR (TOK_FUNCTION in_file "304" "../../data/files/test2.dat")) (TOK_SELEXPR (TOK_FUNCTION in_file (TOK_FUNCTION TOK_STRING TOK_NULL) "../../data/files/test2.dat"))) (TOK_LIMIT 1)))
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -31,11 +31,11 @@
alias: src
Select Operator
expressions:
- expr: in_file('303', '../data/files/test2.dat')
+ expr: in_file('303', '../../data/files/test2.dat')
type: boolean
- expr: in_file('304', '../data/files/test2.dat')
+ expr: in_file('304', '../../data/files/test2.dat')
type: boolean
- expr: in_file(UDFToString(null), '../data/files/test2.dat')
+ expr: in_file(UDFToString(null), '../../data/files/test2.dat')
type: boolean
outputColumnNames: _col0, _col1, _col2
Limit
@@ -52,16 +52,16 @@
limit: 1
-PREHOOK: query: SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+PREHOOK: query: SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
#### A masked pattern was here ####
-POSTHOOK: query: SELECT in_file("303", "../data/files/test2.dat"),
- in_file("304", "../data/files/test2.dat"),
- in_file(CAST(NULL AS STRING), "../data/files/test2.dat")
+POSTHOOK: query: SELECT in_file("303", "../../data/files/test2.dat"),
+ in_file("304", "../../data/files/test2.dat"),
+ in_file(CAST(NULL AS STRING), "../../data/files/test2.dat")
FROM src LIMIT 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
diff --git a/ql/src/test/results/clientpositive/udf_length.q.out b/ql/src/test/results/clientpositive/udf_length.q.out
index 691a1a8..f757c89 100644
--- a/ql/src/test/results/clientpositive/udf_length.q.out
+++ b/ql/src/test/results/clientpositive/udf_length.q.out
@@ -171,10 +171,10 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.len EXPRESSION [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1
PREHOOK: type: LOAD
PREHOOK: Output: default@dest1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.len EXPRESSION [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/udf_printf.q.out b/ql/src/test/results/clientpositive/udf_printf.q.out
index 9cc2158..0cd9df9 100644
--- a/ql/src/test/results/clientpositive/udf_printf.q.out
+++ b/ql/src/test/results/clientpositive/udf_printf.q.out
@@ -142,10 +142,10 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@binay_udf
POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/string.txt' INTO TABLE binay_udf
PREHOOK: type: LOAD
PREHOOK: Output: default@binay_udf
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/string.txt' INTO TABLE binay_udf
POSTHOOK: type: LOAD
POSTHOOK: Output: default@binay_udf
POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
diff --git a/ql/src/test/results/clientpositive/udf_reverse.q.out b/ql/src/test/results/clientpositive/udf_reverse.q.out
index 14b75b6..2cb886b 100644
--- a/ql/src/test/results/clientpositive/udf_reverse.q.out
+++ b/ql/src/test/results/clientpositive/udf_reverse.q.out
@@ -175,10 +175,10 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.len EXPRESSION [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1
PREHOOK: type: LOAD
PREHOOK: Output: default@dest1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE dest1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv4.txt' INTO TABLE dest1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.len EXPRESSION [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
diff --git a/ql/src/test/results/clientpositive/udf_sort_array.q.out b/ql/src/test/results/clientpositive/udf_sort_array.q.out
index d8316bd..4609a2a 100644
--- a/ql/src/test/results/clientpositive/udf_sort_array.q.out
+++ b/ql/src/test/results/clientpositive/udf_sort_array.q.out
@@ -127,10 +127,10 @@
) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1
PREHOOK: type: LOAD
PREHOOK: Output: default@dest1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/primitive_type_arrays.txt' OVERWRITE INTO TABLE dest1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@dest1
PREHOOK: query: SELECT sort_array(tinyints), sort_array(smallints), sort_array(ints),
diff --git a/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out b/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
index 7ad34d4..df8d869 100644
--- a/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
@@ -14,10 +14,10 @@
POSTHOOK: query: create table oneline(key int, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@oneline
-PREHOOK: query: load data local inpath '../data/files/things.txt' into table oneline
+PREHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
PREHOOK: type: LOAD
PREHOOK: Output: default@oneline
-POSTHOOK: query: load data local inpath '../data/files/things.txt' into table oneline
+POSTHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
POSTHOOK: type: LOAD
POSTHOOK: Output: default@oneline
PREHOOK: query: SELECT
diff --git a/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out b/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
index cb47182..d2295bf 100644
--- a/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
@@ -14,10 +14,10 @@
POSTHOOK: query: create table oneline(key int, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@oneline
-PREHOOK: query: load data local inpath '../data/files/things.txt' into table oneline
+PREHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
PREHOOK: type: LOAD
PREHOOK: Output: default@oneline
-POSTHOOK: query: load data local inpath '../data/files/things.txt' into table oneline
+POSTHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
POSTHOOK: type: LOAD
POSTHOOK: Output: default@oneline
PREHOOK: query: SELECT
diff --git a/ql/src/test/results/clientpositive/udtf_posexplode.q.out b/ql/src/test/results/clientpositive/udtf_posexplode.q.out
index 78be2b9..54eb493 100644
--- a/ql/src/test/results/clientpositive/udtf_posexplode.q.out
+++ b/ql/src/test/results/clientpositive/udtf_posexplode.q.out
@@ -13,10 +13,10 @@
address STRUCT<street:STRING, city:STRING, state:STRING, zip:INT>)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@employees
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/posexplode_data.txt' INTO TABLE employees
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/posexplode_data.txt' INTO TABLE employees
PREHOOK: type: LOAD
PREHOOK: Output: default@employees
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/posexplode_data.txt' INTO TABLE employees
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/posexplode_data.txt' INTO TABLE employees
POSTHOOK: type: LOAD
POSTHOOK: Output: default@employees
PREHOOK: query: SELECT
diff --git a/ql/src/test/results/clientpositive/union.q.out b/ql/src/test/results/clientpositive/union.q.out
index 0945638..b0837ed 100644
--- a/ql/src/test/results/clientpositive/union.q.out
+++ b/ql/src/test/results/clientpositive/union.q.out
@@ -6,7 +6,7 @@
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
PREHOOK: type: QUERY
POSTHOOK: query: -- union case: both subqueries are map jobs on same input, followed by filesink
@@ -16,10 +16,10 @@
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME unioninput))))))
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 'target/warehouse/union.out')) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME unioninput))))))
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -106,7 +106,7 @@
Move Operator
files:
hdfs directory: true
- destination: ../build/ql/test/data/warehouse/union.out
+ destination: target/warehouse/union.out
Stage: Stage-2
Map Reduce
@@ -146,19 +146,19 @@
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: ../build/ql/test/data/warehouse/union.out
+PREHOOK: Output: target/warehouse/union.out
POSTHOOK: query: FROM (
FROM src select src.key, src.value WHERE src.key < 100
UNION ALL
FROM src SELECT src.* WHERE src.key > 100
) unioninput
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out' SELECT unioninput.*
+INSERT OVERWRITE DIRECTORY 'target/warehouse/union.out' SELECT unioninput.*
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: ../build/ql/test/data/warehouse/union.out
+POSTHOOK: Output: target/warehouse/union.out
238val_238
86val_86
311val_311
diff --git a/ql/src/test/results/clientpositive/union_date.q.out b/ql/src/test/results/clientpositive/union_date.q.out
index cf8ca44..92b94ad 100644
--- a/ql/src/test/results/clientpositive/union_date.q.out
+++ b/ql/src/test/results/clientpositive/union_date.q.out
@@ -40,16 +40,16 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@union_date_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1
PREHOOK: type: LOAD
PREHOOK: Output: default@union_date_1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@union_date_1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2
PREHOOK: type: LOAD
PREHOOK: Output: default@union_date_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_join.txt' OVERWRITE INTO TABLE union_date_2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@union_date_2
PREHOOK: query: select * from (
diff --git a/ql/src/test/results/clientpositive/union_remove_1.q.out b/ql/src/test/results/clientpositive/union_remove_1.q.out
index b28d891..785cd03 100644
--- a/ql/src/test/results/clientpositive/union_remove_1.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_1.q.out
@@ -28,10 +28,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_10.q.out b/ql/src/test/results/clientpositive/union_remove_10.q.out
index 2316641..25eeb50 100644
--- a/ql/src/test/results/clientpositive/union_remove_10.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_10.q.out
@@ -36,10 +36,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_11.q.out b/ql/src/test/results/clientpositive/union_remove_11.q.out
index abc28f1..2e579d4 100644
--- a/ql/src/test/results/clientpositive/union_remove_11.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_11.q.out
@@ -36,10 +36,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_12.q.out b/ql/src/test/results/clientpositive/union_remove_12.q.out
index 8debcc8..5794ae1 100644
--- a/ql/src/test/results/clientpositive/union_remove_12.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_12.q.out
@@ -34,10 +34,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_13.q.out b/ql/src/test/results/clientpositive/union_remove_13.q.out
index 6d35db4..581472a 100644
--- a/ql/src/test/results/clientpositive/union_remove_13.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_13.q.out
@@ -34,10 +34,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_14.q.out b/ql/src/test/results/clientpositive/union_remove_14.q.out
index 9c408e9..3537d7b 100644
--- a/ql/src/test/results/clientpositive/union_remove_14.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_14.q.out
@@ -36,10 +36,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_15.q.out b/ql/src/test/results/clientpositive/union_remove_15.q.out
index b036e9a..fbd76f8 100644
--- a/ql/src/test/results/clientpositive/union_remove_15.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_15.q.out
@@ -34,10 +34,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_16.q.out b/ql/src/test/results/clientpositive/union_remove_16.q.out
index 6483a6a..18edba7 100644
--- a/ql/src/test/results/clientpositive/union_remove_16.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_16.q.out
@@ -32,10 +32,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_17.q.out b/ql/src/test/results/clientpositive/union_remove_17.q.out
index 8d1ce63..73531d3 100644
--- a/ql/src/test/results/clientpositive/union_remove_17.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_17.q.out
@@ -28,10 +28,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_18.q.out b/ql/src/test/results/clientpositive/union_remove_18.q.out
index 7951eac..bc8046f 100644
--- a/ql/src/test/results/clientpositive/union_remove_18.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_18.q.out
@@ -32,10 +32,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) partitioned by (ds string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_19.q.out b/ql/src/test/results/clientpositive/union_remove_19.q.out
index 3e0c984..44eb72b 100644
--- a/ql/src/test/results/clientpositive/union_remove_19.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_19.q.out
@@ -28,10 +28,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_2.q.out b/ql/src/test/results/clientpositive/union_remove_2.q.out
index ec044cb..9157983 100644
--- a/ql/src/test/results/clientpositive/union_remove_2.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_2.q.out
@@ -30,10 +30,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_20.q.out b/ql/src/test/results/clientpositive/union_remove_20.q.out
index 22f97c0..decdbc4 100644
--- a/ql/src/test/results/clientpositive/union_remove_20.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_20.q.out
@@ -30,10 +30,10 @@
POSTHOOK: query: create table outputTbl1(values bigint, key string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_21.q.out b/ql/src/test/results/clientpositive/union_remove_21.q.out
index c2dde8c..4446085 100644
--- a/ql/src/test/results/clientpositive/union_remove_21.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_21.q.out
@@ -30,10 +30,10 @@
POSTHOOK: query: create table outputTbl1(key string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_22.q.out b/ql/src/test/results/clientpositive/union_remove_22.q.out
index 2c77827..8faf0ba 100644
--- a/ql/src/test/results/clientpositive/union_remove_22.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_22.q.out
@@ -28,10 +28,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint, values2 bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_23.q.out b/ql/src/test/results/clientpositive/union_remove_23.q.out
index 28e2a1c..4ebbe22 100644
--- a/ql/src/test/results/clientpositive/union_remove_23.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_23.q.out
@@ -30,10 +30,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_24.q.out b/ql/src/test/results/clientpositive/union_remove_24.q.out
index 40199e3..6bc20d8 100644
--- a/ql/src/test/results/clientpositive/union_remove_24.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_24.q.out
@@ -26,10 +26,10 @@
POSTHOOK: query: create table outputTbl1(key double, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: EXPLAIN
diff --git a/ql/src/test/results/clientpositive/union_remove_3.q.out b/ql/src/test/results/clientpositive/union_remove_3.q.out
index 0860e75..f475aba 100644
--- a/ql/src/test/results/clientpositive/union_remove_3.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_3.q.out
@@ -30,10 +30,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_4.q.out b/ql/src/test/results/clientpositive/union_remove_4.q.out
index 34341f9..6b7437d 100644
--- a/ql/src/test/results/clientpositive/union_remove_4.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_4.q.out
@@ -28,10 +28,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_5.q.out b/ql/src/test/results/clientpositive/union_remove_5.q.out
index e367474..e5722f9 100644
--- a/ql/src/test/results/clientpositive/union_remove_5.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_5.q.out
@@ -32,10 +32,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_6.q.out b/ql/src/test/results/clientpositive/union_remove_6.q.out
index 259212b..eb1f204 100644
--- a/ql/src/test/results/clientpositive/union_remove_6.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_6.q.out
@@ -27,10 +27,10 @@
POSTHOOK: query: create table outputTbl2(key string, values bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl2
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_7.q.out b/ql/src/test/results/clientpositive/union_remove_7.q.out
index 81699f9..de53880 100644
--- a/ql/src/test/results/clientpositive/union_remove_7.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_7.q.out
@@ -32,10 +32,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_8.q.out b/ql/src/test/results/clientpositive/union_remove_8.q.out
index b999476..5e8807e 100644
--- a/ql/src/test/results/clientpositive/union_remove_8.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_8.q.out
@@ -34,10 +34,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/union_remove_9.q.out b/ql/src/test/results/clientpositive/union_remove_9.q.out
index 90d669b..b86eb02 100644
--- a/ql/src/test/results/clientpositive/union_remove_9.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_9.q.out
@@ -32,10 +32,10 @@
POSTHOOK: query: create table outputTbl1(key string, values bigint) stored as rcfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
-PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+PREHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
-POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
+POSTHOOK: query: load data local inpath '../../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
diff --git a/ql/src/test/results/clientpositive/uniquejoin.q.out b/ql/src/test/results/clientpositive/uniquejoin.q.out
index faf414d..00172b5 100644
--- a/ql/src/test/results/clientpositive/uniquejoin.q.out
+++ b/ql/src/test/results/clientpositive/uniquejoin.q.out
@@ -13,22 +13,22 @@
POSTHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@T3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
PREHOOK: type: LOAD
PREHOOK: Output: default@t1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
PREHOOK: type: LOAD
PREHOOK: Output: default@t2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
PREHOOK: type: LOAD
PREHOOK: Output: default@t3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T3.txt' INTO TABLE T3
POSTHOOK: type: LOAD
POSTHOOK: Output: default@t3
PREHOOK: query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
diff --git a/ql/src/test/results/clientpositive/varchar_1.q.out b/ql/src/test/results/clientpositive/varchar_1.q.out
index 6d5dc00..0757dff 100644
--- a/ql/src/test/results/clientpositive/varchar_1.q.out
+++ b/ql/src/test/results/clientpositive/varchar_1.q.out
@@ -17,11 +17,11 @@
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@varchar1_1
PREHOOK: query: -- load from file
-load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar1
+load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar1
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar1
POSTHOOK: query: -- load from file
-load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar1
+load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar1
PREHOOK: query: select * from varchar1 order by key, value limit 2
diff --git a/ql/src/test/results/clientpositive/varchar_join1.q.out b/ql/src/test/results/clientpositive/varchar_join1.q.out
index b389de9..c99e8e4 100644
--- a/ql/src/test/results/clientpositive/varchar_join1.q.out
+++ b/ql/src/test/results/clientpositive/varchar_join1.q.out
@@ -43,22 +43,22 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@varchar_join1_str
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc1
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc1
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_join1_vc1
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc1
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_join1_vc1
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc2
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc2
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_join1_vc2
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_vc2
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_vc2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_join1_vc2
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_str
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_str
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_join1_str
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_join1_str
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_join1_str
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_join1_str
PREHOOK: query: -- Join varchar with same length varchar
diff --git a/ql/src/test/results/clientpositive/varchar_serde.q.out b/ql/src/test/results/clientpositive/varchar_serde.q.out
index 8ae974e..1cccd60 100644
--- a/ql/src/test/results/clientpositive/varchar_serde.q.out
+++ b/ql/src/test/results/clientpositive/varchar_serde.q.out
@@ -49,10 +49,10 @@
stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@varchar_serde_regex
-PREHOOK: query: load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex
+PREHOOK: query: load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_serde_regex
-POSTHOOK: query: load data local inpath '../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex
+POSTHOOK: query: load data local inpath '../../data/files/srcbucket0.txt' overwrite into table varchar_serde_regex
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_serde_regex
PREHOOK: query: select * from varchar_serde_regex limit 5
diff --git a/ql/src/test/results/clientpositive/varchar_union1.q.out b/ql/src/test/results/clientpositive/varchar_union1.q.out
index 8041b7b..a591551 100644
--- a/ql/src/test/results/clientpositive/varchar_union1.q.out
+++ b/ql/src/test/results/clientpositive/varchar_union1.q.out
@@ -43,22 +43,22 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@varchar_union1_str
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc1
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc1
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_union1_vc1
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc1
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_union1_vc1
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc2
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc2
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_union1_vc2
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc2
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_vc2
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_union1_vc2
-PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_str
+PREHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_str
PREHOOK: type: LOAD
PREHOOK: Output: default@varchar_union1_str
-POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_str
+POSTHOOK: query: load data local inpath '../../data/files/vc1.txt' into table varchar_union1_str
POSTHOOK: type: LOAD
POSTHOOK: Output: default@varchar_union1_str
PREHOOK: query: -- union varchar with same length varchar
diff --git a/ql/src/test/results/clientpositive/view.q.out b/ql/src/test/results/clientpositive/view.q.out
index 30d5b88..288ae47 100644
--- a/ql/src/test/results/clientpositive/view.q.out
+++ b/ql/src/test/results/clientpositive/view.q.out
@@ -13,11 +13,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db1@table1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table1
PREHOOK: type: LOAD
PREHOOK: Output: db1@table1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table1
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@table1
@@ -28,11 +28,11 @@
STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: db1@table2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table2
PREHOOK: type: LOAD
PREHOOK: Output: db1@table2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt'
OVERWRITE INTO TABLE table2
POSTHOOK: type: LOAD
POSTHOOK: Output: db1@table2
diff --git a/ql/src/test/results/clientpositive/view_cast.q.out b/ql/src/test/results/clientpositive/view_cast.q.out
index f2c0afc..ed1e410 100644
--- a/ql/src/test/results/clientpositive/view_cast.q.out
+++ b/ql/src/test/results/clientpositive/view_cast.q.out
@@ -7,17 +7,17 @@
POSTHOOK: query: CREATE TABLE IF NOT EXISTS atab (ks_uid BIGINT, sr_uid STRING, sr_id STRING, tstamp STRING, m_id STRING, act STRING, at_sr_uid STRING, tstamp_type STRING, original_m_id STRING, original_tstamp STRING, registered_flag TINYINT, at_ks_uid BIGINT) PARTITIONED BY (dt STRING,nt STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@atab
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw')
PREHOOK: type: LOAD
PREHOOK: Output: default@atab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130312', nt='tw')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@atab
POSTHOOK: Output: default@atab@dt=20130312/nt=tw
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw')
PREHOOK: type: LOAD
PREHOOK: Output: default@atab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v1.txt' INTO TABLE atab PARTITION (dt='20130311', nt='tw')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@atab
POSTHOOK: Output: default@atab@dt=20130311/nt=tw
@@ -30,10 +30,10 @@
POSTHOOK: query: CREATE TABLE mstab(ks_uid INT, csc INT) PARTITIONED BY (dt STRING)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@mstab
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311')
PREHOOK: type: LOAD
PREHOOK: Output: default@mstab
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/v2.txt' INTO TABLE mstab PARTITION (dt='20130311')
POSTHOOK: type: LOAD
POSTHOOK: Output: default@mstab
POSTHOOK: Output: default@mstab@dt=20130311
diff --git a/ql/src/test/results/clientpositive/windowing.q.out b/ql/src/test/results/clientpositive/windowing.q.out
index a1c445c..bbd169f 100644
--- a/ql/src/test/results/clientpositive/windowing.q.out
+++ b/ql/src/test/results/clientpositive/windowing.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: -- 1. testWindowing
diff --git a/ql/src/test/results/clientpositive/windowing_adjust_rowcontainer_sz.q.out b/ql/src/test/results/clientpositive/windowing_adjust_rowcontainer_sz.q.out
index b8c4a6a..0a15f68 100644
--- a/ql/src/test/results/clientpositive/windowing_adjust_rowcontainer_sz.q.out
+++ b/ql/src/test/results/clientpositive/windowing_adjust_rowcontainer_sz.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: select p_mfgr, p_name, p_size,
diff --git a/ql/src/test/results/clientpositive/windowing_columnPruning.q.out b/ql/src/test/results/clientpositive/windowing_columnPruning.q.out
index 5856417..91d6237 100644
--- a/ql/src/test/results/clientpositive/windowing_columnPruning.q.out
+++ b/ql/src/test/results/clientpositive/windowing_columnPruning.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: -- 1. testQueryLevelPartitionColsNotInSelect
diff --git a/ql/src/test/results/clientpositive/windowing_expressions.q.out b/ql/src/test/results/clientpositive/windowing_expressions.q.out
index 6b69f00..ce3d943 100644
--- a/ql/src/test/results/clientpositive/windowing_expressions.q.out
+++ b/ql/src/test/results/clientpositive/windowing_expressions.q.out
@@ -29,10 +29,10 @@
)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
PREHOOK: type: LOAD
PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
POSTHOOK: type: LOAD
POSTHOOK: Output: default@part
PREHOOK: query: drop table over10k
@@ -70,10 +70,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select p_mfgr, p_retailprice, p_size,
diff --git a/ql/src/test/results/clientpositive/windowing_multipartitioning.q.out b/ql/src/test/results/clientpositive/windowing_multipartitioning.q.out
index 0c42817..6472921 100644
--- a/ql/src/test/results/clientpositive/windowing_multipartitioning.q.out
+++ b/ql/src/test/results/clientpositive/windowing_multipartitioning.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select s, rank() over (partition by s order by si), sum(b) over (partition by s order by si) from over10k limit 100
diff --git a/ql/src/test/results/clientpositive/windowing_navfn.q.out b/ql/src/test/results/clientpositive/windowing_navfn.q.out
index 5a8f845..4fff8fe 100644
--- a/ql/src/test/results/clientpositive/windowing_navfn.q.out
+++ b/ql/src/test/results/clientpositive/windowing_navfn.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select s, row_number() over (partition by d order by dec) from over10k limit 100
diff --git a/ql/src/test/results/clientpositive/windowing_ntile.q.out b/ql/src/test/results/clientpositive/windowing_ntile.q.out
index 7c1f7fc..7d95db7 100644
--- a/ql/src/test/results/clientpositive/windowing_ntile.q.out
+++ b/ql/src/test/results/clientpositive/windowing_ntile.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select i, ntile(10) over (partition by s order by i) from over10k limit 100
diff --git a/ql/src/test/results/clientpositive/windowing_rank.q.out b/ql/src/test/results/clientpositive/windowing_rank.q.out
index 4721a82..ac11897 100644
--- a/ql/src/test/results/clientpositive/windowing_rank.q.out
+++ b/ql/src/test/results/clientpositive/windowing_rank.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select s, rank() over (partition by f order by t) from over10k limit 100
diff --git a/ql/src/test/results/clientpositive/windowing_udaf.q.out b/ql/src/test/results/clientpositive/windowing_udaf.q.out
index 901229d..3231354 100644
--- a/ql/src/test/results/clientpositive/windowing_udaf.q.out
+++ b/ql/src/test/results/clientpositive/windowing_udaf.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select s, min(i) over (partition by s) from over10k limit 100
diff --git a/ql/src/test/results/clientpositive/windowing_windowspec.q.out b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
index 09a0684..9d05548 100644
--- a/ql/src/test/results/clientpositive/windowing_windowspec.q.out
+++ b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
@@ -33,10 +33,10 @@
fields terminated by '|'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
PREHOOK: type: LOAD
PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../data/files/over10k' into table over10k
+POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
POSTHOOK: type: LOAD
POSTHOOK: Output: default@over10k
PREHOOK: query: select s, sum(b) over (partition by i order by s,b rows unbounded preceding) from over10k limit 100
diff --git a/serde/pom.xml b/serde/pom.xml
new file mode 100644
index 0000000..35bc8cd
--- /dev/null
+++ b/serde/pom.xml
@@ -0,0 +1,163 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-serde</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Serde</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-shims</artifactId>
+ <version>${project.version}</version>
+ <classifier>uberjar</classifier>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>${commons-codec.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.avro</groupId>
+ <artifactId>avro</artifactId>
+ <version>${avro.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen/protobuf/gen-java</source>
+ <source>src/gen/thrift/gen-javabean</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/service/pom.xml b/service/pom.xml
new file mode 100644
index 0000000..2282ce2
--- /dev/null
+++ b/service/pom.xml
@@ -0,0 +1,161 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-service</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Service</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>${commons-cli.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>${commons-io.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libfb303</artifactId>
+ <version>${libfb303.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>hadoop-1</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ <profile>
+ <id>hadoop-2</id>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </profile>
+ </profiles>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/model</source>
+ <source>src/gen/thrift/gen-javabean</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java b/service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java
index 8db6313..f3fa1c7 100644
--- a/service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java
+++ b/service/src/test/org/apache/hive/service/auth/TestCustomAuthentication.java
@@ -26,102 +26,72 @@
import org.junit.Test;
import javax.security.sasl.AuthenticationException;
+import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
+import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
public class TestCustomAuthentication {
- private static HiveServer2 hiveserver2 = null;
-
- private static File configFile = null;
+ private static HiveServer2 hiveserver2;
+ private static HiveConf hiveConf;
+ private static byte[] hiveConfBackup;
@BeforeClass
public static void setUp() throws Exception {
- createConfig();
- startServer();
- }
-
- @AfterClass
- public static void tearDown() throws Exception {
- stopServer();
- removeConfig();
- }
-
- private static void startServer() throws Exception{
-
- HiveConf hiveConf = new HiveConf();
+ hiveConf = new HiveConf();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ hiveConf.writeXml(baos);
+ baos.close();
+ hiveConfBackup = baos.toByteArray();
+ hiveConf.set("hive.server2.authentication", "CUSTOM");
+ hiveConf.set("hive.server2.custom.authentication.class",
+ "org.apache.hive.service.auth.TestCustomAuthentication$SimpleAuthenticationProviderImpl");
+ FileOutputStream fos = new FileOutputStream(new File(hiveConf.getHiveSiteLocation().toURI()));
+ hiveConf.writeXml(fos);
+ fos.close();
hiveserver2 = new HiveServer2();
hiveserver2.init(hiveConf);
hiveserver2.start();
Thread.sleep(1000);
System.out.println("hiveServer2 start ......");
-
}
- private static void stopServer(){
- try {
- if (hiveserver2 != null) {
- hiveserver2.stop();
- hiveserver2 = null;
- }
- Thread.sleep(1000);
- } catch (Exception e) {
- e.printStackTrace();
+ @AfterClass
+ public static void tearDown() throws Exception {
+ if(hiveConf != null && hiveConfBackup != null) {
+ FileOutputStream fos = new FileOutputStream(new File(hiveConf.getHiveSiteLocation().toURI()));
+ fos.write(hiveConfBackup);
+ fos.close();
}
+ if (hiveserver2 != null) {
+ hiveserver2.stop();
+ hiveserver2 = null;
+ }
+ Thread.sleep(1000);
System.out.println("hiveServer2 stop ......");
}
- private static void createConfig() throws Exception{
-
- Configuration conf = new Configuration(false);
- conf.set("hive.server2.authentication", "CUSTOM");
- conf.set("hive.server2.custom.authentication.class",
- "org.apache.hive.service.auth.TestCustomAuthentication$SimpleAuthenticationProviderImpl");
-
- configFile = new File("../build/service/test/resources","hive-site.xml");
-
- FileOutputStream out = new FileOutputStream(configFile);
- conf.writeXml(out);
- }
-
- private static void removeConfig(){
- try {
- configFile.delete();
- } catch (Exception e){
- System.out.println(e.getMessage());
- }
- }
-
@Test
- public void testCustomAuthentication() throws Exception{
+ public void testCustomAuthentication() throws Exception {
String url = "jdbc:hive2://localhost:10000/default";
+ Class.forName("org.apache.hive.jdbc.HiveDriver");
- Exception exception = null;
- try{
- Class.forName("org.apache.hive.jdbc.HiveDriver");
- Connection connection = DriverManager.getConnection(url, "wronguser", "pwd");
- connection.close();
- } catch (Exception e){
- exception = e;
+ try {
+ DriverManager.getConnection(url, "wronguser", "pwd");
+ Assert.fail("Expected Exception");
+ } catch(SQLException e) {
+ Assert.assertNotNull(e.getMessage());
+ Assert.assertTrue(e.getMessage(), e.getMessage().contains("Peer indicated failure: Error validating the login"));
}
- Assert.assertNotNull(exception);
-
- exception = null;
- try{
- Class.forName("org.apache.hive.jdbc.HiveDriver");
- Connection connection = DriverManager.getConnection(url, "hiveuser", "hive");
- connection.close();
- } catch (Exception e){
- exception = e;
- }
-
- Assert.assertNull(exception);
+ Connection connection = DriverManager.getConnection(url, "hiveuser", "hive");
+ connection.close();
System.out.println(">>> PASSED testCustomAuthentication");
}
@@ -131,6 +101,7 @@ public static class SimpleAuthenticationProviderImpl implements PasswdAuthentica
private Map<String, String> userMap = new HashMap<String, String>();
public SimpleAuthenticationProviderImpl() {
+    Thread.dumpStack(); // debug aid: prints the constructor's call stack so the test log shows where the provider gets instantiated
init();
}
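
The provider the rewritten test drives is only partially visible in this hunk: its userMap field and constructor appear above, but its validation logic is elided. For orientation, a minimal sketch of a PasswdAuthenticationProvider consistent with the test's assertions (wronguser fails with "Error validating the login", hiveuser/hive succeeds). The class name and wiring below are assumptions; only the interface and the credential pair come from the patch:

import java.util.HashMap;
import java.util.Map;
import javax.security.sasl.AuthenticationException;
import org.apache.hive.service.auth.PasswdAuthenticationProvider;

// Hypothetical sketch: validates callers against an in-memory user map, as the
// test's assertions imply.
public class SketchAuthenticationProvider implements PasswdAuthenticationProvider {
  private final Map<String, String> userMap = new HashMap<String, String>();

  public SketchAuthenticationProvider() {
    userMap.put("hiveuser", "hive"); // credential pair the test logs in with
  }

  @Override
  public void Authenticate(String user, String password) throws AuthenticationException {
    String expected = userMap.get(user);
    if (expected == null || !expected.equals(password)) {
      // Surfaces to the JDBC client as "Peer indicated failure: Error validating the login"
      throw new AuthenticationException("Error validating the login");
    }
  }
}
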
diff --git a/shims/0.20/pom.xml b/shims/0.20/pom.xml
new file mode 100644
index 0000000..9117824
--- /dev/null
+++ b/shims/0.20/pom.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.20</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Shims 0.20</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-tools</artifactId>
+ <version>${hadoop-20.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/shims/0.20S/pom.xml b/shims/0.20S/pom.xml
new file mode 100644
index 0000000..58bc6f4
--- /dev/null
+++ b/shims/0.20S/pom.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.20S</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Shims 0.20S</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common-secure</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-test</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml
new file mode 100644
index 0000000..8e2c4e9
--- /dev/null
+++ b/shims/0.23/pom.xml
@@ -0,0 +1,91 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.23</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Shims 0.23</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common-secure</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <type>test-jar</type>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <type>test-jar</type>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>${jetty.version}</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/shims/common-secure/pom.xml b/shims/common-secure/pom.xml
new file mode 100644
index 0000000..12c102c
--- /dev/null
+++ b/shims/common-secure/pom.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common-secure</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Shims Secure Common</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- intra-project -->
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>${commons-codec.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commons-lang.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-tools</artifactId>
+ <version>${hadoop-20S.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
new file mode 100644
index 0000000..5871b6a
--- /dev/null
+++ b/shims/common/pom.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Shims Common</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>${log4j.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoop-20.version}</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/shims/pom.xml b/shims/pom.xml
new file mode 100644
index 0000000..d06173b
--- /dev/null
+++ b/shims/pom.xml
@@ -0,0 +1,112 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-shims</artifactId>
+ <packaging>pom</packaging>
+ <name>Hive Shims</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <modules>
+ <module>common</module>
+ <module>0.20</module>
+ <module>common-secure</module>
+ <module>0.20S</module>
+ <module>0.23</module>
+ </modules>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.20</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-common-secure</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.20S</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive.shims</groupId>
+ <artifactId>hive-shims-0.23</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <descriptors>
+ <descriptor>src/assembly/uberjar.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <executions>
+ <execution>
+ <phase>compile</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>attach-artifacts</id>
+ <phase>package</phase>
+ <goals>
+ <goal>attach-artifact</goal>
+ </goals>
+ <configuration>
+ <artifacts>
+ <artifact>
+ <file>target/hive-shims-${project.version}-uberjar.jar</file>
+ <type>jar</type>
+ </artifact>
+ </artifacts>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index d36587d..1e0114d 100644
--- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -88,7 +88,6 @@ public String getTaskAttemptLogUrl(JobConf conf,
@Override
public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
- JobTrackerState state;
switch (clusterStatus.getJobTrackerStatus()) {
case INITIALIZING:
return JobTrackerState.INITIALIZING;
@@ -217,10 +216,7 @@ public void shutdown() throws IOException {
public void setupConfiguration(Configuration conf) {
JobConf jConf = mr.createJobConf();
for (Map.Entry<String, String> pair: jConf) {
- //System.out.println("XXX Var: "+pair.getKey() +"="+pair.getValue());
- //if (conf.get(pair.getKey()) == null) {
- conf.set(pair.getKey(), pair.getValue());
- //}
+ conf.set(pair.getKey(), pair.getValue());
}
}
}
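
The hunk above shows only part of the simplified method. A hedged reconstruction of how the full mapping plausibly reads after the patch; the RUNNING arm and the default are assumed from the shim API, not shown in the hunk:

    import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
    import org.apache.hadoop.mapred.ClusterStatus;

    // Hedged reconstruction: each switch arm returns directly instead of
    // assigning to the removed local variable.
    public class JobTrackerStateMapper {
      public static JobTrackerState toJobTrackerState(ClusterStatus clusterStatus)
          throws Exception {
        switch (clusterStatus.getJobTrackerStatus()) {
        case INITIALIZING:
          return JobTrackerState.INITIALIZING;
        case RUNNING:
          return JobTrackerState.RUNNING;
        default:
          throw new Exception("Unrecognized JobTracker state: "
              + clusterStatus.getJobTrackerStatus());
        }
      }
    }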
diff --git a/shims/src/assembly/uberjar.xml b/shims/src/assembly/uberjar.xml
new file mode 100644
index 0000000..67ce50b
--- /dev/null
+++ b/shims/src/assembly/uberjar.xml
@@ -0,0 +1,19 @@
+<assembly>
+ <id>uberjar</id>
+ <formats>
+ <format>jar</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <dependencySets>
+ <dependencySet>
+ <unpack>true</unpack>
+ <scope>compile</scope>
+ <useProjectArtifact>false</useProjectArtifact>
+ <useTransitiveDependencies>false</useTransitiveDependencies>
+ <excludes>
+ <exclude>org.slf4j:slf4j-api</exclude>
+ <exclude>org.slf4j:slf4j-log4j12</exclude>
+ </excludes>
+ </dependencySet>
+ </dependencySets>
+</assembly>
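
The descriptor above unpacks each shim module's compile-scope dependencies into a single jar, leaving out slf4j so downstream consumers keep their own binding. All Hadoop-version variants are bundled because the matching one is selected at runtime; a minimal probe, assuming the ShimLoader entry point from hive-shims-common:

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;

    // Minimal probe: ShimLoader resolves the shim implementation matching
    // the Hadoop version found on the classpath.
    public class ShimProbe {
      public static void main(String[] args) {
        HadoopShims shims = ShimLoader.getHadoopShims();
        System.out.println("Loaded shim: " + shims.getClass().getName());
      }
    }

Run with the uberjar plus a concrete Hadoop distribution on the classpath; the printed class name indicates which shim was resolved.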
diff --git a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
index c5ae55f..8737da8 100644
--- a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
+++ b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
@@ -46,7 +46,7 @@ public class TestZooKeeperTokenStore extends TestCase {
@Override
protected void setUp() throws Exception {
- File zkDataDir = new File(System.getProperty("java.io.tmpdir"));
+ File zkDataDir = new File(System.getProperty("test.tmp.dir"));
if (this.zkCluster != null) {
throw new IOException("Cluster already running");
}
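
Pointing the ZooKeeper data directory at test.tmp.dir keeps scratch data under the build tree, but the property must be injected by the build (typically via surefire system properties). A hedged sketch for resolving the directory with a fallback, useful when the test is launched outside Maven:

    import java.io.File;

    // Hedged sketch: resolve the test scratch dir the way the patched test
    // expects, but fall back to java.io.tmpdir when test.tmp.dir is not
    // injected by the build (e.g. when launching from an IDE).
    public class TestTmpDir {
      public static File resolve() {
        String root = System.getProperty("test.tmp.dir",
            System.getProperty("java.io.tmpdir"));
        return new File(root);
      }
    }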
diff --git a/testutils/build b/testutils/build
deleted file mode 100755
index 3165292..0000000
--- a/testutils/build
+++ /dev/null
@@ -1,4 +0,0 @@
-cd ql
-ant build-grammar
-cd ..
-ant
diff --git a/testutils/pom.xml b/testutils/pom.xml
new file mode 100644
index 0000000..59b1dcd
--- /dev/null
+++ b/testutils/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive</artifactId>
+ <version>0.13.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-testutils</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive TestUtils</name>
+
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
+ <dependencies>
+ <!-- inter-project -->
+ <dependency>
+ <groupId>com.google.code.tempus-fugit</groupId>
+ <artifactId>tempus-fugit</artifactId>
+ <version>${tempus-fugit.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <sourceDirectory>${basedir}/src/java</sourceDirectory>
+ </build>
+
+</project>
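
hive-testutils pairs JUnit with tempus-fugit, which contributes concurrency helpers for tests. A sketch of the usual rule-based pattern, assuming tempus-fugit's ConcurrentRule/RepeatingRule and their annotations (names from the tempus-fugit distribution, not from this patch):

    import org.junit.Rule;
    import org.junit.Test;

    import com.google.code.tempusfugit.concurrency.ConcurrentRule;
    import com.google.code.tempusfugit.concurrency.RepeatingRule;
    import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
    import com.google.code.tempusfugit.concurrency.annotations.Repeating;

    public class ExampleConcurrencyTest {

      @Rule public ConcurrentRule concurrently = new ConcurrentRule();
      @Rule public RepeatingRule repeatedly = new RepeatingRule();

      // Runs the body on 4 threads, 10 times each, to shake out races.
      @Test
      @Concurrent(count = 4)
      @Repeating(repetition = 10)
      public void hammerSharedState() {
        // exercise thread-safety of the code under test here
      }
    }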