Merge pull request #1 from agozhiy/resurrection

Resurrection
diff --git a/.gitignore b/.gitignore
index 7e5cfd7..8ef36d7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,6 +21,7 @@
 framework/resources/Datasources/s3minio/minio/minio_data/.minio.sys
 framework/resources/Datasources/s3minio/minio/minio_data/tstbucket/tmp/ppruning/
 framework/resources/Datasources/s3minio/minio/minio_data/tstbucket/tmp/gitignore
+drill-output/
 
 # Hive stuff
 metastore_db
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 09634b4..7e0e4ea 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -85,6 +85,18 @@
  }
 </code></pre>
 
+The data described in the "datasources" section is prepared in several stages. The stages are defined by the "mode" field and are executed in the following order:  
+- **rm** - removes the data specified in the "dest" field
+- **cp** - copies data from "src" in the local file system to "dest" in the target file system
+- **mkdir** - creates the directory specified in "dest"
+- **gen** - executes the command specified in "src". A file with a .ddl extension is sent to Drill through JDBC
+- **post_rm** - the same as "rm", but executed later
+- **post_cp** - the same as "cp", but executed later
+- **dfs_cp** - copies data between dfs locations
+- **ddl** - executes the specified DDL file through JDBC
+
+Note that local paths are resolved against the DRILL_TEST_DATA_DIR property (framework/resources by default), and all dfs paths are resolved against the DRILL_TESTDATA property (/drill/testdata by default). An illustrative entry is shown below.  
+
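+For illustration, a hypothetical "datasources" entry that copies a file and then runs a DDL script might look like the following (the field names follow the description above; the paths and file names are invented for this example):
+
+<pre><code> "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/example/data.json",
+      "dest": "example/data.json"
+    },
+    {
+      "mode": "gen",
+      "src": "Datasources/example/create_tables.ddl"
+    }
+  ]
+</code></pre>
+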
 ### Special handling for Drill version in regex based baseline verification
 
 The Drill version in a regex based baseline file can be substituted with "{DRILL_VERSION}". 
diff --git a/README.md b/README.md
index 9a7f2d9..01bcb38 100644
--- a/README.md
+++ b/README.md
@@ -36,11 +36,25 @@
 
 If you've already downloaded the datasets previously, you can simply skip the download.
 
+If you're going to run the tests in Docker, you'll need to build the Docker image first:
+
+```
+mvn clean install docker:build -DskipTests
+```
+
 ## Execute Tests
 
 In the root directory of your repository, execute the following command to run tests:
 
-`bin/run_tests -s <suites> -g <groups> -t <Timeout> -x <Exclude> -n <Concurrency> -d`
+```
bin/run_tests -s <suites> -g <groups> -t <Timeout> -x <Exclude> -n <Concurrency> -d
+```
+
+Alternatively, you can run the tests in Docker:
+
+```
+mvn docker:start -Dtest.args="-s <suites> -g <groups> -t <Timeout> -x <Exclude> -n <Concurrency> -d"
+```
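+
+You can also point the test runner at a specific Drill JDBC driver jar with the `--jdbcCP` option (by default it uses the driver under `${DRILL_HOME}/jars/jdbc-driver`); the jar path below is only an example:
+
+```
+bin/run_tests --jdbcCP /path/to/drill-jdbc-all.jar -s <suites> -g <groups>
+```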
 
 Example:
  <pre><code> bin/run_tests -s Functional/aggregates,Functional/joins -g functional -x hbase -t 180 -n 20 -d
@@ -57,7 +71,8 @@
   -n concurrency (optional)
      Here, '20' queries can execute concurrently
   -x exclude dependencies (optional)
-     Here, any 'hbase' test suites within the specified directory are excluded
+     Here, any test suites within the specified directory that have the specified dependencies are excluded.
+     In case of "-x all", all test suites that have at least one dependency are excluded (see the example below).
   -h help (optional)
      Use this option to provide the usage of the command, which includes additional options
 </code></pre>
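+
+For example, to exclude every suite that declares at least one dependency (a hypothetical invocation combining the options above):
+
+<pre><code> bin/run_tests -s Functional -g functional -x all -t 180 -n 20 -d
+</code></pre>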
diff --git a/bin/run_tests b/bin/run_tests
index 0186f96..1a37cb1 100755
--- a/bin/run_tests
+++ b/bin/run_tests
@@ -5,6 +5,7 @@
 cd ${bin}/../
 
 default_debug_string="-Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,address=50000,server=y,suspend=y"
+JDBC_DRIVER_CP="${DRILL_HOME}/jars/jdbc-driver/drill-jdbc-all-${DRILL_VERSION}.jar"
 
 ARGS=()
 while [[ $# > 0 ]]
@@ -20,6 +21,11 @@
     DRILL_TEST_FRAMEWORK_JAVA_OPTS="$DRILL_TEST_FRAMEWORK_JAVA_OPTS $default_debug_string"
     shift
     ;;
+  --jdbcCP)
+    shift
+    JDBC_DRIVER_CP=$1
+    shift
+    ;;
   *)
     ARGS+=("$1")
     shift
@@ -34,7 +40,7 @@
 # use the following line when testing apache drill JDBC driver
 if [[ $HADOOP_VERSION == *"mapr"* ]]
 then
-  java $DRILL_TEST_FRAMEWORK_JAVA_OPTS -Xss40m -cp .:conf/:${DRILL_HOME}/jars/jdbc-driver/drill-jdbc-all-${DRILL_VERSION}.jar:framework/target/framework-1.0.0-SNAPSHOT-shaded.jar -Dfs.mapr.bailout.on.library.mismatch=false -Djava.io.tmpdir=/tmp/drill/tests -Djava.security.auth.login.config=/opt/mapr/conf/mapr.login.conf -Dzookeeper.sasl.client=false org.apache.drill.test.framework.TestDriver "${ARGS[@]}"
+  java $DRILL_TEST_FRAMEWORK_JAVA_OPTS -Xss40m -cp .:conf/:${JDBC_DRIVER_CP}:framework/target/framework-1.0.0-SNAPSHOT-shaded.jar -Dfs.mapr.bailout.on.library.mismatch=false -Djava.io.tmpdir=/tmp/drill/tests -Djava.security.auth.login.config=/opt/mapr/conf/mapr.login.conf -Dzookeeper.sasl.client=false org.apache.drill.test.framework.TestDriver "${ARGS[@]}"
 else
-  java $DRILL_TEST_FRAMEWORK_JAVA_OPTS -Xss40m -cp .:conf/:${DRILL_HOME}/jars/jdbc-driver/drill-jdbc-all-${DRILL_VERSION}.jar:framework/target/framework-1.0.0-SNAPSHOT-shaded.jar org.apache.drill.test.framework.TestDriver "${ARGS[@]}"
+  java $DRILL_TEST_FRAMEWORK_JAVA_OPTS -Xss40m -cp .:conf/:${JDBC_DRIVER_CP}:framework/target/framework-1.0.0-SNAPSHOT-shaded.jar org.apache.drill.test.framework.TestDriver "${ARGS[@]}"
 fi
diff --git a/conf/logback.xml b/conf/logback.xml
new file mode 100644
index 0000000..91004fb
--- /dev/null
+++ b/conf/logback.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<configuration>
+
+  <!-- Direct log messages to ./drill.log.timestamp file -->
+  <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <fileNamePattern>logs/drill.log.%d{yyyy-MM-dd-HH}</fileNamePattern>
+    </rollingPolicy>
+    <encoder>
+      <pattern>%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n</pattern>
+    </encoder>
+  </appender>
+
+  <!-- Direct log messages to stdout -->
+  <appender name="Drill" class="ch.qos.logback.core.ConsoleAppender">
+    <Target>System.out</Target>
+    <encoder>
+      <pattern>%m%n</pattern>
+    </encoder>
+  </appender>
+
+  <!-- Direct log messages to stdout -->
+  <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
+    <Target>System.out</Target>
+    <encoder>
+      <pattern>%m%n</pattern>
+    </encoder>
+  </appender>
+
+
+  <logger name="org.apache.zookeeper" level="WARN"/>
+
+  <logger name="DrillTestLogger" additivity="false" level="INFO">
+    <appender-ref ref="file"/>
+    <appender-ref ref="Drill"/>
+  </logger>
+
+  <logger name="org.apache.curator" level="WARN"/>
+
+  <!-- Root logger option -->
+  <root level="ERROR">
+    <appender-ref ref="stdout"/>
+  </root>
+
+</configuration>
diff --git a/framework/pom.xml b/framework/pom.xml
index 018766a..5bc6536 100644
--- a/framework/pom.xml
+++ b/framework/pom.xml
@@ -7,12 +7,27 @@
   <version>1.0.0-SNAPSHOT</version>
   <properties>
     <testng.version>6.4</testng.version>
-    <log4j.version>1.2.17</log4j.version>
+    <slf4j.version>1.7.26</slf4j.version>
+    <logback.version>1.2.9</logback.version>
     <jackson.version>2.10.0</jackson.version>
-    <drill.version>${env.DRILL_VERSION}</drill.version>
-    <hadoop.version>${env.HADOOP_VERSION}</hadoop.version>
-    <runtimeDepenencies>${env.DRILL_HOME}/conf</runtimeDepenencies>
+    <guava.version>30.1.1-jre</guava.version>
+    <drill.version>2.0.0-SNAPSHOT</drill.version>
+    <hadoop.version>3.2.2</hadoop.version>
+    <drill.test.framework.root>${project.basedir}/..</drill.test.framework.root>
+    <drill.home>${project.basedir}/../../drill</drill.home>
+    <drill.distribution>${drill.home}/distribution/target/apache-drill-${drill.version}/apache-drill-${drill.version}/</drill.distribution>
+    <drill.jdbc.driver.path>jars/jdbc-driver/drill-jdbc-all-${drill.version}.jar</drill.jdbc.driver.path>
+    <drill.java.exec.path>jars/drill-java-exec-${drill.version}.jar</drill.java.exec.path>
+    <m2.home>${env.HOME}/.m2</m2.home>
+    <runtimeDepenencies>${drill.distribution}/conf</runtimeDepenencies>
     <maven.surefire.plugin.version>2.19.1</maven.surefire.plugin.version>
+    <!-- Docker Plugin Properties -->
+    <maven.docker.plugin.version>0.38.1</maven.docker.plugin.version>
+    <docker.base.image>maven:3.8.4-jdk-11</docker.base.image>
+    <docker.automation.root>/root/testAutomation</docker.automation.root>
+    <docker.drill.test.framework.home>${docker.automation.root}/drill-test-framework</docker.drill.test.framework.home>
+    <docker.drill.home>${docker.automation.root}/apache-drill-${drill.version}</docker.drill.home>
+    <test.args>-s Functional -g empty</test.args>
   </properties>
   <dependencies>
     <dependency>
@@ -24,21 +39,46 @@
       <groupId>org.ojai</groupId>
       <artifactId>ojai</artifactId>
       <version>1.1</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
       <version>3.4.14</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <artifactId>log4j</artifactId>
+          <groupId>log4j</groupId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+      <version>${logback.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-core</artifactId>
+      <version>${logback.version}</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -58,7 +98,12 @@
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
-      <version>16.0.1</version>
+      <version>${guava.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -74,6 +119,10 @@
           <artifactId>slf4j-log4j12</artifactId>
         </exclusion>
         <exclusion>
+          <artifactId>log4j</artifactId>
+          <groupId>log4j</groupId>
+        </exclusion>
+        <exclusion>
           <groupId>org.codehaus.jackson</groupId>
           <artifactId>jackson-mapper-asl</artifactId>
         </exclusion>
@@ -92,7 +141,14 @@
       <groupId>org.apache.drill.exec</groupId>
       <artifactId>drill-jdbc-all</artifactId>
       <version>${drill.version}</version>
-      <systemPath>${env.DRILL_HOME}/jars/jdbc-driver/drill-jdbc-all-${drill.version}.jar</systemPath>
+      <systemPath>${drill.distribution}/${drill.jdbc.driver.path}</systemPath>
+      <scope>system</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>drill-java-exec</artifactId>
+      <version>${drill.version}</version>
+      <systemPath>${drill.distribution}/${drill.java.exec.path}</systemPath>
       <scope>system</scope>
     </dependency>
     <dependency>
@@ -141,7 +197,7 @@
       <id>mapr-releases</id>
       <url>http://repository.mapr.com/maven/</url>
       <releases>
-        <enabled>true</enabled>
+        <enabled>false</enabled>
       </releases>
       <snapshots>
         <enabled>false</enabled>
@@ -151,6 +207,61 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>io.fabric8</groupId>
+        <artifactId>docker-maven-plugin</artifactId>
+        <version>${maven.docker.plugin.version}</version>
+        <configuration>
+          <showLogs>true</showLogs>
+          <verbose>true</verbose>
+          <images>
+            <image>
+              <name>drill-test-framework</name>
+              <build>
+                <nocache>false</nocache>
+                <from>${docker.base.image}</from>
+              </build>
+              <run>
+                <containerNamePattern>%n_%t_%i</containerNamePattern>
+                <wait>
+                  <exit>0</exit>
+                  <time>1210000000</time>
+                </wait>
+                <volumes>
+                  <bind>
+                    <volume>${drill.test.framework.root}:${docker.drill.test.framework.home}</volume>
+                    <volume>${drill.distribution}:${docker.drill.home}</volume>
+                    <volume>${m2.home}:/root/.m2</volume>
+                  </bind>
+                </volumes>
+                <workingDir>${docker.drill.test.framework.home}</workingDir>
+                <network>
+                  <mode>host</mode>
+                </network>
+                <cmd>
+                  <shell>/bin/bash bin/run_tests --jdbcCP ${docker.drill.home}/${drill.jdbc.driver.path} ${test.args}</shell>
+                </cmd>
+              </run>
+            </image>
+          </images>
+        </configuration>
+        <!--<executions>
+          <execution>
+            <phase>install</phase>
+            <goals>
+              <goal>build</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>start</id>
+            <phase>test</phase>
+            <goals>
+              <goal>run</goal>
+              <goal>stop</goal>
+            </goals>
+          </execution>
+        </executions>-->
+      </plugin>
+      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
         <version>3.1.0</version>
diff --git a/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl b/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl
index 0ff2614..5e69767 100644
--- a/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl
+++ b/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl
@@ -1,4 +1,4 @@
-CREATE TABLE `complex.json` AS 
+CREATE TABLE dfs.drillTestDirComplexParquet.`complex.json` AS
   SELECT id, 
          gbyi, 
          gbyt, 
diff --git a/framework/resources/Datasources/ctas/create_tables_parquet.ddl b/framework/resources/Datasources/ctas/create_tables_parquet.ddl
index 6814a19..bc187ee 100644
--- a/framework/resources/Datasources/ctas/create_tables_parquet.ddl
+++ b/framework/resources/Datasources/ctas/create_tables_parquet.ddl
@@ -1,6 +1,6 @@
 alter session set `store.format` = 'parquet';
 
-create table ctas_t1(c1, c2, c3, c4) as
+create table dfs.ctas_parquet.ctas_t1(c1, c2, c3, c4) as
 select  j1.c_varchar,
         j2.c_varchar,
         j1.c_date,
@@ -12,7 +12,7 @@
         and j1.c_timestamp IS NOT NULL
 ;
 
-create table ctas_t2(c1, c2) as  
+create table dfs.ctas_parquet.ctas_t2(c1, c2) as
 select  
         j1.c_bigint,
         coalesce(j3.c_bigint, -1000)
@@ -23,19 +23,19 @@
         and j1.d9 > 0
 ;      
 
-create table ctas_t3 as
+create table dfs.ctas_parquet.ctas_t3 as
 select  j4.c_varchar || j2.c_varchar as c1
 from    
         dfs.`ctas`.`j4` right outer join dfs.`ctas`.`j2` on (j2.c_integer = j4.c_integer)
 ;  
 
-create table ctas_t4 as
+create table dfs.ctas_parquet.ctas_t4 as
 select  concat(j4.c_varchar, j2.c_varchar) as c1
 from    
         dfs.`ctas`.`j4` right outer join dfs.`ctas`.`j2` on (j2.c_integer = j4.c_integer)
 ;   
 
-create table ctas_t5(count_star, max_j1_c_int, min_j2_c_int, avg_j1_c_int, avg_j2_c_int) as
+create table dfs.ctas_parquet.ctas_t5(count_star, max_j1_c_int, min_j2_c_int, avg_j1_c_int, avg_j2_c_int) as
 select  
         count(*),
         max(j1.c_integer),
@@ -48,11 +48,11 @@
         j1.c_boolean is false
 ;
 
-/* create table ctas_t6(c1,c2,c3,c4,c5) as select c_integer, c_bigint, c_date, c_time, c_varchar from dfs.`ctas`.`j4` where c_bigint is null; */
+-- create table ctas_t6(c1,c2,c3,c4,c5) as select c_integer, c_bigint, c_date, c_time, c_varchar from dfs.`ctas`.`j4` where c_bigint is null;
 
-create table ctas_t8(c1) as select distinct c_integer from ( select c_integer from dfs.`ctas`.`j1` union all select c_integer from dfs.`ctas`.`j2`) as xyz;
+create table dfs.ctas_parquet.ctas_t8(c1) as select distinct c_integer from ( select c_integer from dfs.`ctas`.`j1` union all select c_integer from dfs.`ctas`.`j2`) as xyz;
 
-create table ctas_t9 as
+create table dfs.ctas_parquet.ctas_t9 as
 select  c_integer          as c1,
         count(c_date)      as c2 
 from    
@@ -63,7 +63,7 @@
         c_integer;
 
 
-create table ctas_t10 (c1, c2) as
+create table dfs.ctas_parquet.ctas_t10 (c1, c2) as
 select  
         count(distinct c_integer),
         sum(c_integer)
@@ -74,7 +74,7 @@
 order by 
         c_date;
 
-create table ctas_t11 as 
+create table dfs.ctas_parquet.ctas_t11 as
 select  
         count(distinct d9) as count_distinct_d9, 
         sum(d9)            as sum_d9 
@@ -92,7 +92,7 @@
         c_date,
         sum(d9) desc nulls first;
 
-create table ctas_t12 (c1, c2, c3) as
+create table dfs.ctas_parquet.ctas_t12 (c1, c2, c3) as
 select
         count(distinct c_bigint) ,
         sum(c_bigint)            ,
@@ -106,7 +106,7 @@
         2
 ;
 
-create table ctas_t13 (c1, c2)  as
+create table dfs.ctas_parquet.ctas_t13 (c1, c2)  as
 select  
         count(distinct d18),
         sum(d18) 
@@ -122,7 +122,7 @@
 group by 
         c_varchar;
 
-create table ctas_t14(x, y) as 
+create table dfs.ctas_parquet.ctas_t14(x, y) as
 select  
         c_date,
         count(distinct c_time)
@@ -133,7 +133,7 @@
 order by 
         c_date limit 10 offset 900;
 
-create table ctas_t15 as 
+create table dfs.ctas_parquet.ctas_t15 as
 select  
         count(distinct c_integer) as count_distinct_1,
         count(distinct c_varchar) as count_distinct_2,
diff --git a/framework/resources/Datasources/ctas/decimal/fragments/decimal_fragments.sh b/framework/resources/Datasources/ctas/decimal/fragments/decimal_fragments.sh
index 2aa524b..4fa6175 100755
--- a/framework/resources/Datasources/ctas/decimal/fragments/decimal_fragments.sh
+++ b/framework/resources/Datasources/ctas/decimal/fragments/decimal_fragments.sh
@@ -1,19 +1,6 @@
 #!/bin/bash
-source conf/drillTestConfig.properties
+DRILL_TEST_DATA_DIR=$1
 
 tar -xzmvf ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big.tar.gz -C ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments
 tar -xzmvf ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big_zero_prec.tar.gz -C ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments
 tar -xzmvf ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big_zero_scale.tar.gz -C ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments
-
-hadoop fs -mkdir -p /drill/testdata/decimal/fragments
-
-hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big.tsv /drill/testdata/decimal/fragments
-hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big_zero_prec.tsv /drill/testdata/decimal/fragments
-hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_big_zero_scale.tsv /drill/testdata/decimal/fragments
-
-if [ -z "$PASSWORD" ]
-then
-  ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.drillTestDir;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_fragments.ddl
-else
-  ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.drillTestDir;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments/decimal_fragments.ddl
-fi
diff --git a/framework/resources/Datasources/ctas_auto_partition/create_tables.ddl b/framework/resources/Datasources/ctas_auto_partition/create_tables.ddl
index 33a3a2a..0ec6eca 100644
--- a/framework/resources/Datasources/ctas_auto_partition/create_tables.ddl
+++ b/framework/resources/Datasources/ctas_auto_partition/create_tables.ddl
@@ -1,8 +1,8 @@
-create table `tpch_single_partition/lineitem` partition by (l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate from cp.`tpch/lineitem.parquet` l;
-create table `tpch_single_partition/orders` partition by (o_decimal) as select o.*, cast(case when o_orderpriority = '5-LOW' then 2.8 else 2 end as decimal(5,2)) o_decimal from cp.`tpch/orders.parquet` o;
-create table `tpch_single_partition/partsupp` partition by (ps_partkey_int) as select ps.*, cast(ps_partkey as int) ps_partkey_int from cp.`tpch/partsupp.parquet` ps;
-create table `tpch_single_partition/part` partition by (p_mfgr) as select * from cp.`tpch/part.parquet`;
-create table `tpch_single_partition/supplier` partition by (dbl_nationkey) as select s.*, cast(s_nationkey as double) dbl_nationkey from cp.`tpch/supplier.parquet` s;
-create table `tpch_single_partition/nation` partition by (n_rgnkey_float) as select n.*, cast(n_regionkey as float) n_rgnkey_float from cp.`tpch/nation.parquet` n;
-create table `tpch_single_partition/region` partition by (r_regionkey_bigint) as select r.*, cast(r_regionkey+10000000000 as bigint) r_regionkey_bigint from cp.`tpch/region.parquet` r;
-create table `tpch_single_partition/customer` partition by (binary_mktsegment) as select c.*, encode(c_mktsegment, 'UTF-8') binary_mktsegment from cp.`tpch/customer.parquet` c;
+create table dfs.ctasAutoPartition.`tpch_single_partition/lineitem` partition by (l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate from cp.`tpch/lineitem.parquet` l;
+create table dfs.ctasAutoPartition.`tpch_single_partition/orders` partition by (o_decimal) as select o.*, cast(case when o_orderpriority = '5-LOW' then 2.8 else 2 end as decimal(5,2)) o_decimal from cp.`tpch/orders.parquet` o;
+create table dfs.ctasAutoPartition.`tpch_single_partition/partsupp` partition by (ps_partkey_int) as select ps.*, cast(ps_partkey as int) ps_partkey_int from cp.`tpch/partsupp.parquet` ps;
+create table dfs.ctasAutoPartition.`tpch_single_partition/part` partition by (p_mfgr) as select * from cp.`tpch/part.parquet`;
+create table dfs.ctasAutoPartition.`tpch_single_partition/supplier` partition by (dbl_nationkey) as select s.*, cast(s_nationkey as double) dbl_nationkey from cp.`tpch/supplier.parquet` s;
+create table dfs.ctasAutoPartition.`tpch_single_partition/nation` partition by (n_rgnkey_float) as select n.*, cast(n_regionkey as float) n_rgnkey_float from cp.`tpch/nation.parquet` n;
+create table dfs.ctasAutoPartition.`tpch_single_partition/region` partition by (r_regionkey_bigint) as select r.*, cast(r_regionkey+10000000000 as bigint) r_regionkey_bigint from cp.`tpch/region.parquet` r;
+create table dfs.ctasAutoPartition.`tpch_single_partition/customer` partition by (binary_mktsegment) as select c.*, encode(c_mktsegment, 'UTF-8') binary_mktsegment from cp.`tpch/customer.parquet` c;
diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl
index 77c3fec..d90a6c9 100644
--- a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl
+++ b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl
@@ -1,5 +1,5 @@
-create table `existing_partition_pruning/lineitempart` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitempart`;
-create table `existing_partition_pruning/lineitem` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitem`;
-create table `existing_partition_pruning/lineitem_hierarchical_intstring` partition by (dir0, dir1) as select * from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring`;
-create table `existing_partition_pruning/ordersjson` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/ordersjson`;
-create table `existing_partition_pruning/orders` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/orders`;
+create table dfs.ctasAutoPartition.`existing_partition_pruning/lineitempart` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitempart`;
+create table dfs.ctasAutoPartition.`existing_partition_pruning/lineitem` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitem`;
+create table dfs.ctasAutoPartition.`existing_partition_pruning/lineitem_hierarchical_intstring` partition by (dir0, dir1) as select * from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring`;
+create table dfs.ctasAutoPartition.`existing_partition_pruning/ordersjson` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/ordersjson`;
+create table dfs.ctasAutoPartition.`existing_partition_pruning/orders` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/orders`;
diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl
index d890e70..8f83b94 100644
--- a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl
+++ b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl
@@ -1,5 +1,5 @@
-create table `tpch_multiple_partitions/lineitem_twopart` partition by (l_modline, l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l;
-create table `tpch_multiple_partitions/lineitem_twopart_ordered` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_moddate;
-create table `tpch_multiple_partitions/lineitem_twopart_ordered2` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_modline, l_moddate;
-create table `tpch_multiple_partitions/lineitem_twopart_ordered3` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_modline;
-create table `tpch_multiple_partitions/lineitem_twopart_zero` partition by (l_modline, l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l where 1=0;
+create table dfs.ctasAutoPartition.`tpch_multiple_partitions/lineitem_twopart` partition by (l_modline, l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l;
+create table dfs.ctasAutoPartition.`tpch_multiple_partitions/lineitem_twopart_ordered` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_moddate;
+create table dfs.ctasAutoPartition.`tpch_multiple_partitions/lineitem_twopart_ordered2` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_modline, l_moddate;
+create table dfs.ctasAutoPartition.`tpch_multiple_partitions/lineitem_twopart_ordered3` partition by (l_modline, l_moddate) as select l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l order by l_modline;
+create table dfs.ctasAutoPartition.`tpch_multiple_partitions/lineitem_twopart_zero` partition by (l_modline, l_moddate) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 l_moddate, mod(l_linenumber,3) l_modline from cp.`tpch/lineitem.parquet` l where 1=0;
diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl
index a9559c6..b47e911 100644
--- a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl
+++ b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl
@@ -1,11 +1,11 @@
-create table `tpch_single_partition1/lineitem` partition by (c1) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 + time '05:30:44.800' as c1 from cp.`tpch/lineitem.parquet` l;
-create table `tpch_single_partition1/orders` partition by (c1) as SELECT o.*, case when `year`(o_orderdate) > 1995 then interval '1 2' day to hour else interval '1 19:30:45' day to second end as c1 from cp.`tpch/orders.parquet` o;
-create table `tpch_single_partition1/partsupp`  partition by (c1) as select ps.*, case when ps_partkey > 1000 then interval '1-2' year to month else interval '1' year end as c1 from cp.`tpch/partsupp.parquet` ps;
-create table `tpch_single_partition1/part` partition by (p_mfgr) as select * from cp.`tpch/part.parquet`;
-create table `tpch_single_partition1/supplier` partition by (dbl_nationkey) as select s.*, cast(s_nationkey as double) dbl_nationkey from cp.`tpch/supplier.parquet` s;
-create table `tpch_single_partition1/nation` partition by (n_regionkey) as select * from cp.`tpch/nation.parquet` n;
-create table `tpch_single_partition1/region` partition by (c1) as select r.*, case when r_regionkey < 3 then time '12:12:12.12' else time '14:13:12.00' end c1 from cp.`tpch/region.parquet` r;
-create table `tpch_single_partition1/customer` partition by (c_bool) as select c.*, case when c_mktsegment = 'MACHINERY' then true else false end as c_bool from cp.`tpch/customer.parquet` c;
-create table `tpch_single_partition1/drill4449` partition by (l_discount) as select * from cp.`tpch/lineitem.parquet`;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/lineitem` partition by (c1) as select l.*, l_shipdate - extract(day from l_shipdate) + 1 + time '05:30:44.800' as c1 from cp.`tpch/lineitem.parquet` l;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/orders` partition by (c1) as SELECT o.*, case when `year`(o_orderdate) > 1995 then interval '1 2' day to hour else interval '1 19:30:45' day to second end as c1 from cp.`tpch/orders.parquet` o;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/partsupp`  partition by (c1) as select ps.*, case when ps_partkey > 1000 then interval '1-2' year to month else interval '1' year end as c1 from cp.`tpch/partsupp.parquet` ps;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/part` partition by (p_mfgr) as select * from cp.`tpch/part.parquet`;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/supplier` partition by (dbl_nationkey) as select s.*, cast(s_nationkey as double) dbl_nationkey from cp.`tpch/supplier.parquet` s;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/nation` partition by (n_regionkey) as select * from cp.`tpch/nation.parquet` n;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/region` partition by (c1) as select r.*, case when r_regionkey < 3 then time '12:12:12.12' else time '14:13:12.00' end c1 from cp.`tpch/region.parquet` r;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/customer` partition by (c_bool) as select c.*, case when c_mktsegment = 'MACHINERY' then true else false end as c_bool from cp.`tpch/customer.parquet` c;
+create table dfs.ctasAutoPartition.`tpch_single_partition1/drill4449` partition by (l_discount) as select * from cp.`tpch/lineitem.parquet`;
 
-refresh table metadata `tpch_single_partition1/drill4449`;
+refresh table metadata dfs.ctasAutoPartition.`tpch_single_partition1/drill4449`;
diff --git a/framework/resources/Datasources/join/crt_tbl_prtnby_nulls.ddl b/framework/resources/Datasources/join/crt_tbl_prtnby_nulls.ddl
index 172342a..9bb818b 100644
--- a/framework/resources/Datasources/join/crt_tbl_prtnby_nulls.ddl
+++ b/framework/resources/Datasources/join/crt_tbl_prtnby_nulls.ddl
@@ -1,6 +1,6 @@
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_chr_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_chr_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_chr_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_chr_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -9,11 +9,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         CASE when columns[3] = '' THEN CAST(null AS char(1)) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_vrchr_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_vrchr_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_vrchr_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_vrchr_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -22,11 +22,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS VARCHAR(2)) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_int_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_int_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_int_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_int_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -35,11 +35,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS INTEGER) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_dbl_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_dbl_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_dbl_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_dbl_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -48,11 +48,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS DOUBLE) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_date_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_date_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_date_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_date_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -61,11 +61,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS DATE) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_time_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_time_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_time_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_time_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -74,11 +74,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS TIME) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_tmstmp_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_tmstmp_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_tmstmp_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_tmstmp_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -87,11 +87,11 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS TIMESTAMP) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
 
-DROP TABLE IF EXISTS tbl_prtnby_all_nulls_intrvlday_col;
+DROP TABLE IF EXISTS dfs.`Join`.tbl_prtnby_all_nulls_intrvlday_col;
 
-CREATE TABLE tbl_prtnby_all_nulls_intrvlday_col
+CREATE TABLE dfs.`Join`.tbl_prtnby_all_nulls_intrvlday_col
 PARTITION BY (c4) 
 AS
 SELECT
@@ -100,4 +100,4 @@
         CAST(columns[2] AS VARCHAR(65)) c3,
         cASe when columns[3] = '' THEN CAST(null AS INTERVAL DAY) END c4,
         CAST(columns[4] AS DATE) c5
-FROM `allNullsInCol.csv`;
+FROM dfs.`Join`.`allNullsInCol.csv`;
diff --git a/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl b/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl
index ef5317d..f0566bd 100644
--- a/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl
+++ b/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl
@@ -1,22 +1,22 @@
-DROP TABLE IF EXISTS l_tblprtnby_intcl;
-CREATE TABLE l_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_chrcl;
-CREATE TABLE l_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_vrchrcl1;
-CREATE TABLE l_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_vrchrcl2;
-CREATE TABLE l_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_dtcl;
-CREATE TABLE l_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_timcl;
-CREATE TABLE l_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_tmstmpcl;
-CREATE TABLE l_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_fltcl;
-CREATE TABLE l_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_intrvlyrcl;
-CREATE TABLE l_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_intrvldycl;
-CREATE TABLE l_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
-DROP TABLE IF EXISTS l_tblprtnby_blncl;
-CREATE TABLE l_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_intcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_chrcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_vrchrcl1;
+CREATE TABLE dfs.`Join`.l_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_vrchrcl2;
+CREATE TABLE dfs.`Join`.l_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_dtcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_timcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_tmstmpcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_fltcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_intrvlyrcl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_intrvldycl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
+DROP TABLE IF EXISTS dfs.`Join`.l_tblprtnby_blncl;
+CREATE TABLE dfs.`Join`.l_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`;
diff --git a/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl b/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl
index b63e2f6..f9b64d6 100644
--- a/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl
+++ b/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl
@@ -1,22 +1,22 @@
-DROP TABLE IF EXISTS r_tblprtnby_intcl;
-CREATE TABLE r_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_chrcl;
-CREATE TABLE r_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_vrchrcl1;
-CREATE TABLE r_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_vrchrcl2;
-CREATE TABLE r_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_dtcl;
-CREATE TABLE r_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_timcl;
-CREATE TABLE r_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_tmstmpcl;
-CREATE TABLE r_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_fltcl;
-CREATE TABLE r_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_intrvlyrcl;
-CREATE TABLE r_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_intrvldycl;
-CREATE TABLE r_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
-DROP TABLE IF EXISTS r_tblprtnby_blncl;
-CREATE TABLE r_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_intcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_chrcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_vrchrcl1;
+CREATE TABLE dfs.`Join`.r_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_vrchrcl2;
+CREATE TABLE dfs.`Join`.r_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_dtcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_timcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_tmstmpcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_fltcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_intrvlyrcl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_intrvldycl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
+DROP TABLE IF EXISTS dfs.`Join`.r_tblprtnby_blncl;
+CREATE TABLE dfs.`Join`.r_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`;
diff --git a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql
index 6e5d306..584d123 100644
--- a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql
+++ b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql
@@ -1,3 +1,3 @@
-create or replace view cast_tbl_1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_1;
+create or replace view dfs.joins.cast_tbl_1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_1;
 
-create or replace view cast_tbl_2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_2;
+create or replace view dfs.joins.cast_tbl_2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_2;
diff --git a/framework/resources/Datasources/min_max_dir/scripts/refresh_metadata.ddl b/framework/resources/Datasources/min_max_dir/scripts/refresh_metadata.ddl
index 8e9c395..ad16c7b 100755
--- a/framework/resources/Datasources/min_max_dir/scripts/refresh_metadata.ddl
+++ b/framework/resources/Datasources/min_max_dir/scripts/refresh_metadata.ddl
@@ -1 +1 @@
-refresh table metadata min_max_dir_metadatacache;
+refresh table metadata dfs.drillTestDir.min_max_dir_metadatacache;
diff --git a/framework/resources/Datasources/schema_change_empty_batch/json/setup.sh b/framework/resources/Datasources/schema_change_empty_batch/json/setup.sh
index 5fb583e..e1ea2ec 100755
--- a/framework/resources/Datasources/schema_change_empty_batch/json/setup.sh
+++ b/framework/resources/Datasources/schema_change_empty_batch/json/setup.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-source conf/drillTestConfig.properties
+DRILL_TEST_DATA_DIR=$1
 
 set -x
 set -e
@@ -39,21 +39,4 @@
 
 fi
 
-if ! $(hadoop fs -test -d ${DRILL_TESTDATA}/schema_change_empty_batch/json)
-    then
-
-        echo "Copying to hadoop"
-
-        hadoop fs -mkdir -p ${DRILL_TESTDATA}/schema_change_empty_batch/json
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/json/part ${DRILL_TESTDATA}/schema_change_empty_batch/json/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/json/partsupp ${DRILL_TESTDATA}/schema_change_empty_batch/json/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/json/empty ${DRILL_TESTDATA}/schema_change_empty_batch/json/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/json/part_small ${DRILL_TESTDATA}/schema_change_empty_batch/json/
-
-fi
-
 set +x
diff --git a/framework/resources/Datasources/schema_change_empty_batch/text/dfs/setup.sh b/framework/resources/Datasources/schema_change_empty_batch/text/dfs/setup.sh
index 0836dfe..5fca382 100755
--- a/framework/resources/Datasources/schema_change_empty_batch/text/dfs/setup.sh
+++ b/framework/resources/Datasources/schema_change_empty_batch/text/dfs/setup.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-source conf/drillTestConfig.properties
+DRILL_TEST_DATA_DIR=$1
 
 set -x
 set -e
@@ -35,24 +35,7 @@
 
         touch ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/partsupp/partsuppa{f..h}.tbl
 
-        touch ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/empty/empty{a..d}.tbl
-
-fi
-
-if ! $(hadoop fs -test -d ${DRILL_TESTDATA}/schema_change_empty_batch/psv)
-    then
-
-        echo "Copying to hadoop"
-
-        hadoop fs -mkdir -p ${DRILL_TESTDATA}/schema_change_empty_batch/psv
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/part ${DRILL_TESTDATA}/schema_change_empty_batch/psv/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/partsupp ${DRILL_TESTDATA}/schema_change_empty_batch/psv/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/empty ${DRILL_TESTDATA}/schema_change_empty_batch/psv/
-
-        hadoop fs -put ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/json_field ${DRILL_TESTDATA}/schema_change_empty_batch/psv/
+        touch ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/empty/empty{a..d}.tbl
+
+        touch ${DRILL_TEST_DATA_DIR}/Datasources/schema_change_empty_batch/data/psv/json_field/empty_json_field.tbl
 
 fi
 
diff --git a/framework/resources/Datasources/subqueries/create_tables.sql b/framework/resources/Datasources/subqueries/create_tables.sql
index 6456459..25ee260 100644
--- a/framework/resources/Datasources/subqueries/create_tables.sql
+++ b/framework/resources/Datasources/subqueries/create_tables.sql
@@ -1,4 +1,4 @@
 alter session set `store.parquet.reader.int96_as_timestamp`=true;
-create table hive1_parquet_part partition by (create_timestamp1) as select * from hive1_parquet limit 10;
+create table dfs.subqueries.hive1_parquet_part partition by (create_timestamp1) as select * from dfs.subqueries.hive1_parquet limit 10;
 alter session set `store.parquet.reader.int96_as_timestamp`=false;
-refresh table metadata `hive1_parquet_part`;
+refresh table metadata dfs.subqueries.`hive1_parquet_part`;
diff --git a/framework/resources/Datasources/subqueries/create_views.sql b/framework/resources/Datasources/subqueries/create_views.sql
index d5d5951..28e2e71 100644
--- a/framework/resources/Datasources/subqueries/create_views.sql
+++ b/framework/resources/Datasources/subqueries/create_views.sql
@@ -1,4 +1,4 @@
-create or replace view optional_type_v as
+create or replace view dfs.subqueries.optional_type_v as
 select
         cast(c_varchar as varchar(100))		as c_varchar,
         cast(c_integer as integer)		as c_integer,
@@ -10,10 +10,10 @@
         cast(c_timestamp as timestamp)		as c_timestamp,
         cast(c_boolean as boolean)		as c_boolean
 from
-        j1
+    dfs.subqueries.j1
 ;
 
-create or replace view required_type_v as
+create or replace view dfs.subqueries.required_type_v as
 select
         cast(c_varchar as varchar(100))		as c_varchar,
         cast(c_integer as integer)		as c_integer,
@@ -25,5 +25,5 @@
         cast(c_timestamp as timestamp)		as c_timestamp,
         cast(c_boolean as boolean)		as c_boolean
 from
-        j3
+    dfs.subqueries.j3
 ;
diff --git a/framework/resources/Datasources/table_function/DRILL-5166_generate_data.sh b/framework/resources/Datasources/table_function/DRILL-5166_generate_data.sh
index 901bd21..9f4d4de 100755
--- a/framework/resources/Datasources/table_function/DRILL-5166_generate_data.sh
+++ b/framework/resources/Datasources/table_function/DRILL-5166_generate_data.sh
@@ -1,8 +1,7 @@
 #!/bin/bash
-source conf/drillTestConfig.properties
+DRILL_TEST_DATA_DIR=$1
 
 test_dir=${DRILL_TEST_DATA_DIR}/Datasources/table_function
-dfs_location=${DRILL_TESTDATA}/table_function
 
 untar_data () {
     local location=$1
@@ -12,37 +11,21 @@
     tar xzvf $location/$tar_name -C $location ./$file_name
 }
 
-copy_to_dfs () {
-    local location=$1
-    local file_name=$2
-    local file=$location/$file_name
-    local destination=$3
-
-    hadoop fs -test -f $destination/$file_name
-    if [ $? -eq 0 ]
-    then
-	    hadoop fs -rm -r $destination/$file_name
-    fi
-    hadoop fs -put $file $destination/
-}
-
 prepare_dataset () {
     local location=$1
     local file_name=$2
     local file=$test_dir/$file_name
     local tar_name=$3
-    local destination=$4
 
     # Reusing of existing file if exists
     if [ ! -f $file ]
     then
         untar_data $location $tar_name $file_name
-        copy_to_dfs $location $file_name $destination        
     fi
 }
 
 base_file_name="DRILL-5166_NPE_with_table_function"
 tar_name="DRILL-5166_data.tar.gz"
-prepare_dataset $test_dir ${base_file_name}_group_select_1.csv $tar_name $dfs_location
-prepare_dataset $test_dir ${base_file_name}_group_select_2.csv $tar_name $dfs_location
-prepare_dataset $test_dir ${base_file_name}_large_file.csv $tar_name $dfs_location
+prepare_dataset $test_dir ${base_file_name}_group_select_1.csv $tar_name
+prepare_dataset $test_dir ${base_file_name}_group_select_2.csv $tar_name
+prepare_dataset $test_dir ${base_file_name}_large_file.csv $tar_name
diff --git a/framework/resources/Datasources/table_stats/analyze_tables.sql b/framework/resources/Datasources/table_stats/analyze_tables.sql
index bf1c723..3079f3a 100755
--- a/framework/resources/Datasources/table_stats/analyze_tables.sql
+++ b/framework/resources/Datasources/table_stats/analyze_tables.sql
@@ -1,4 +1,5 @@
 set `store.parquet.reader.int96_as_timestamp`=true;
+use dfs.drillTestDir;
 analyze table `table_stats/alltypes_with_nulls` compute statistics;
 analyze table `table_stats/date_intervals` compute statistics;
 analyze table `table_stats/impala_parquet` compute statistics;
diff --git a/framework/resources/Datasources/tpcds/createViewsParquet.sql b/framework/resources/Datasources/tpcds/createViewsParquet.sql
index 76feaee..6c13527 100755
--- a/framework/resources/Datasources/tpcds/createViewsParquet.sql
+++ b/framework/resources/Datasources/tpcds/createViewsParquet.sql
@@ -1,6 +1,6 @@
 use dfs.tpcds_sf1_parquet_views;
 
-create or replace view customer as select
+create or replace view dfs.tpcds_sf1_parquet_views.customer as select
 cast(c_customer_sk as integer)  as c_customer_sk,
 cast(c_customer_id as varchar(200))  as c_customer_id,
 cast(c_current_cdemo_sk as integer)  as c_current_cdemo_sk,
@@ -21,7 +21,7 @@
 cast(c_last_review_date as varchar(200))  as c_last_review_date
 from dfs.`/drill/testdata/tpcds_sf1/parquet/customer`;
 
-create or replace view customer_address as select
+create or replace view dfs.tpcds_sf1_parquet_views.customer_address as select
 cast(ca_address_sk as integer)   as ca_address_sk,
 cast(ca_address_id as varchar(200))  as ca_address_id,
 cast(ca_street_number as varchar(200))  as ca_street_number,
@@ -37,7 +37,7 @@
 cast(ca_location_type as varchar(200))  as ca_location_type
 from dfs.`/drill/testdata/tpcds_sf1/parquet/customer_address`;
 
-create or replace view customer_demographics as select
+create or replace view dfs.tpcds_sf1_parquet_views.customer_demographics as select
 cast( cd_demo_sk as integer)  as cd_demo_sk,
 cast( cd_gender as varchar(200))  as cd_gender ,
 cast( cd_marital_status as varchar(200))  as cd_marital_status,
@@ -49,7 +49,7 @@
 cast( cd_dep_college_count as integer)  as cd_dep_college_count
 from dfs.`/drill/testdata/tpcds_sf1/parquet/customer_demographics`;
 
-create or replace view household_demographics as select
+create or replace view dfs.tpcds_sf1_parquet_views.household_demographics as select
 cast( hd_demo_sk as integer)  as hd_demo_sk,
 cast( hd_income_band_sk as integer)   as hd_income_band_sk,
 cast( hd_buy_potential as varchar(200))  as hd_buy_potential,
@@ -57,7 +57,7 @@
 cast( hd_vehicle_count as integer)   as hd_vehicle_count
 from dfs.`/drill/testdata/tpcds_sf1/parquet/household_demographics`;
 
-create or replace view item as select
+create or replace view dfs.tpcds_sf1_parquet_views.item as select
 cast( i_item_sk as integer)  as i_item_sk,
 cast( i_item_id as varchar(200))   as i_item_id,
 cast( i_rec_start_date as date)  as i_rec_start_date,
@@ -82,7 +82,7 @@
 cast( i_product_name as varchar(200))   as i_product_name
 from dfs.`/drill/testdata/tpcds_sf1/parquet/item`;
 
-create or replace view promotion as select
+create or replace view dfs.tpcds_sf1_parquet_views.promotion as select
 cast( p_promo_sk as integer)  as p_promo_sk,
 cast( p_promo_id as varchar(200))  as p_promo_id,
 cast( p_start_date_sk as integer)  as p_start_date_sk,
@@ -104,7 +104,7 @@
 cast( p_discount_active as varchar(200))  as p_discount_active
 from dfs.`/drill/testdata/tpcds_sf1/parquet/promotion`;
 
-create or replace view time_dim as select
+create or replace view dfs.tpcds_sf1_parquet_views.time_dim as select
 cast( t_time_sk as integer)  as t_time_sk,
 cast( t_time_id as varchar(200))  as t_time_id,
 cast( t_time as integer)  as t_time,
@@ -117,7 +117,7 @@
 cast( t_meal_time as varchar(200))  as t_meal_time
 from dfs.`/drill/testdata/tpcds_sf1/parquet/time_dim`;
 
-create or replace view date_dim as select
+create or replace view dfs.tpcds_sf1_parquet_views.date_dim as select
 cast( d_date_sk as integer)  as d_date_sk,
 cast( d_date_id as varchar(200))  as d_date_id,
 cast( d_date as date)  as d_date,
@@ -148,7 +148,7 @@
 cast( d_current_year as varchar(200))  as d_current_year
 from dfs.`/drill/testdata/tpcds_sf1/parquet/date_dim`;
 
-create or replace view store as select
+create or replace view dfs.tpcds_sf1_parquet_views.store as select
 cast( s_store_sk as integer)  as s_store_sk,
 cast( s_store_id as varchar(200))  as s_store_id,
 cast( s_rec_start_date as date)  as s_rec_start_date,
@@ -180,7 +180,7 @@
 cast( s_tax_precentage as double)  as s_tax_precentage
 from dfs.`/drill/testdata/tpcds_sf1/parquet/store`;
 
-create or replace view store_sales as select
+create or replace view dfs.tpcds_sf1_parquet_views.store_sales as select
 cast( ss_sold_date_sk as integer)  as ss_sold_date_sk,
 cast( ss_sold_time_sk as integer)  as ss_sold_time_sk,
 cast( ss_item_sk as integer)  as ss_item_sk,
@@ -206,7 +206,7 @@
 cast( ss_net_profit as double)  as ss_net_profit
 from dfs.`/drill/testdata/tpcds_sf1/parquet/store_sales`;
 
-create or replace view warehouse as select
+create or replace view dfs.tpcds_sf1_parquet_views.warehouse as select
 cast( w_warehouse_sk as integer)  as w_warehouse_sk,
 cast( w_warehouse_id as varchar(200))  as w_warehouse_id,
 cast( w_warehouse_name as varchar(200))  as w_warehouse_name,
@@ -223,7 +223,7 @@
 cast( w_gmt_offset as double)  as w_gmt_offset
 from dfs.`/drill/testdata/tpcds_sf1/parquet/warehouse`;
 
-create or replace view ship_mode as select
+create or replace view dfs.tpcds_sf1_parquet_views.ship_mode as select
 cast( sm_ship_mode_sk as integer)  as sm_ship_mode_sk,
 cast( sm_ship_mode_id as varchar(200))  as sm_ship_mode_id,
 cast( sm_type as varchar(200))  as sm_type,
@@ -232,19 +232,19 @@
 cast( sm_contract as varchar(200))  as sm_contract
 from dfs.`/drill/testdata/tpcds_sf1/parquet/ship_mode`;
 
-create or replace view reason as select
+create or replace view dfs.tpcds_sf1_parquet_views.reason as select
 cast( r_reason_sk as integer)  as r_reason_sk,
 cast( r_reason_id as varchar(200))  as r_reason_id,
 cast( r_reason_desc as varchar(200))  as r_reason_desc
 from dfs.`/drill/testdata/tpcds_sf1/parquet/reason`;
 
-create or replace view income_band as select
+create or replace view dfs.tpcds_sf1_parquet_views.income_band as select
 cast( ib_income_band_sk as integer)  as ib_income_band_sk,
 cast( ib_lower_bound as integer)  as ib_lower_bound,
 cast( ib_upper_bound as integer)  as ib_upper_bound
 from dfs.`/drill/testdata/tpcds_sf1/parquet/income_band`;
 
-create or replace view call_center as select
+create or replace view dfs.tpcds_sf1_parquet_views.call_center as select
 cast( cc_call_center_sk as integer)  as cc_call_center_sk,
 cast( cc_call_center_id as varchar(200))  as cc_call_center_id,
 cast( cc_rec_start_date as date)  as cc_rec_start_date,
@@ -278,7 +278,7 @@
 cast( cc_tax_percentage as double)  as cc_tax_percentage
 from dfs.`/drill/testdata/tpcds_sf1/parquet/call_center`;
 
-create or replace view web_site as select
+create or replace view dfs.tpcds_sf1_parquet_views.web_site as select
 cast( web_site_sk as integer)  as web_site_sk,
 cast( web_site_id as varchar(200))  as web_site_id,
 cast( web_rec_start_date as date)  as web_rec_start_date,
@@ -307,7 +307,7 @@
 cast( web_tax_percentage as double)  as web_tax_percentage
 from dfs.`/drill/testdata/tpcds_sf1/parquet/web_site`;
 
-create or replace view store_returns as select
+create or replace view dfs.tpcds_sf1_parquet_views.store_returns as select
 cast( sr_returned_date_sk as integer)  as sr_returned_date_sk,
 cast( sr_return_time_sk as integer)  as sr_return_time_sk,
 cast( sr_item_sk as integer)  as sr_item_sk,
@@ -330,7 +330,7 @@
 cast( sr_net_loss as double)  as sr_net_loss
 from dfs.`/drill/testdata/tpcds_sf1/parquet/store_returns`;
 
-create or replace view web_page as select
+create or replace view dfs.tpcds_sf1_parquet_views.web_page as select
 cast( wp_web_page_sk as integer)  as wp_web_page_sk,
 cast( wp_web_page_id as varchar(200))  as wp_web_page_id,
 cast( wp_rec_start_date as date)  as wp_rec_start_date,
@@ -347,7 +347,7 @@
 cast( wp_max_ad_count as integer)  as wp_max_ad_count
 from dfs.`/drill/testdata/tpcds_sf1/parquet/web_page`;
 
-create or replace view catalog_page as select
+create or replace view dfs.tpcds_sf1_parquet_views.catalog_page as select
 cast( cp_catalog_page_sk as integer)  as cp_catalog_page_sk,
 cast( cp_catalog_page_id as varchar(200))  as cp_catalog_page_id,
 cast( cp_start_date_sk as integer)  as cp_start_date_sk,
@@ -359,14 +359,14 @@
 cast( cp_type as varchar(200))  as cp_type
 from dfs.`/drill/testdata/tpcds_sf1/parquet/catalog_page`;
 
-create or replace view inventory as select
+create or replace view dfs.tpcds_sf1_parquet_views.inventory as select
 cast( inv_date_sk as integer)  as inv_date_sk,
 cast( inv_item_sk as integer)  as inv_item_sk,
 cast( inv_warehouse_sk as integer)  as inv_warehouse_sk,
 cast( inv_quantity_on_hand as integer)  as inv_quantity_on_hand
 from dfs.`/drill/testdata/tpcds_sf1/parquet/inventory`;
 
-create or replace view catalog_returns as select
+create or replace view dfs.tpcds_sf1_parquet_views.catalog_returns as select
 cast( cr_returned_date_sk as integer)  as cr_returned_date_sk,
 cast( cr_returned_time_sk as integer)  as cr_returned_time_sk,
 cast( cr_item_sk as integer)  as cr_item_sk,
@@ -396,7 +396,7 @@
 cast( cr_net_loss as double)  as cr_net_loss
 from dfs.`/drill/testdata/tpcds_sf1/parquet/catalog_returns`;
 
-create or replace view web_returns as select
+create or replace view dfs.tpcds_sf1_parquet_views.web_returns as select
 cast( wr_returned_date_sk as integer)  as wr_returned_date_sk,
 cast( wr_returned_time_sk as integer)  as wr_returned_time_sk,
 cast( wr_item_sk as integer)  as wr_item_sk,
@@ -423,7 +423,7 @@
 cast( wr_net_loss as double)  as wr_net_loss
 from dfs.`/drill/testdata/tpcds_sf1/parquet/web_returns`;
 
-create or replace view web_sales as select
+create or replace view dfs.tpcds_sf1_parquet_views.web_sales as select
 cast( ws_sold_date_sk as integer)  as ws_sold_date_sk,
 cast( ws_sold_time_sk as integer)  as ws_sold_time_sk,
 cast( ws_ship_date_sk as integer)  as ws_ship_date_sk,
@@ -460,7 +460,7 @@
 cast( ws_net_profit as double)  as ws_net_profit
 from dfs.`/drill/testdata/tpcds_sf1/parquet/web_sales`;
 
-create or replace view catalog_sales as select
+create or replace view dfs.tpcds_sf1_parquet_views.catalog_sales as select
 cast( cs_sold_date_sk as integer)  as cs_sold_date_sk,
 cast( cs_sold_time_sk as integer)  as cs_sold_time_sk,
 cast( cs_ship_date_sk as integer)  as cs_ship_date_sk,
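Qualifying every view name with dfs.tpcds_sf1_parquet_views pins the views to that workspace even if the session schema set by `use` changes before a later statement runs. Downstream queries can reference them the same way, for example (illustrative only):

```
select count(*) from dfs.tpcds_sf1_parquet_views.store_sales;
```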
diff --git a/framework/resources/Functional/complex/parquet/complex.json b/framework/resources/Functional/complex/parquet/complex.json
index 7f55eac..b734509 100644
--- a/framework/resources/Functional/complex/parquet/complex.json
+++ b/framework/resources/Functional/complex/parquet/complex.json
@@ -19,8 +19,18 @@
     ],
     "datasources": [
         {
-            "mode": "gen",
-            "src": "Datasources/ctas/create_tables_complex_parquet.sh",
+            "mode": "rm",
+            "src": "",
+            "dest": "complex/parquet/complex.json"
+        },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "complex/parquet"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/ctas/create_tables_complex_parquet.ddl",
             "dest": ""
         }
     ]
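The rm stage clears any leftover table from a previous run and mkdir recreates its parent directory before the ddl stage runs the listed .ddl file. On a hadoop-backed dfs the first two stages correspond roughly to the following, assuming the /drill/testdata root seen elsewhere in this patch (an analogy, not the framework's actual implementation):

```
hadoop fs -rm -r /drill/testdata/complex/parquet/complex.json
hadoop fs -mkdir -p /drill/testdata/complex/parquet
```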
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json
index 8718d9d..0f7f7be 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_csv_data_pruning",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "ctas_auto_partition_csv_data_pruning",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json
index 63e5532..a59220a 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_hierarchical_data",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "ctas_auto_partition_hierarchical_data",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json
index ca07990..e473772 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_csv_plan_pruning",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex-no-order"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "ctas_auto_partition_csv_plan_pruning",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex-no-order"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json
index 13bc84c..e2bca86 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_json_data",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "ctas_auto_partition_json_data",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json
index d69ba6f..876c97b 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_parquet_data",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "ctas_auto_partition_parquet_data",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json
index 4975e4a..c6db2d5 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "ctas_auto_partition_parquet_plan",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "ctas_auto_partition_parquet_plan",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json
index b0a5d5c..094cdd2 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json
@@ -1,46 +1,66 @@
 {
-    "testId": "ctas_auto_partition_csv_data_general",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/ctas_auto_partition/drill-3947",
-                "dest": "/drill/testdata/drill-3947"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "ctas_auto_partition_csv_data_general",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/ctas_auto_partition/drill-3947",
+      "dest": "/drill/testdata/drill-3947"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/tpch_single_partition1"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/tpch_single_partition1"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json
index 57fcdd6..e592c61 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json
@@ -1,46 +1,66 @@
 {
-    "testId": "ctas_auto_partition_csv_plan_general",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/ctas_auto_partition/drill-3947",
-                "dest": "/drill/testdata/drill-3947"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "ctas_auto_partition_csv_plan_general",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/ctas_auto_partition/drill-3947",
+      "dest": "/drill/testdata/drill-3947"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/tpch_single_partition1"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/tpch_single_partition1"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json
index 2eca696..2594f95 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json
@@ -18,9 +18,19 @@
     ],
     "datasources": [
         {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh",
-            "dest": ""
+            "mode": "rm",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_multiple_partitions"
+        },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_multiple_partitions"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl"
         }
     ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json
index 2e5d321..4bdcbac 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json
@@ -18,9 +18,19 @@
     ],
     "datasources": [
         {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh",
-            "dest": ""
+            "mode": "rm",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_multiple_partitions"
+        },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_multiple_partitions"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl"
         }
     ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json
index 825f7e3..b7dfff6 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json
@@ -58,9 +58,19 @@
             "dest": "Tpch0.01/parquet/orders/orders.parquet"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/setup.sh",
+            "mode": "rm",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_single_partition"
+        },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "ctas_auto_partition/tpch_single_partition"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/ctas_auto_partition/create_tables.ddl",
             "dest": ""
-        } 
+        }
     ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json
index 66f47c0..d38d95e 100644
--- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json
+++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json
@@ -17,10 +17,20 @@
         }
     ],
     "datasources": [
-       {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh",
-            "dest": ""
-       } 
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "ctas_auto_partition/tpch_single_partition1"
+      },
+      {
+        "mode": "mkdir",
+        "src": "",
+        "dest": "ctas_auto_partition/tpch_single_partition1"
+      },
+      {
+        "mode": "ddl",
+        "src": "Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl",
+        "dest": ""
+      }
     ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json b/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json
index 33324ab..723da34 100644
--- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json
+++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json
@@ -1,36 +1,36 @@
 {
-    "testId": "CTASFlattenOperators100000Rows",
-    "type": "group",
-    "description": "Test flatten with operators",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.flatten_operators_100000rows",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-	{
-          "mode": "cp",
-          "src": "Datasources/flatten_operators/100000rows",
-          "dest": "flatten_operators/100000rows"
-    	},
-        {
-          "mode": "cp",
-          "src": "Datasources/ctas_flatten",
-          "dest": "ctas_flatten"
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/ctas_flatten/deleteCTASTables.sh",
-           "dest": ""
-        }
-    ]
+  "testId": "CTASFlattenOperators100000Rows",
+  "type": "group",
+  "description": "Test flatten with operators",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.flatten_operators_100000rows",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_flatten"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/flatten_operators/100000rows",
+      "dest": "flatten_operators/100000rows"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/ctas_flatten",
+      "dest": "ctas_flatten"
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json b/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json
index e95f621..63d5cb8 100644
--- a/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json
+++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json
@@ -1,36 +1,36 @@
 {
-    "testId": "CTASFlattenOperators2Rows",
-    "type": "group",
-    "description": "Test flatten with operators",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.flatten_operators",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-	{
-          "mode": "cp",
-          "src": "Datasources/flatten_operators/2rows",
-          "dest": "flatten_operators/2rows"
-    	},
-        {
-          "mode": "cp",
-          "src": "Datasources/ctas_flatten",
-          "dest": "ctas_flatten"
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/ctas_flatten/deleteCTASTables.sh",
-           "dest": ""
-        }
-    ]
+  "testId": "CTASFlattenOperators2Rows",
+  "type": "group",
+  "description": "Test flatten with operators",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.flatten_operators",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_flatten"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/flatten_operators/2rows",
+      "dest": "flatten_operators/2rows"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/ctas_flatten",
+      "dest": "ctas_flatten"
+    }
+  ]
 }
diff --git a/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json b/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json
index bd14793..fe77ca9 100644
--- a/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json
+++ b/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json
@@ -1,31 +1,42 @@
 {
-    "testId": "ctas_joins",
-    "type": "group",
-    "description": "ctas with joins",
-    "categories": [ "functional" ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.ctas_parquet",
-            "output-format": "tsv",
-            "expected-file": ".*.res",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/ctas",
-            "dest": "ctas"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas/create_tables.sh",
-            "dest": ""
-        }
-
-    ]
+  "testId": "ctas_joins",
+  "type": "group",
+  "description": "ctas with joins",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.ctas_parquet",
+      "output-format": "tsv",
+      "expected-file": ".*.res",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas/parquet"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas/parquet"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/ctas",
+      "dest": "ctas"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas/create_tables_parquet.ddl",
+      "dest": ""
+    }
+  ]
 }
 
diff --git a/framework/resources/Functional/decimal_parquet/decimal_parquet.json b/framework/resources/Functional/decimal_parquet/decimal_parquet.json
index 58d4b9e..ad903e4 100755
--- a/framework/resources/Functional/decimal_parquet/decimal_parquet.json
+++ b/framework/resources/Functional/decimal_parquet/decimal_parquet.json
@@ -44,9 +44,9 @@
       "dest": "decimal/DRILL_6094/decimal.parquet"
     },
     {
-       "mode": "gen",
-       "src": "Datasources/ctas/decimal/create_decimals_test_data.sh",
-       "dest": ""
+      "mode": "ddl",
+      "src": "Datasources/ctas/decimal/createParquetWithDecimals.ddl",
+      "dest": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/drill_fragments/decimal/data/data.json b/framework/resources/Functional/drill_fragments/decimal/data/data.json
index 9410215..ccbe745 100755
--- a/framework/resources/Functional/drill_fragments/decimal/data/data.json
+++ b/framework/resources/Functional/drill_fragments/decimal/data/data.json
@@ -20,8 +20,33 @@
   "datasources": [
     {
        "mode": "gen",
-       "src": "Datasources/ctas/decimal/fragments/decimal_fragments.sh",
+       "src": "Datasources/ctas/decimal/fragments/decimal_fragments.sh $DRILL_TESTDATA_DIR",
        "dest": ""
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "decimal/fragments"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big.tsv",
+      "dest": "decimal/fragments/decimal_big.tsv"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big_zero_prec.tsv",
+      "dest": "decimal/fragments/decimal_big_zero_prec.tsv"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big_zero_scale.tsv",
+      "dest": "decimal/fragments/decimal_big_zero_scale.tsv"
+    },
+    {
+      "mode": "ddl",
+      "src:": "Datasources/ctas/decimal/fragments/decimal_fragments.ddl",
+      "dest": ""
     }
   ]
 }
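This suite keeps a gen stage but now passes the test-data root to the generator explicitly, mirroring the `$1` parameter introduced in DRILL-5166_generate_data.sh above. Under that assumption, decimal_fragments.sh would begin roughly like this (a sketch; the script itself is not part of this diff):

```
#!/bin/bash
# Hypothetical header: read the local test-data root from the first argument
DRILL_TEST_DATA_DIR=$1
test_dir=${DRILL_TEST_DATA_DIR}/Datasources/ctas/decimal/fragments
```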
diff --git a/framework/resources/Functional/drill_fragments/decimal/plan/plan.json b/framework/resources/Functional/drill_fragments/decimal/plan/plan.json
index fe8dc28..08f67a6 100755
--- a/framework/resources/Functional/drill_fragments/decimal/plan/plan.json
+++ b/framework/resources/Functional/drill_fragments/decimal/plan/plan.json
@@ -22,6 +22,31 @@
        "mode": "gen",
        "src": "Datasources/ctas/decimal/fragments/decimal_fragments.sh",
        "dest": ""
+    },
+    {
+      "mode": "gen",
+      "src": "Datasources/ctas/decimal/fragments/decimal_fragments.sh $DRILL_TESTDATA_DIR",
+      "dest": ""
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "decimal/fragments"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big.tsv",
+      "dest": "decimal/fragments/decimal_big.tsv"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big_zero_prec.tsv",
+      "dest": "decimal/fragments/decimal_big_zero_prec.tsv"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/ctas/decimal/fragments/decimal_big_zero_scale.tsv",
+      "dest": "decimal/fragments/decimal_big_zero_scale.tsv"
+    },
+    {
+      "mode": "ddl",
+      "src:": "Datasources/ctas/decimal/fragments/decimal_fragments.ddl",
+      "dest": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/filter/pushdown/item_star_operator/data.json b/framework/resources/Functional/filter/pushdown/item_star_operator/data.json
index 0a4ab09..0309bc0 100644
--- a/framework/resources/Functional/filter/pushdown/item_star_operator/data.json
+++ b/framework/resources/Functional/filter/pushdown/item_star_operator/data.json
@@ -28,8 +28,19 @@
       "dest": "filter/pushdown/DRILL_6118_complex.parquet"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_execute_ddl.sh"
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_files.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_folders.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_without_partitioning.ddl",
+      "dest": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/filter/pushdown/item_star_operator/plan.json b/framework/resources/Functional/filter/pushdown/item_star_operator/plan.json
index 9af22e6..3a33727 100644
--- a/framework/resources/Functional/filter/pushdown/item_star_operator/plan.json
+++ b/framework/resources/Functional/filter/pushdown/item_star_operator/plan.json
@@ -28,8 +28,19 @@
       "dest": "filter/pushdown/DRILL_6118_complex.parquet"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_execute_ddl.sh"
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_files.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_folders.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_without_partitioning.ddl",
+      "dest": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal.json b/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal.json
index 60f1490..184307b 100644
--- a/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal.json
+++ b/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal.json
@@ -18,6 +18,11 @@
   ],
   "datasources": [
     {
+      "mode": "rm",
+      "src": "",
+      "dest": "filter/pushdown/varchar_decimal"
+    },
+    {
       "mode": "cp",
       "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/no_metadata_file",
       "dest": "filter/pushdown/varchar_decimal/no_metadata_file/"
@@ -33,9 +38,14 @@
       "dest": "filter/pushdown/varchar_decimal/old_metadata_file/"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/copy_metadata.sh",
-      "dest": ""
+      "mode": "post_cp",
+      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/new_metadata_file",
+      "dest": "filter/pushdown/varchar_decimal/new_metadata_file/"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/old_metadata_file",
+      "dest": "filter/pushdown/varchar_decimal/old_metadata_file/"
     }
   ]
 }
\ No newline at end of file
diff --git a/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal_plan.json b/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal_plan.json
index 7821d03..7db8988 100644
--- a/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal_plan.json
+++ b/framework/resources/Functional/filter/pushdown/varchar_decimal/dfs/varchar_decimal_plan.json
@@ -18,6 +18,11 @@
   ],
   "datasources": [
     {
+      "mode": "rm",
+      "src": "",
+      "dest": "filter/pushdown/varchar_decimal"
+    },
+    {
       "mode": "cp",
       "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/no_metadata_file",
       "dest": "filter/pushdown/varchar_decimal/no_metadata_file/"
@@ -33,9 +38,14 @@
       "dest": "filter/pushdown/varchar_decimal/old_metadata_file/"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/copy_metadata.sh",
-      "dest": ""
+      "mode": "post_cp",
+      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/new_metadata_file",
+      "dest": "filter/pushdown/varchar_decimal/new_metadata_file/"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/parquet_storage/filter/pushdown/varchar_decimal/old_metadata_file",
+      "dest": "filter/pushdown/varchar_decimal/old_metadata_file/"
     }
   ]
 }
\ No newline at end of file
diff --git a/framework/resources/Functional/group_by_alias/group_by_alias.json b/framework/resources/Functional/group_by_alias/group_by_alias.json
index e54e9fc..9280c5c 100644
--- a/framework/resources/Functional/group_by_alias/group_by_alias.json
+++ b/framework/resources/Functional/group_by_alias/group_by_alias.json
@@ -24,8 +24,8 @@
       "dest": "DRILL-1248/group_by.tsv"
     },
     {
-       "mode": "gen",
-       "src": "Datasources/ctas/DRILL-1248/ctas.sh",
+       "mode": "ddl",
+       "src": "Datasources/ctas/DRILL-1248/group_by.ddl",
        "dest": ""
     }
   ]
diff --git a/framework/resources/Functional/int96/int96_data.json b/framework/resources/Functional/int96/int96_data.json
index 5bdae7d..69df6c6 100644
--- a/framework/resources/Functional/int96/int96_data.json
+++ b/framework/resources/Functional/int96/int96_data.json
@@ -21,8 +21,13 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/subqueries/create_tables.sh",
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/hive1_parquet_part"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/subqueries/create_tables.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/int96/int96_plan.json b/framework/resources/Functional/int96/int96_plan.json
index a1282cd..6b8813a 100644
--- a/framework/resources/Functional/int96/int96_plan.json
+++ b/framework/resources/Functional/int96/int96_plan.json
@@ -19,6 +19,16 @@
             "mode": "cp",
             "src": "Datasources/subqueries",
             "dest": "subqueries"
+        },
+        {
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/hive1_parquet_part"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/subqueries/create_tables.sql",
+            "dest": ""
         }
     ]
 }
diff --git a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json
index 1282cd9..8742ad9 100644
--- a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json
+++ b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "interpreted_ctas_auto_partition_hierarchical_data",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        } 
-    ]
+  "testId": "interpreted_ctas_auto_partition_hierarchical_data",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json
index 18ab92f..b53b5fd 100644
--- a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json
+++ b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json
@@ -1,36 +1,46 @@
 {
-    "testId": "interpreted_ctas_auto_partition_csv_plan",
-    "type": "group",
-    "description": "Test tpch queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.ctasAutoPartition",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex-no-order"
-            ]
-        }
-    ],
-    "datasources": [
-       {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/dfs",
-                "dest": "partition_pruning/dfs"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/partition_pruning/hive",
-                "dest": "partition_pruning/hive"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "interpreted_ctas_auto_partition_csv_plan",
+  "type": "group",
+  "description": "Test tpch queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.ctasAutoPartition",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex-no-order"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/dfs",
+      "dest": "partition_pruning/dfs"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/partition_pruning/hive",
+      "dest": "partition_pruning/hive"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "ctas_auto_partition/existing_partition_pruning"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json b/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json
index 62aa3c9..83d8438 100644
--- a/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json
+++ b/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json
@@ -24,8 +24,18 @@
             "dest": "join"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/join/crt_tbl_prtntd_tbl.sh",
+            "mode": "ddl",
+            "src": "Datasources/join/crt_tbls_partition_by_l.ddl",
+            "dest": ""
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/join/crt_tbls_partition_by_r.ddl",
+            "dest": ""
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/join/crt_tbl_prtnby_nulls.ddl",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json
index b3d59fd..1466432 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json
index 1493dbf..32e2bc3 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json
index cdd9486..827f200 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json
index 67fae1a..fb70678 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json
index 4ef74ca..81433f4 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json
index 08ddad7..581b2b6 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json
index 148ad31..198de8c 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json
index 26be65f..d4ec50f 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json
index 871ed7d..b1d8d4e 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json
index 93295f6..4e3c21c 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json
index 0feecf9..9b71c59 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json
index 522cc39..b163185 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json
index 884c6fc..934f9a1 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json
index 98d1e09..61bb91b 100644
--- a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json
+++ b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json
@@ -21,8 +21,8 @@
             "dest": "aggregation"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json
index bc80b3f..6dcc1f1 100644
--- a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json
+++ b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json
@@ -24,8 +24,8 @@
             "dest": "tpcds_sf1/parquet"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json
index e0676f7..7a426e9 100644
--- a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json
+++ b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json
@@ -24,8 +24,8 @@
             "dest": "tpcds_sf1/parquet"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/filters/data/data.json b/framework/resources/Functional/limit0/filters/data/data.json
index af3021a..e1880a5 100644
--- a/framework/resources/Functional/limit0/filters/data/data.json
+++ b/framework/resources/Functional/limit0/filters/data/data.json
@@ -1,30 +1,32 @@
 {
-    "testId": "filter_pushdown_limit0_data",
-    "type": "limit 0",
-    "description": "Filter pushdown tests: these tests verify query correctness",
-    "categories": [ "functional" ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.subqueries",
-            "output-format": "tsv",
-            "expected-file": ".*.res",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/subqueries",
-            "dest": "subqueries"
-        },
-        {
-         "mode": "gen",
-         "src": "Datasources/limit0/filters/create_filters_views.sh",
-         "dest": ""
-      }
-    ]
+  "testId": "filter_pushdown_limit0_data",
+  "type": "limit 0",
+  "description": "Filter pushdown tests: these tests verify query correctness",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.subqueries",
+      "output-format": "tsv",
+      "expected-file": ".*.res",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/subqueries",
+      "dest": "subqueries"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/limit0/filters/create_filters_views.sql",
+      "dest": ""
+    }
+  ]
 }
 
diff --git a/framework/resources/Functional/limit0/filters/plan/data.json b/framework/resources/Functional/limit0/filters/plan/data.json
index 183565a..157f1c9 100644
--- a/framework/resources/Functional/limit0/filters/plan/data.json
+++ b/framework/resources/Functional/limit0/filters/plan/data.json
@@ -1,30 +1,32 @@
 {
-    "testId": "filter_pushdown_limit0_plan",
-    "type": "group",
-    "description": "Filter pushdown tests: these tests verify query correctness",
-    "categories": [ "functional" ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.subqueries",
-            "output-format": "tsv",
-            "expected-file": ".*.res",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/subqueries",
-            "dest": "subqueries"
-        },
-        {
-         "mode": "gen",
-         "src": "Datasources/limit0/filters/create_filters_views.sh",
-         "dest": ""
-      }
-    ]
+  "testId": "filter_pushdown_limit0_plan",
+  "type": "group",
+  "description": "Filter pushdown tests: these tests verify query correctness",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.subqueries",
+      "output-format": "tsv",
+      "expected-file": ".*.res",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/subqueries",
+      "dest": "subqueries"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/limit0/filters/create_filters_views.sql",
+      "dest": ""
+    }
+  ]
 }
 
diff --git a/framework/resources/Functional/limit0/functions/data/limit0_functions.json b/framework/resources/Functional/limit0/functions/data/limit0_functions.json
index 71dde64..290ecee 100644
--- a/framework/resources/Functional/limit0/functions/data/limit0_functions.json
+++ b/framework/resources/Functional/limit0/functions/data/limit0_functions.json
@@ -21,11 +21,20 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/subqueries/create_views.sh",
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/optional_type_v.view.drill"
+        },
+        {
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/required_type_v.view.drill"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/subqueries/create_views.sql",
             "dest": ""
         }
-
     ]
 }
 
diff --git a/framework/resources/Functional/limit0/functions/plan/limit0_functions.json b/framework/resources/Functional/limit0/functions/plan/limit0_functions.json
index aa7bb2f..a7b0367 100644
--- a/framework/resources/Functional/limit0/functions/plan/limit0_functions.json
+++ b/framework/resources/Functional/limit0/functions/plan/limit0_functions.json
@@ -21,11 +21,20 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/subqueries/create_views.sh",
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/optional_type_v.view.drill"
+        },
+        {
+            "mode": "rm",
+            "src": "",
+            "dest": "subqueries/required_type_v.view.drill"
+        },
+        {
+            "mode": "ddl",
+            "src": "Datasources/subqueries/create_views.sql",
             "dest": ""
         }
-
     ]
 }
 
diff --git a/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json b/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json
index bc9e8fe..82924bd 100644
--- a/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json
+++ b/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json
@@ -21,8 +21,8 @@
             "dest": "joins"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json b/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json
index 1596be6..8c4ef3c 100644
--- a/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json
+++ b/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json
@@ -21,8 +21,8 @@
             "dest": "joins"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/union/data/queries.json b/framework/resources/Functional/limit0/union/data/queries.json
index 0efb2c9..75fb49a 100644
--- a/framework/resources/Functional/limit0/union/data/queries.json
+++ b/framework/resources/Functional/limit0/union/data/queries.json
@@ -24,8 +24,8 @@
             "dest": "union"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/union/create_union_views.sh",
+           "mode": "ddl",
+           "src": "Datasources/limit0/union/create_union_views.sql",
            "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/union/plan/queries.json b/framework/resources/Functional/limit0/union/plan/queries.json
index e24c9e9..f705c21 100644
--- a/framework/resources/Functional/limit0/union/plan/queries.json
+++ b/framework/resources/Functional/limit0/union/plan/queries.json
@@ -24,8 +24,8 @@
             "dest": "union"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/union/create_union_views.sh",
+           "mode": "ddl",
+           "src": "Datasources/limit0/union/create_union_views.sql",
            "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json b/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json
index af88df5..ff0808a 100644
--- a/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json
+++ b/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json
@@ -24,8 +24,8 @@
             "dest": "union_all"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/union_all/create_union_all_views.sh",
+           "mode": "ddl",
+           "src": "Datasources/limit0/union_all/create_union_all_views.sql",
            "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json b/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json
index 41eab72..150851e 100644
--- a/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json
+++ b/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json
@@ -24,8 +24,8 @@
             "dest": "union_all"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/union_all/create_union_all_views.sh",
+           "mode": "ddl",
+           "src": "Datasources/limit0/union_all/create_union_all_views.sql",
            "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json b/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json
index 2a52227..d6b47e7 100644
--- a/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json
+++ b/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json
@@ -24,8 +24,8 @@
             "dest": "window_functions"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh",
+           "mode": "ddl",
+           "src": "Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql",
            "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json b/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json
index dda9d24..ecc373a 100644
--- a/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json
+++ b/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json
@@ -24,9 +24,9 @@
             "dest": "window_functions"
         },
         {
-           "mode": "gen",
-           "src": "Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh",
-           "dest": ""
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql",
+            "dest": ""
         }
     ]
 }
diff --git a/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json b/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json
index 9ac2e5e..57d8475 100644
--- a/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json
+++ b/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         { 
-            "mode": "gen", 
-            "src": "Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh", 
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql",
             "dest": "" 
         } 
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json b/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json
index ade0036..2c1e1b6 100644
--- a/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json
+++ b/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json b/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json
index cd7e737..76da3c2 100644
--- a/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json
+++ b/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         { 
-            "mode": "gen", 
-            "src": "Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh", 
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql",
             "dest": "" 
         } 
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json b/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json
index 59bc50f..ac460f0 100644
--- a/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json
+++ b/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json b/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json
index f92bcff..c8ab261 100644
--- a/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json
+++ b/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         { 
-            "mode": "gen", 
-            "src": "Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh", 
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql",
             "dest": "" 
         } 
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json b/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json
index 0f98a62..77f8ddb 100644
--- a/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json
+++ b/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json
@@ -21,8 +21,8 @@
             "dest": "subqueries"
         },
         {
-            "mode": "gen",
-            "src": "Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh",
+            "mode": "ddl",
+            "src": "Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json b/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json
index 06f277f..f71ce6d 100755
--- a/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "window_functions_tpcds_sf1_parquet-variants_limit0_data",
-    "type": "limit 0",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_parquet_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/parquet",
-            "dest": "tpcds_sf1/parquet"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "window_functions_tpcds_sf1_parquet-variants_limit0_data",
+  "type": "limit 0",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_parquet_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/parquet",
+      "dest": "tpcds_sf1/parquet"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/parquet/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsParquet.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json b/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json
index bbb09a8..1522524 100755
--- a/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "window_functions_tpcds_sf1_parquet-variants_limit0_plan",
-    "type": "group",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_parquet_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/parquet",
-            "dest": "tpcds_sf1/parquet"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "window_functions_tpcds_sf1_parquet-variants_limit0_plan",
+  "type": "group",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_parquet_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/parquet",
+      "dest": "tpcds_sf1/parquet"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/parquet/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsParquet.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json
index 8cc0cd9..475f5f6 100644
--- a/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json
+++ b/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json
@@ -1,51 +1,131 @@
 {
-    "testId": "Metadata_Caching_Small_Data",
-    "type": "group",
-    "description": "Metadata Caching Tests with smaller data sets",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.metadata_caching",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-	{
-          "mode": "cp",
-          "src": "Datasources/metadata_caching/data/",
-          "dest": "metadata_caching/"
-    	},
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_cache.sh /drill/testdata/metadata_caching/nation",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/addremove_files.sh",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_toplevel_cache.sh",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_toplevel_cache.sh",
-           "dest": ""
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/metadata_caching/refresh_metadata_multilevel.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "Metadata_Caching_Small_Data",
+  "type": "group",
+  "description": "Metadata Caching Tests with smaller data sets",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.metadata_caching",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/nation"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_addfiles"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removefiles"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_adddir"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removedir"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_addautopartitioned_files"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removeautopartitioned_files"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/orders"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/metadata_caching/data/",
+      "dest": "metadata_caching/"
+    },
+    {
+      "mode": "gen",
+      "src": "Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "gen",
+      "src": "Datasources/metadata_caching/refresh_metadata_deletecache.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "gen",
+      "src": "Datasources/metadata_caching/refresh_metadata_multilevel.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removefiles/lineitem1.parquet"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removedir/feb"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removeautopartitioned_files/lineitem2.parquet"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/generated_caches"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_deletecache/.drill.parquet_metadata"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/lineitem1.parquet",
+      "dest": "metadata_caching/lineitem_addfiles/lineitem1.parquet"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/feb",
+      "dest": "metadata_caching/lineitem_adddir/"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/lineitem2.parquet",
+      "dest": "metadata_caching/lineitem_addautopartitioned_files/"
+    },
+    {
+      "mode": "dfs_cp",
+      "src": "metadata_caching/fewtypes/.drill.parquet_file_metadata.v4",
+      "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_file_metadata.json"
+    },
+    {
+      "mode": "dfs_cp",
+      "src": "metadata_caching/fewtypes/.drill.parquet_summary_metadata.v4",
+      "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_summary_metadata.json"
+    }
+  ]
 }
diff --git a/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json
index 3e1a555..7d17667 100644
--- a/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json
+++ b/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json
@@ -1,41 +1,126 @@
 {
-    "testId": "Metadata_Caching_GeneratedCaches",
-    "type": "group",
-    "description": "Metadata Caching Tests with smaller data sets",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.metadata_caching",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-	{
-          "mode": "cp",
-          "src": "Datasources/metadata_caching/data/",
-          "dest": "metadata_caching/"
-    	},
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_cache.sh /drill/testdata/metadata_caching/nation",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/addremove_files.sh",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_toplevel_cache.sh",
-           "dest": ""
-        }
-    ]
+  "testId": "Metadata_Caching_GeneratedCaches",
+  "type": "group",
+  "description": "Metadata Caching Tests with smaller data sets",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.metadata_caching",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/nation"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_addfiles"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removefiles"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_adddir"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removedir"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_addautopartitioned_files"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removeautopartitioned_files"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "metadata_caching/orders"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/metadata_caching/data/",
+      "dest": "metadata_caching/"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/metadata_caching/refresh_metadata_deletecache.ddl",
+      "dest": ""
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removefiles/lineitem1.parquet"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removedir/feb"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_removeautopartitioned_files/lineitem2.parquet"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/generated_caches"
+    },
+    {
+      "mode": "post_rm",
+      "src": "",
+      "dest": "metadata_caching/lineitem_deletecache/.drill.parquet_metadata"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/lineitem1.parquet",
+      "dest": "metadata_caching/lineitem_addfiles/lineitem1.parquet"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/feb",
+      "dest": "metadata_caching/lineitem_adddir/"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/metadata_caching/data/lineitem2.parquet",
+      "dest": "metadata_caching/lineitem_addautopartitioned_files/"
+    },
+    {
+      "mode": "dfs_cp",
+      "src": "metadata_caching/fewtypes/.drill.parquet_file_metadata.v4",
+      "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_file_metadata.json"
+    },
+    {
+      "mode": "dfs_cp",
+      "src": "metadata_caching/fewtypes/.drill.parquet_summary_metadata.v4",
+      "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_summary_metadata.json"
+    }
+  ]
 }
diff --git a/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json b/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json
index ea32505..3c8a044 100644
--- a/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json
+++ b/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json
@@ -1,31 +1,31 @@
 {
-    "testId": "Metadata_Caching_Small_PP_data",
-    "type": "group",
-    "description": "Metadata Caching Tests with smaller data sets",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.metadata_caching_pp",
-            "output-format": "tsv",
-            "expected-file": ".*.e",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-      {
-          "mode": "cp",
-          "src": "Datasources/metadata_caching/data_pp/",
-          "dest": "metadata_caching_pp/"
-        } ,
-        {
-            "mode": "gen",
-            "src": "Datasources/metadata_caching/refresh_metadata_multilevel.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "Metadata_Caching_Small_PP_data",
+  "type": "group",
+  "description": "Metadata Caching Tests with smaller data sets",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.metadata_caching_pp",
+      "output-format": "tsv",
+      "expected-file": ".*.e",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/metadata_caching/data_pp/",
+      "dest": "metadata_caching_pp/"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/metadata_caching/refresh_metadata_multilevel.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json b/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json
index 7a4afaf..ce00416 100644
--- a/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json
+++ b/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json
@@ -1,31 +1,31 @@
 {
-    "testId": "Metadata_Caching_Small_PP",
-    "type": "group",
-    "description": "Metadata Caching Tests with smaller data sets",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.metadata_caching_pp",
-            "output-format": "tsv",
-            "expected-file": ".*.e",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-      {
-          "mode": "cp",
-          "src": "Datasources/metadata_caching/data_pp/",
-          "dest": "metadata_caching_pp/"
-        }, 
-       {
-            "mode": "gen",
-            "src": "Datasources/metadata_caching/refresh_metadata_multilevel.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "Metadata_Caching_Small_PP",
+  "type": "group",
+  "description": "Metadata Caching Tests with smaller data sets",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.metadata_caching_pp",
+      "output-format": "tsv",
+      "expected-file": ".*.e",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/metadata_caching/data_pp/",
+      "dest": "metadata_caching_pp/"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/metadata_caching/refresh_metadata_multilevel.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json
index 7b3bbb2..c036e85 100644
--- a/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json
+++ b/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json
@@ -17,25 +17,115 @@
         }
     ],
     "datasources": [
-        {
-          "mode": "cp",
-          "src": "Datasources/metadata_caching/data/",
-          "dest": "metadata_caching/"
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/delete_cache.sh /drill/testdata/metadata_caching/nation",
-           "dest": ""
-        },
-        {
-           "mode": "gen",
-           "src": "Datasources/metadata_caching/addremove_files.sh",
-           "dest": ""
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/metadata_caching/refresh_metadata_multilevel.sh",
-            "dest": ""
-        }
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/nation"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_addfiles"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removefiles"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_adddir"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removedir"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_addautopartitioned_files"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removeautopartitioned_files"
+      },
+      {
+        "mode": "rm",
+        "src": "",
+        "dest": "metadata_caching/orders"
+      },
+      {
+        "mode": "cp",
+        "src": "Datasources/metadata_caching/data/",
+        "dest": "metadata_caching/"
+      },
+      {
+        "mode": "gen",
+        "src": "Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl",
+        "dest": ""
+      },
+      {
+        "mode": "gen",
+        "src": "Datasources/metadata_caching/refresh_metadata_deletecache.ddl",
+        "dest": ""
+      },
+      {
+        "mode": "gen",
+        "src": "Datasources/metadata_caching/refresh_metadata_multilevel.ddl",
+        "dest": ""
+      },
+      {
+        "mode": "post_rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removefiles/lineitem1.parquet"
+      },
+      {
+        "mode": "post_rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removedir/feb"
+      },
+      {
+        "mode": "post_rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_removeautopartitioned_files/lineitem2.parquet"
+      },
+      {
+        "mode": "post_rm",
+        "src": "",
+        "dest": "metadata_caching/generated_caches"
+      },
+      {
+        "mode": "post_rm",
+        "src": "",
+        "dest": "metadata_caching/lineitem_deletecache/.drill.parquet_metadata"
+      },
+      {
+        "mode": "post_cp",
+        "src": "Datasources/metadata_caching/data/lineitem1.parquet",
+        "dest": "metadata_caching/lineitem_addfiles/lineitem1.parquet"
+      },
+      {
+        "mode": "post_cp",
+        "src": "Datasources/metadata_caching/data/feb",
+        "dest": "metadata_caching/lineitem_adddir/"
+      },
+      {
+        "mode": "post_cp",
+        "src": "Datasources/metadata_caching/data/lineitem2.parquet",
+        "dest": "metadata_caching/lineitem_addautopartitioned_files/"
+      },
+      {
+        "mode": "dfs_cp",
+        "src": "metadata_caching/fewtypes/.drill.parquet_file_metadata.v4",
+        "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_file_metadata.json"
+      },
+      {
+        "mode": "dfs_cp",
+        "src": "metadata_caching/fewtypes/.drill.parquet_summary_metadata.v4",
+        "dest": "metadata_caching/generated_caches/fewtypes_cache/parquet_summary_metadata.json"
+      }
     ]
 }
diff --git a/framework/resources/Functional/min_max_dir/min_max_dir.json b/framework/resources/Functional/min_max_dir/min_max_dir.json
index 8c35483..cd23d55 100644
--- a/framework/resources/Functional/min_max_dir/min_max_dir.json
+++ b/framework/resources/Functional/min_max_dir/min_max_dir.json
@@ -18,6 +18,11 @@
     ],
     "datasources": [
       {
+        "mode": "rm",
+        "src": "",
+        "dest": "min_max_dir_metadatacache"
+      },
+      {
          "mode": "cp",
          "src": "Datasources/min_max_dir/data",
          "dest": "min_max_dir"
@@ -33,8 +38,8 @@
          "dest": "drill-3474"
       },
       {
-        "mode": "gen",
-        "src": "Datasources/min_max_dir/scripts/refresh_metadata.sh",
+        "mode": "ddl",
+        "src": "Datasources/min_max_dir/scripts/refresh_metadata.ddl",
         "dest": ""
       }
     ]
diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json
index 3f3da0c..7b66d45 100644
--- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json
+++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json
@@ -1,36 +1,66 @@
 {
-    "testId": "mcAutoPartitionParquetDate",
-    "type": "group",
-    "description": "Test drill's parquet date compatibility",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs",
-            "output-format": "tsv",
-            "expected-file": ".*.e",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "mc_parquet_date"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "parquet_date"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/parquet_date/gen.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "mcAutoPartitionParquetDate",
+  "type": "group",
+  "description": "Test drill's parquet date compatibility",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs",
+      "output-format": "tsv",
+      "expected-file": ".*.e",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/metadata_cache"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/auto_partition"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "mc_parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.2",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.2_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.6",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.6_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_multipart",
+      "dest": "parquet_date/auto_partition/item_multipart_autorefresh"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_single/1.2",
+      "dest": "parquet_date/auto_partition/item_single_1.2"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_date/metadata_cache.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json
index fb80943..59a2118 100644
--- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json
+++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json
@@ -1,36 +1,66 @@
 {
-    "testId": "mcAutoPartitionParquetDate_Plan",
-    "type": "group",
-    "description": "Test drill's parquet date compatibility",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs",
-            "output-format": "tsv",
-            "expected-file": ".*.e",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "mc_parquet_date"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "parquet_date"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/parquet_date/gen.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "mcAutoPartitionParquetDate_Plan",
+  "type": "group",
+  "description": "Test drill's parquet date compatibility",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs",
+      "output-format": "tsv",
+      "expected-file": ".*.e",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/metadata_cache"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/auto_partition"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "mc_parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.2",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.2_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.6",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.6_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_multipart",
+      "dest": "parquet_date/auto_partition/item_multipart_autorefresh"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_single/1.2",
+      "dest": "parquet_date/auto_partition/item_single_1.2"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_date/metadata_cache.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json
index e98a5eb..f2fa89a 100644
--- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json
+++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json
@@ -1,36 +1,66 @@
 {
-    "testId": "mcParquetDate",
-    "type": "group",
-    "description": "Test drill's parquet date compatibility",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs",
-            "output-format": "tsv",
-            "expected-file": ".*.e",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "mc_parquet_date"
-        },
-        {
-                "mode": "cp",
-                "src": "Datasources/parquet_date",
-                "dest": "parquet_date"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/parquet_date/gen.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "mcParquetDate",
+  "type": "group",
+  "description": "Test drill's parquet date compatibility",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs",
+      "output-format": "tsv",
+      "expected-file": ".*.e",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/metadata_cache"
+    },
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "parquet_date/auto_partition"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "mc_parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date",
+      "dest": "parquet_date"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.2",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.2_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/metadata_cache/metadata_cache1.6",
+      "dest": "parquet_date/metadata_cache/metadata_cache1.6_autogen"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_multipart",
+      "dest": "parquet_date/auto_partition/item_multipart_autorefresh"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/parquet_date/auto_partition/item_single/1.2",
+      "dest": "parquet_date/auto_partition/item_single_1.2"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_date/metadata_cache.ddl",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json
index 0428371..a496cfb 100644
--- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json
+++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json
@@ -43,8 +43,19 @@
       "dest": "parquet_storage/DRILL_3855/DRILL_3855_test_data"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_execute_ddl.sh"
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_files.ddl",
+      "dst": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_folders.ddl",
+      "dst": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_without_partitioning.ddl",
+      "dst": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/regex_no_order/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/regex_no_order/partitionDirectory.json
index 0e2ee58..a6afeee 100644
--- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/regex_no_order/partitionDirectory.json
+++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/regex_no_order/partitionDirectory.json
@@ -38,8 +38,19 @@
       "dest": "parquet_storage/DRILL_6118/DRILL_6118_data_source.csv"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_execute_ddl.sh"
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_files.ddl",
+      "dst": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_partitioned_by_folders.ddl",
+      "dst": ""
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/parquet_storage/DRILL-6118/DRILL_6118_parquet_without_partitioning.ddl",
+      "dst": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json b/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json
index e443a4f..a3e8d16 100644
--- a/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json
+++ b/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json
@@ -20,8 +20,28 @@
   "datasources" : [
     {
       "mode": "gen",
-      "src": "Datasources/schema_change_empty_batch/json/setup.sh",
+      "src": "Datasources/schema_change_empty_batch/json/setup.sh $DRILL_TESTDATA_DIR",
       "dest": ""
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/json/part",
+      "dest": "schema_change_empty_batch/json/part"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/json/partsupp",
+      "dest": "schema_change_empty_batch/json/partsupp"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/json/empty",
+      "dest": "schema_change_empty_batch/json/empty"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/json/part_small",
+      "dest": "schema_change_empty_batch/json/part_small"
     }
   ]
 }
diff --git a/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json b/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json
index 1852c8a..d77bdfd 100644
--- a/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json
+++ b/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json
@@ -20,8 +20,28 @@
   "datasources" : [
     {
       "mode": "gen",
-      "src": "Datasources/schema_change_empty_batch/text/dfs/setup.sh",
+      "src": "Datasources/schema_change_empty_batch/text/dfs/setup.sh $DRILL_TESTDATA_DIR",
       "dest": ""
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/psv/part",
+      "dest": "schema_change_empty_batch/psv/part"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/psv/partsupp",
+      "dest": "schema_change_empty_batch/psv/partsupp"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/psv/empty",
+      "dest": "schema_change_empty_batch/psv/empty"
+    },
+    {
+      "mode": "post_cp",
+      "src": "Datasources/schema_change_empty_batch/data/psv/json_field",
+      "dest": "schema_change_empty_batch/psv/json_field"
     }
   ]
 }
diff --git a/framework/resources/Functional/subqueries/subquery_in_select/subquery_in_select.json b/framework/resources/Functional/subqueries/subquery_in_select/subquery_in_select.json
index b971ee1..1bcfd5d 100755
--- a/framework/resources/Functional/subqueries/subquery_in_select/subquery_in_select.json
+++ b/framework/resources/Functional/subqueries/subquery_in_select/subquery_in_select.json
@@ -19,8 +19,8 @@
   ],
   "datasources": [
     {
-       "mode": "gen",
-       "src": "Datasources/ctas/subqueries/subqueries_select.sh",
+       "mode": "ddl",
+       "src": "Datasources/ctas/subqueries/subqueries_select.ddl",
        "dest": ""
     }
   ]
diff --git a/framework/resources/Functional/table_function/positive/data/table_function.json b/framework/resources/Functional/table_function/positive/data/table_function.json
index d125fc4..802b0d4 100644
--- a/framework/resources/Functional/table_function/positive/data/table_function.json
+++ b/framework/resources/Functional/table_function/positive/data/table_function.json
@@ -18,13 +18,13 @@
     ],
     "datasources": [
       {
-         "mode": "cp",
+         "mode": "post_cp",
          "src": "Datasources/table_function",
          "dest": "table_function/"
       },
       {
         "mode": "gen",
-        "src": "Datasources/table_function/DRILL-5166_generate_data.sh",
+        "src": "Datasources/table_function/DRILL-5166_generate_data.sh $DRILL_TESTDATA_DIR",
         "dest": ""
       }
     ]
diff --git a/framework/resources/Functional/table_function/positive/plan/table_function_plan_check.json b/framework/resources/Functional/table_function/positive/plan/table_function_plan_check.json
index 46a628b..cfd242b 100644
--- a/framework/resources/Functional/table_function/positive/plan/table_function_plan_check.json
+++ b/framework/resources/Functional/table_function/positive/plan/table_function_plan_check.json
@@ -18,13 +18,13 @@
   ],
   "datasources": [
     {
-      "mode": "cp",
+      "mode": "post_cp",
       "src": "Datasources/table_function",
       "dest": "table_function/"
     },
     {
       "mode": "gen",
-      "src": "Datasources/table_function/DRILL-5166_generate_data.sh",
+      "src": "Datasources/table_function/DRILL-5166_generate_data.sh $DRILL_TESTDATA_DIR",
       "dest": ""
     }
   ]
diff --git a/framework/resources/Functional/table_stats/stats/positive/stats.json b/framework/resources/Functional/table_stats/stats/positive/stats.json
index d928066..72064df 100644
--- a/framework/resources/Functional/table_stats/stats/positive/stats.json
+++ b/framework/resources/Functional/table_stats/stats/positive/stats.json
@@ -1,31 +1,36 @@
 {
-    "testId": "Stats_positive_tests",
-    "type": "group",
-    "description": "Test queries using stats",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.drillTestDir",
-            "output-format": "tsv",
-            "expected-file": ".*.plan",
-            "verification-type": [
-                "regex"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-                "mode": "cp",
-                "src": "Datasources/table_stats/tables",
-                "dest": "table_stats"
-        },
-        {
-            "mode": "gen",
-            "src": "Datasources/table_stats/table_stats.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "Stats_positive_tests",
+  "type": "group",
+  "description": "Test queries using stats",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.drillTestDir",
+      "output-format": "tsv",
+      "expected-file": ".*.plan",
+      "verification-type": [
+        "regex"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "rm",
+      "src": "",
+      "dest": "table_stats"
+    },
+    {
+      "mode": "cp",
+      "src": "Datasources/table_stats/tables",
+      "dest": "table_stats"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/table_stats/analyze_tables.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/table_stats/stats/query/query.json b/framework/resources/Functional/table_stats/stats/query/query.json
index eb27973..58312a9 100644
--- a/framework/resources/Functional/table_stats/stats/query/query.json
+++ b/framework/resources/Functional/table_stats/stats/query/query.json
@@ -18,13 +18,18 @@
   ],
   "datasources": [
     {
+      "mode": "rm",
+      "src": "",
+      "dest": "table_stats"
+    },
+    {
       "mode": "cp",
       "src": "Datasources/table_stats/tables",
       "dest": "table_stats"
     },
     {
-      "mode": "gen",
-      "src": "Datasources/table_stats/table_stats.sh",
+      "mode": "ddl",
+      "src": "Datasources/table_stats/analyze_tables.sql",
       "dest": ""
     }
   ]
diff --git a/framework/resources/Functional/text_storage/testcases/textReadGroup.json b/framework/resources/Functional/text_storage/testcases/textReadGroup.json
index 3d1eab7..0523c61 100644
--- a/framework/resources/Functional/text_storage/testcases/textReadGroup.json
+++ b/framework/resources/Functional/text_storage/testcases/textReadGroup.json
@@ -18,14 +18,39 @@
   ],
   "datasources": [
     {
+      "mode": "rm",
+      "src": "",
+      "dest": "text_storage/drill-4484"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "text_storage/drill-4484/20160401/3"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "text_storage/drill-4484/20160404"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "text_storage/drill-4484/20160501/1"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "text_storage/drill-4484/20160501/2"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "text_storage/drill-4484/20160502"
+    },
+    {
       "mode": "cp",
       "src": "Datasources/text_storage",
       "dest": "text_storage"
-    },
-    {
-      "mode": "gen",
-      "src": "Datasources/text_storage/drill-4484.sh",
-      "dest": ""
     }
   ]
 }
diff --git a/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json b/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json
index 80c93a6..9e712d1 100755
--- a/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json
+++ b/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_impala_json",
-    "type": "group",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.tpcds_sf1_json_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/json",
-            "dest": "tpcds_sf1/json"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsJson.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_impala_json",
+  "type": "group",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.tpcds_sf1_json_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/json",
+      "dest": "tpcds_sf1/json"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/json/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsJson.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json
index f6cf6cb..d072042 100755
--- a/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json
@@ -22,9 +22,14 @@
             "src": "Datasources/tpcds/sf1/parquet",
             "dest": "tpcds_sf1/parquet"
         },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "tpcds_sf1/parquet/views"
+        },
 	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
+            "mode": "ddl",
+            "src": "Datasources/tpcds/createViewsParquet.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json b/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json
index 93153f7..e6368c5 100755
--- a/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json
+++ b/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_impala_text",
-    "type": "group",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.q",
-            "schema": "dfs.tpcds_sf1_text_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/text",
-            "dest": "tpcds_sf1/text"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsText.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_impala_text",
+  "type": "group",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.q",
+      "schema": "dfs.tpcds_sf1_text_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/text",
+      "dest": "tpcds_sf1/text"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/text/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsText.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json b/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json
index 37167bb..13a49e1 100755
--- a/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json
+++ b/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json
@@ -22,9 +22,14 @@
             "src": "Datasources/tpcds/sf1/json",
             "dest": "tpcds_sf1/json"
         },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "tpcds_sf1/json/views"
+        },
 	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsJson.sh",
+            "mode": "ddl",
+            "src": "Datasources/tpcds/createViewsJson.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json
index 80995af..5e39bcc 100755
--- a/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_sanity_parquet",
-    "type": "group",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_parquet_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/parquet",
-            "dest": "tpcds_sf1/parquet"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_sanity_parquet",
+  "type": "group",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_parquet_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/parquet",
+      "dest": "tpcds_sf1/parquet"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/parquet/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsParquet.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json b/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json
index 28a91cf..7f49946 100755
--- a/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json
+++ b/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_sanity_text",
-    "type": "group",
-    "description": "Test TPCDS SF 1 data sanity queries on text (with views on top) via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_text_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/text",
-            "dest": "tpcds_sf1/text"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsText.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_sanity_text",
+  "type": "group",
+  "description": "Test TPCDS SF 1 data sanity queries on text (with views on top) via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_text_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/text",
+      "dest": "tpcds_sf1/text"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/text/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsText.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json b/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json
index effb7fc..fc0d317 100755
--- a/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json
+++ b/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_variants_json",
-    "type": "group",
-    "description": "Test TPCDS SF 1 original queries on text (with views on top) via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_json_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/json",
-            "dest": "tpcds_sf1/json"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsJson.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_variants_json",
+  "type": "group",
+  "description": "Test TPCDS SF 1 original queries on text (with views on top) via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_json_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/json",
+      "dest": "tpcds_sf1/json"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/json/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsJson.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json
index 371ec3a..56074e1 100755
--- a/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_variants_parquet",
-    "type": "group",
-    "description": "Test TPCDS SF 1 queries via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_parquet_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/parquet",
-            "dest": "tpcds_sf1/parquet"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_variants_parquet",
+  "type": "group",
+  "description": "Test TPCDS SF 1 queries via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_parquet_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/parquet",
+      "dest": "tpcds_sf1/parquet"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/parquet/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsParquet.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json b/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json
index b41512a..59c7fd8 100755
--- a/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json
+++ b/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json
@@ -1,31 +1,36 @@
 {
-    "testId": "tpcds_sf1_variants_text",
-    "type": "group",
-    "description": "Test TPCDS SF 1 original queries on text (with views on top) via jdbc",
-    "categories": [
-        "functional"
-    ],
-    "matrices": [
-        {
-            "query-file": ".*.sql",
-            "schema": "dfs.tpcds_sf1_text_views",
-            "output-format": "tsv",
-            "expected-file": ".*.e_tsv",
-            "verification-type": [
-                "in-memory"
-            ]
-        }
-    ],
-    "datasources": [
-        {
-            "mode": "cp",
-            "src": "Datasources/tpcds/sf1/text",
-            "dest": "tpcds_sf1/text"
-        },
-	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsText.sh",
-            "dest": ""
-        }
-    ]
+  "testId": "tpcds_sf1_variants_text",
+  "type": "group",
+  "description": "Test TPCDS SF 1 original queries on text (with views on top) via jdbc",
+  "categories": [
+    "functional"
+  ],
+  "matrices": [
+    {
+      "query-file": ".*.sql",
+      "schema": "dfs.tpcds_sf1_text_views",
+      "output-format": "tsv",
+      "expected-file": ".*.e_tsv",
+      "verification-type": [
+        "in-memory"
+      ]
+    }
+  ],
+  "datasources": [
+    {
+      "mode": "cp",
+      "src": "Datasources/tpcds/sf1/text",
+      "dest": "tpcds_sf1/text"
+    },
+    {
+      "mode": "mkdir",
+      "src": "",
+      "dest": "tpcds_sf1/text/views"
+    },
+    {
+      "mode": "ddl",
+      "src": "Datasources/tpcds/createViewsText.sql",
+      "dest": ""
+    }
+  ]
 }
diff --git a/framework/resources/Functional/udfs/udfs.json b/framework/resources/Functional/udfs/udfs.json
index 1a52b87..ee22cd4 100644
--- a/framework/resources/Functional/udfs/udfs.json
+++ b/framework/resources/Functional/udfs/udfs.json
@@ -4,7 +4,7 @@
     "description": "Test UDF functions",
     "submit-type": "jdbc",
     "categories": [
-        "functional"
+        "excluded"
     ],
     "matrices": [
         {
diff --git a/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json b/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json
index fb89cec..8ec6fb4 100755
--- a/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json
@@ -22,9 +22,14 @@
             "src": "Datasources/tpcds/sf1/parquet",
             "dest": "tpcds_sf1/parquet"
         },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "tpcds_sf1/parquet/views"
+        },
 	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
+            "mode": "ddl",
+            "src": "Datasources/tpcds/createViewsParquet.sql",
             "dest": ""
         }
     ]
diff --git a/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json b/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json
index 88575cd..e1b9ec3 100755
--- a/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json
+++ b/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json
@@ -22,9 +22,14 @@
             "src": "Datasources/tpcds/sf1/parquet",
             "dest": "tpcds_sf1/parquet"
         },
+        {
+            "mode": "mkdir",
+            "src": "",
+            "dest": "tpcds_sf1/parquet/views"
+        },
 	{
-            "mode": "gen",
-            "src": "Datasources/tpcds/createViewsParquet.sh",
+            "mode": "ddl",
+            "src": "Datasources/tpcds/createViewsParquet.sql",
             "dest": ""
         }
     ]
diff --git a/framework/src/main/java/org/apache/drill/test/framework/ConnectionPool.java b/framework/src/main/java/org/apache/drill/test/framework/ConnectionPool.java
index 6eca059..e5d5705 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/ConnectionPool.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/ConnectionPool.java
@@ -19,7 +19,9 @@
 
 import com.google.common.collect.Queues;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
@@ -29,7 +31,7 @@
 import java.util.Queue;
 
 public class ConnectionPool implements AutoCloseable {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private final Map<String, Queue<Connection>> connections;
   private Properties connectionProperties;
 
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DBMetaData.java b/framework/src/main/java/org/apache/drill/test/framework/DBMetaData.java
index d3b883b..9d643c2 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DBMetaData.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DBMetaData.java
@@ -17,17 +17,19 @@
  */
 package org.apache.drill.test.framework;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.sql.SQLException;
 import java.sql.DatabaseMetaData;
 
-import org.apache.log4j.Logger;
 
 /**
 * Retrieves DataBase MetaData.
 */
 
 public class DBMetaData {
-	private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+	private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
         private DatabaseMetaData md = null;
 	public DBMetaData(DatabaseMetaData md) {
 		this.md = md;
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillQueryProfile.java b/framework/src/main/java/org/apache/drill/test/framework/DrillQueryProfile.java
index 83c5b95..2d255fd 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillQueryProfile.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillQueryProfile.java
@@ -3,7 +3,7 @@
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import oadd.org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.exec.server.rest.profile.CoreOperatorType;
 
 import java.util.List;
 import java.util.stream.Collectors;
@@ -208,13 +208,13 @@
      * @param operator
      * @return
      */
-    public long getOptimalMemoryPerOperator(final UserBitShared.CoreOperatorType operator) {
+    public long getOptimalMemoryPerOperator(final CoreOperatorType operator) {
         return this.fragmentProfiles
                 .stream()
                 .flatMap(f -> f.minorFragmentProfiles
                         .stream()
                         .flatMap(m -> m.operatorProfiles.stream())
-                ).filter(o -> o.operatorId == operator.getNumber())
+                ).filter(o -> o.operatorId == operator.getId())
                 .mapToLong(o -> o.optimalMemAllocation)
                 .sum();
     }
@@ -223,14 +223,14 @@
      * Get different operators in the profile.
      * @return a list of operators in the query profile.
      */
-    public List<UserBitShared.CoreOperatorType> getOperatorsFromProfile() {
+    public List<CoreOperatorType> getOperatorsFromProfile() {
         return this.fragmentProfiles
                 .stream().flatMap(f -> f.minorFragmentProfiles
                         .stream()
                         .flatMap(m -> m.operatorProfiles.stream())
                 ).mapToInt(o -> o.operatorId)
                 .distinct()
-                .mapToObj(UserBitShared.CoreOperatorType::forNumber)
+                .mapToObj(CoreOperatorType::valueOf)
                 .collect(Collectors.toList());
     }
 
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillRMConfig.java b/framework/src/main/java/org/apache/drill/test/framework/DrillRMConfig.java
index 3c97fc8..f5aef85 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillRMConfig.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillRMConfig.java
@@ -9,7 +9,8 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigRenderOptions;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -24,7 +25,7 @@
  * Represents a Drill RM Resource Pool configuration.
  */
 public class DrillRMConfig implements DrillConfigRenderer {
-    private static final Logger LOG = Logger.getLogger(DrillRMConfig.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DrillRMConfig.class);
     //Resource Pool Configurations
     public static final String RESOURCE_POOL_NAME_KEY = "pool_name";
     public static final String MEMORY_KEY = "memory";
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillTestDefaults.java b/framework/src/main/java/org/apache/drill/test/framework/DrillTestDefaults.java
index 71b21f1..72bfece 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillTestDefaults.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillTestDefaults.java
@@ -24,6 +24,10 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.PropertyResourceBundle;
 import java.util.ResourceBundle;
@@ -149,6 +153,10 @@
    
   private static final Map<String, String> drillProperties;
 
+  private static class DrillDefaultsHolder {
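+    // Initialization-on-demand holder idiom: drillDefaults is computed lazily
+    // and thread-safely on the first call to getDrillDefaults().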
+    private static final Map<String, String> drillDefaults = generateDefaultsMap();
+  }
+
   static {
     drillProperties = getConfigProperties();
     loadConfigProperties();
@@ -158,6 +166,10 @@
     return drillProperties;
   }
 
+  public static Map<String, String> getDrillDefaults() {
+    return DrillDefaultsHolder.drillDefaults;
+  }
+
   /**
 
    * Reads properties from drill test configuration file
@@ -185,6 +197,23 @@
     return ImmutableMap.copyOf(properties);
   }
 
+  private static Map<String, String> generateDefaultsMap() {
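+    // Collect every public static String field of this class into a
+    // field-name -> value map via reflection.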
+    Field[] fields = DrillTestDefaults.class.getDeclaredFields();
+    Map<String, String> defaults = new HashMap<>();
+    Arrays.stream(fields)
+            .filter(f -> String.class.equals(f.getType()))
+            .filter(f -> Modifier.isPublic(f.getModifiers()))
+            .filter(f -> Modifier.isStatic(f.getModifiers()))
+            .forEach(f -> {
+              try {
+                defaults.put(f.getName(), (String) f.get(null));
+              } catch (IllegalAccessException e) {
+                throw new IllegalStateException(e); // Already filtered for public only fields
+              }
+            });
+    return defaults;
+  }
+
   /**
    * Load configuration properties
    */
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillTestJdbc.java b/framework/src/main/java/org/apache/drill/test/framework/DrillTestJdbc.java
index 18e3107..701e25b 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillTestJdbc.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillTestJdbc.java
@@ -20,10 +20,11 @@
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.Lists;
 import org.apache.drill.test.framework.TestCaseModeler.TestMatrix;
+import org.apache.drill.test.framework.TestVerifier.PlanVerificationException;
 import org.apache.drill.test.framework.TestVerifier.TestStatus;
 import org.apache.drill.test.framework.TestVerifier.VerificationException;
-import org.apache.drill.test.framework.TestVerifier.PlanVerificationException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedWriter;
 import java.io.File;
@@ -33,14 +34,13 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.sql.Types;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 public class DrillTestJdbc implements DrillTest {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private static final String LINE_BREAK = "------------------------------------------------------------------------";
 
   private ConnectionPool connectionPool;
@@ -248,7 +248,7 @@
       }
 
       LOG.debug("Result set data types:");
-      LOG.debug(Utils.getTypesInStrings(columnTypes));
+      LOG.debug(Utils.getTypesInStrings(columnTypes).toString());
 
       if (resultSet != null) {
         while (resultSet.next()) {
@@ -258,7 +258,7 @@
         }
       }
     } catch (IllegalArgumentException | IllegalAccessException | IOException e1) {
-		LOG.warn(e1);
+		LOG.warn(e1.getMessage(), e1);
 	} finally {
 	  doneProcessingResultSet.set(true);
       if (resultSet != null) {
@@ -322,7 +322,7 @@
             : new VerificationException(exception + "\n" + msg);
       }
     } catch (IllegalArgumentException | IllegalAccessException e1) {
-      LOG.warn(e1);
+      LOG.warn(e1.getMessage(), e1);
     } finally {
       if (resultSet != null) resultSet.close();
       if (writer != null) writer.close();
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillTestOdbc.java b/framework/src/main/java/org/apache/drill/test/framework/DrillTestOdbc.java
index b1830e5..220db6f 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillTestOdbc.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillTestOdbc.java
@@ -23,7 +23,8 @@
 import org.apache.drill.test.framework.TestVerifier.TestStatus;
 import org.apache.drill.test.framework.TestVerifier.VerificationException;
 import org.apache.drill.test.framework.TestVerifier.PlanVerificationException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -42,7 +43,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 
 public class DrillTestOdbc implements DrillTest{
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private String query = null;
   private String outputFilename;
   private volatile TestStatus testStatus = TestStatus.PENDING;
diff --git a/framework/src/main/java/org/apache/drill/test/framework/DrillTestScript.java b/framework/src/main/java/org/apache/drill/test/framework/DrillTestScript.java
index 1933f42..664ecf6 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/DrillTestScript.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/DrillTestScript.java
@@ -18,28 +18,15 @@
 package org.apache.drill.test.framework;
 
 import com.google.common.base.Stopwatch;
-import com.google.common.collect.Lists;
 import org.apache.drill.test.framework.TestCaseModeler.TestMatrix;
 import org.apache.drill.test.framework.TestVerifier.TestStatus;
-import org.apache.drill.test.framework.TestVerifier.VerificationException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Types;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.atomic.AtomicBoolean;
 
 public class DrillTestScript implements DrillTest {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private String query;
   private String outputFilename;
   private volatile TestStatus testStatus = TestStatus.PENDING;
@@ -77,7 +64,7 @@
     try {
 
   	  cmdConsOut = Utils.execCmd(command);
-  	  LOG.info(cmdConsOut);
+  	  LOG.info(cmdConsOut.toString());
 
       switch (cmdConsOut.exitCode) {
       case 0:
diff --git a/framework/src/main/java/org/apache/drill/test/framework/TestDriver.java b/framework/src/main/java/org/apache/drill/test/framework/TestDriver.java
index 079e89a..b0337a5 100644
--- a/framework/src/main/java/org/apache/drill/test/framework/TestDriver.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/TestDriver.java
@@ -22,7 +22,6 @@
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-
 import org.apache.drill.test.framework.TestCaseModeler.DataSource;
 import org.apache.drill.test.framework.TestVerifier.TestStatus;
 import org.apache.hadoop.conf.Configuration;
@@ -31,9 +30,10 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
-import org.apache.log4j.Logger;
 import org.ojai.Document;
 import org.ojai.json.Json;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -42,14 +42,25 @@
 import java.io.FileWriter;
 import java.io.IOException;
 import java.net.URISyntaxException;
-import java.util.*;
 import java.sql.Connection;
+import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.DatabaseMetaData;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
 
 public class TestDriver {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private Connection connection = null;
   public static String commitId, version;
   private String[] injectionKeys = {"DRILL_VERSION"};
@@ -59,6 +70,7 @@
   private ConnectionPool connectionPool;
   private int countTotalTests;
   private Properties connectionProperties;
+  private static boolean minioEnabled;
 
   private static Configuration conf = new Configuration();
   public static final CmdParam cmdParam = new CmdParam();
@@ -184,8 +196,10 @@
   	  queryMemoryUsage();
     }
 
+    List<String> excludedDependencies = cmdParam.excludeDependenciesAsList();
+    minioEnabled = !excludedDependencies.contains("all") && !excludedDependencies.contains("s3minio");
     // Run Apache Minio server if s3minio tests aren't excluded
-    if(cmdParam.excludeDependencies == null || !cmdParam.excludeDependencies.contains("s3minio")) {
+    if(minioEnabled) {
       Utils.startMinio();
     } else {
       // Disable s3minio storage plugin if Minio server is down
@@ -667,7 +681,7 @@
     	  injections.put(injectionKeys[i], version);
     	  break;
     	default:
-    	  LOG.fatal("Injection parameter not recognized!");
+    	  LOG.error("Injection parameter not recognized!");
     	}    	
       }
       connectionPool.releaseConnection(connection);
@@ -712,7 +726,7 @@
 	connectionPool.releaseConnection(connection);
 
     // Stop Apache Minio server if it was started
-    if(cmdParam.excludeDependencies == null || !cmdParam.excludeDependencies.contains("s3minio")) {
+    if(minioEnabled) {
       Utils.stopMinio();
     }
   }
@@ -728,66 +742,236 @@
 
     boolean restartDrillbits = false;
 
-    CancelingExecutor copyExecutor = new CancelingExecutor(cmdParam.threads, Integer.MAX_VALUE);
-    CancelingExecutor genExecutor = new CancelingExecutor(cmdParam.threads, Integer.MAX_VALUE);
-    List<Cancelable> copyTasks = Lists.newArrayList();
-    List<Cancelable> genTasks = Lists.newArrayList();
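+    // Tasks are grouped by datasource "mode" so that each preparation stage
+    // can be executed as its own batch.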
+    List<Cancelable> rmTasks = new ArrayList<>();
+    List<Cancelable> copyTasks = new ArrayList<>();
+    List<Cancelable> mkdirTasks = new ArrayList<>();
+    List<Cancelable> genTasks = new ArrayList<>();
+    List<Cancelable> postRmTasks = new ArrayList<>();
+    List<Cancelable> postCopyTasks = new ArrayList<>();
+    List<Cancelable> dfsCopyTasks = new ArrayList<>();
+    List<Cancelable> ddlTasks = new ArrayList<>();
     for (final TestCaseModeler.DataSource datasource : dataSources) {
       String mode = datasource.mode;
-      if (mode.equals("cp")) {
-        Cancelable task = new Cancelable() {
-          @Override
-          public void cancel() {
-            // no op, as this will not time out
-          }
-
-          @Override
-          public void run() {
-            try {
-              Path src = new Path(DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src);
-              Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
-              dfsCopy(src, dest, DrillTestDefaults.FS_MODE);
-            } catch (IOException e) {
-              throw new RuntimeException(e);
+      switch (mode) {
+        case "rm": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
             }
-          }
-        };
-        copyTasks.add(task);
-      } else if (mode.equals("gen")) {
-        Cancelable task = new Cancelable() {
-          @Override
-          public void cancel() {
-            // no op, as this will not time out
-          }
 
-          @Override
-          public void run() {
-            runGenerateScript(datasource);
-          }
-        };
-        genTasks.add(task);
-      } else if (mode.equals("restart-drill")) {
-        restartDrillbits = true;
+            @Override
+            public void run() {
+              try {
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsDelete(dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          rmTasks.add(task);
+          break;
+        }
+        case "cp": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              try {
+                Path src = new Path(DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src);
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsCopy(src, dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          copyTasks.add(task);
+          break;
+        }
+        case "mkdir": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              try {
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsMkdir(dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          mkdirTasks.add(task);
+          break;
+        }
+        case "gen": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              runGenerateScript(datasource);
+            }
+          };
+          genTasks.add(task);
+          break;
+        }
+        case "post_rm": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              try {
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsDelete(dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          postRmTasks.add(task);
+          break;
+        }
+        case "post_cp": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              try {
+                Path src = new Path(DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src);
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsCopy(src, dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          postCopyTasks.add(task);
+          break;
+        }
+        case "dfs_cp": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              try {
+                Path src = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.src);
+                Path dest = new Path(DrillTestDefaults.DRILL_TESTDATA, datasource.dest);
+                dfsToDfsCopy(src, dest, DrillTestDefaults.FS_MODE);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            }
+          };
+          dfsCopyTasks.add(task);
+          break;
+        }
+        case "ddl": {
+          Cancelable task = new Cancelable() {
+            @Override
+            public void cancel() {
+              // no op, as this will not time out
+            }
+
+            @Override
+            public void run() {
+              Path src = new Path(DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src);
+              runDDL(src);
+            }
+          };
+          ddlTasks.add(task);
+          break;
+        }
+        case "restart-drill":
+          restartDrillbits = true;
+          break;
       }
     }
 
     final Stopwatch stopwatch = Stopwatch.createStarted();
-    
-    LOG.info("> Copying Data");
-    copyExecutor.executeAll(copyTasks);
-    copyExecutor.close();
-    LOG.info(">> Copy duration: " + stopwatch + "\n");
-    stopwatch.reset().start();
-    LOG.info("> Generating Data");
-    genExecutor.executeAll(genTasks);
-    genExecutor.close();
-    LOG.info("\n>> Generation duration: " + stopwatch + "\n");
+
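+    // Run the preparation stages in a fixed order (rm, cp, mkdir, gen, post_rm,
+    // post_cp, dfs_cp, ddl); tasks within each stage run concurrently.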
+    try (CancelingExecutor executor = new CancelingExecutor(cmdParam.threads, Integer.MAX_VALUE)) {
+      if (!rmTasks.isEmpty()) {
+        LOG.info("> Clearing Data");
+        executor.executeAll(rmTasks);
+      }
+      if (!copyTasks.isEmpty()) {
+        LOG.info("> Copying Data");
+        executor.executeAll(copyTasks);
+        LOG.info(">> Copy duration: " + stopwatch + "\n");
+        stopwatch.reset().start();
+      }
+      if (!mkdirTasks.isEmpty()) {
+        LOG.info("> Making directories");
+        executor.executeAll(mkdirTasks);
+      }
+      if (!genTasks.isEmpty()) {
+        LOG.info("> Generating Data");
+        executor.executeAll(genTasks);
+        LOG.info("\n>> Generation duration: " + stopwatch + "\n");
+      }
+      if (!postRmTasks.isEmpty()) {
+        LOG.info("> Clearing Data after generation");
+        executor.executeAll(postRmTasks);
+      }
+      if (!postCopyTasks.isEmpty()) {
+        LOG.info("> Copying generated Data");
+        executor.executeAll(postCopyTasks);
+      }
+      if (!dfsCopyTasks.isEmpty()) {
+        LOG.info("> Rearranging Data on DFS");
+        executor.executeAll(dfsCopyTasks);
+      }
+      if (!ddlTasks.isEmpty()) {
+        LOG.info("> Executing DDL scripts");
+        executor.executeAll(ddlTasks);
+      }
+    }
 
     if (restartDrillbits) {
       Utils.restartDrill();
     }
   }
 
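+  /**
+   * Deletes {@code dest} recursively on the distributed or local file system
+   * (depending on {@code fsMode}), if the path exists.
+   */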
+  private void dfsDelete(Path dest, String fsMode) throws IOException {
+    FileSystem fs;
+
+    if (fsMode.equals("distributedFS")) {
+      fs = FileSystem.get(conf);
+    } else {
+      fs = FileSystem.getLocal(conf);
+    }
+
+    if (fs.exists(dest)) {
+      fs.delete(dest, true);
+    }
+  }
+
   private void dfsCopy(Path src, Path dest, String fsMode)
           throws IOException {
 
@@ -818,31 +1002,95 @@
           LOG.debug("File " + src + " already exists as " + dest);
         }
       }
-    } catch (FileAlreadyExistsException e) {
-      LOG.debug("File " + src + " already exists as " + dest);
     } catch (IOException e) {
       LOG.debug("File " + src + " already exists as " + dest);
     }
   }
 
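+  /**
+   * Copies data between two locations on the same file system, recursing into
+   * directories and skipping files that already exist at the destination.
+   */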
+  private void dfsToDfsCopy(Path src, Path dest, String fsMode)
+          throws IOException {
+
+    FileSystem fs;
+
+    if (fsMode.equals("distributedFS")) {
+      fs = FileSystem.get(conf);
+    } else {
+      fs = FileSystem.getLocal(conf);
+    }
+
+    try {
+      if (fs.getFileStatus(src).isDirectory()) {
+        for (FileStatus file : fs.listStatus(src)) {
+          Path srcChild = file.getPath();
+          Path newDest = new Path(dest + "/" + srcChild.getName());
+          dfsCopy(srcChild, newDest, fsMode);
+        }
+      } else {
+        if (!fs.exists(dest.getParent())) {
+          fs.mkdirs(dest.getParent());
+        }
+        if (!fs.exists(dest)) {
+          FileUtil.copy(fs, src, fs, dest, false, fs.getConf());
+          LOG.debug("Copying file " + src + " to " + dest);
+        } else {
+          LOG.debug("File " + src + " already exists as " + dest);
+        }
+      }
+    } catch (IOException e) {
+      LOG.debug("File " + src + " already exists as " + dest);
+    }
+  }
+
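+  /** Creates the directory {@code dest}, including missing parents, if it does not already exist. */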
+  private void dfsMkdir(Path dest, String fsMode) throws IOException {
+    FileSystem fs;
+
+    if (fsMode.equals("distributedFS")) {
+      fs = FileSystem.get(conf);
+    } else {
+      fs = FileSystem.getLocal(conf);
+    }
+
+    if (!fs.exists(dest)) {
+      fs.mkdirs(dest);
+    }
+  }
+
   private void runGenerateScript(DataSource datasource) {
-	String command = DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src;
-	LOG.info("Running command " + command);
-	CmdConsOut cmdConsOut;
-	try {
-	  cmdConsOut = Utils.execCmd(command);
-	  LOG.debug(cmdConsOut);
-	} catch (Exception e) {
-	  cmdConsOut = new CmdConsOut();
-	  cmdConsOut.cmd = command;
-	  cmdConsOut.consoleErr = e.getMessage();
-	  LOG.error("Error: Failed to execute the command " + cmdConsOut);
-	  throw new RuntimeException(e);
-	}
-	if (cmdConsOut.exitCode != 0) {
-	  throw new RuntimeException("Error executing the command " + command
-	          + " has return code " + cmdConsOut.exitCode);
-	}
+    String command = DrillTestDefaults.TEST_ROOT_DIR + "/" + DrillTestDefaults.DRILL_TESTDATA_DIR + "/" + datasource.src;
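+    // A bare *.ddl file (no additional arguments) is executed over JDBC rather than as a shell command.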
+    if (command.endsWith(".ddl") && command.split(" ").length == 1) {
+      runDDL(new Path(command));
+    } else {
+      command = Utils.substituteArguments(command);
+      LOG.info("Running command " + command);
+      CmdConsOut cmdConsOut;
+      try {
+        cmdConsOut = Utils.execCmd(command);
+        LOG.debug(cmdConsOut.toString());
+      } catch (Exception e) {
+        cmdConsOut = new CmdConsOut();
+        cmdConsOut.cmd = command;
+        cmdConsOut.consoleErr = e.getMessage();
+        LOG.error("Error: Failed to execute the command " + cmdConsOut);
+        throw new RuntimeException(e);
+      }
+      if (cmdConsOut.exitCode != 0) {
+        throw new RuntimeException("Error executing the command " + command
+                + " has return code " + cmdConsOut.exitCode);
+      }
+    }
+  }
+
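+  /** Executes the SQL statements read from {@code src}, one by one, over a pooled JDBC connection. */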
+  private void runDDL(Path src) {
+    try (Connection conn = connectionPool.getOrCreateConnection()) {
+      String[] queries = Utils.getSqlStatements(src.toString());
+      try (Statement statement = conn.createStatement()) {
+        for (String query : queries) {
+          statement.execute(query);
+        }
+      }
+    } catch (SQLException | IOException e) {
+      LOG.error("Error executing ddl " + src, e);
+    }
   }
   
   private DrillTest getDrillTest(DrillTestCase modeler, ConnectionPool connectionPool, int cloneId, int totalCases) {
@@ -884,7 +1132,7 @@
       }
 
       LOG.debug("Result set data types:");
-      LOG.debug(Utils.getTypesInStrings(types));
+      LOG.debug(Utils.getTypesInStrings(types).toString());
 
       if (resultSet != null) {
         while (resultSet.next()) {
diff --git a/framework/src/main/java/org/apache/drill/test/framework/TestVerifier.java b/framework/src/main/java/org/apache/drill/test/framework/TestVerifier.java
index 57aedfb..62951ea 100755
--- a/framework/src/main/java/org/apache/drill/test/framework/TestVerifier.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/TestVerifier.java
@@ -42,7 +42,8 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.Lists;
 import org.apache.drill.test.framework.TestCaseModeler.TestMatrix;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Verification of drill tests by comparing actual query output with expected
@@ -50,7 +51,7 @@
  * 
  */
 public class TestVerifier {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger"); 
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private static final int MAX_MISMATCH_SIZE = 10;
   public TestStatus testStatus = TestStatus.PENDING;
   private int mapSize = 0;
diff --git a/framework/src/main/java/org/apache/drill/test/framework/Utils.java b/framework/src/main/java/org/apache/drill/test/framework/Utils.java
index 04f0fc2..7fbfb6e 100755
--- a/framework/src/main/java/org/apache/drill/test/framework/Utils.java
+++ b/framework/src/main/java/org/apache/drill/test/framework/Utils.java
@@ -72,7 +72,8 @@
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.message.BasicNameValuePair;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.SSLContext;
@@ -86,7 +87,7 @@
  *
  */
 public class Utils {
-  private static final Logger LOG = Logger.getLogger("DrillTestLogger");
+  private static final Logger LOG = LoggerFactory.getLogger("DrillTestLogger");
   private static final Map<Integer, String> sqlTypes;
   private static final Map<Integer, String> sqlNullabilities;
   private static HttpClient client;
@@ -343,9 +344,11 @@
         }
         boolean skipSuite = false;
         if (modeler.dependencies != null) {
+          List<String> excludedDependencies = TestDriver.cmdParam.excludeDependenciesAsList();
           for (String dependency : modeler.dependencies) {
-            if (TestDriver.cmdParam.excludeDependenciesAsList().contains(dependency)) {
+            if (excludedDependencies.contains("all") || excludedDependencies.contains(dependency)) {
               skipSuite = true;
+              break;
             }
           }
         }
@@ -571,7 +574,7 @@
 	  }
 
 	  LOG.debug("Result set data types:");
-	  LOG.debug(Utils.getTypesInStrings(types));
+	  LOG.debug(Utils.getTypesInStrings(types).toString());
 	  stringBuffer.append(new ColumnList(types, columnLabels).toString()).append("\n");
 
 	  while (resultSet.next()) {
@@ -1156,4 +1159,18 @@
     }
     return String.format("Commit: %s\nAuthor: %s <%s>\n\n%s", commitID, commitAuthor, commitEmail, commitMessage);
   }
+
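+  /**
+   * Replaces each argument of the form "$NAME" with the matching entry from
+   * DrillTestDefaults.getDrillDefaults(); the command itself (index 0) and
+   * unknown names are left untouched.
+   */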
+  public static String substituteArguments(String cmd) {
+    String[] command = cmd.split(" ");
+    for (int i = 1; i < command.length; i++) {
+      String arg = command[i];
+      if (arg.startsWith("$")) {
+        arg = arg.substring(1);
+        if (DrillTestDefaults.getDrillDefaults().containsKey(arg)) {
+          command[i] = DrillTestDefaults.getDrillDefaults().get(arg);
+        }
+      }
+    }
+    return String.join(" ", command);
+  }
 }
diff --git a/framework/src/test/java/org/apache/drill/test/framework/DrillTestFrameworkUnitTests.java b/framework/src/test/java/org/apache/drill/test/framework/DrillTestFrameworkUnitTests.java
index d7c3994..90a190d 100644
--- a/framework/src/test/java/org/apache/drill/test/framework/DrillTestFrameworkUnitTests.java
+++ b/framework/src/test/java/org/apache/drill/test/framework/DrillTestFrameworkUnitTests.java
@@ -2,9 +2,10 @@
 
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
-import oadd.org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.exec.server.rest.profile.CoreOperatorType;
 import org.apache.drill.test.framework.common.DrillJavaTestBase;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -26,7 +27,7 @@
 
 @Test(groups = UNIT_GROUP)
 public class DrillTestFrameworkUnitTests extends DrillJavaTestBase {
-    private static final Logger LOG = Logger.getLogger(DrillTestFrameworkUnitTests.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DrillTestFrameworkUnitTests.class);
 
 
     @BeforeTest(alwaysRun = true)
@@ -182,10 +183,10 @@
             LOG.info("Memory estimated by RM planner: " + rmMemEstimate);
             Assert.assertTrue(rmMemEstimate > 0,
                     "RM estimated memory should be greater than 0");
-            List<UserBitShared.CoreOperatorType> operators = profile.getOperatorsFromProfile();
+            List<CoreOperatorType> operators = profile.getOperatorsFromProfile();
             Assert.assertTrue(operators.size() > 0,
                     "Number of operators in the profile should be greater than 0");
-            operators.forEach(LOG::info);
+            operators.forEach(o -> LOG.info(o.name()));
         } catch (Exception e) {
             e.printStackTrace();
             Assert.fail(e.getMessage());
diff --git a/framework/src/test/java/org/apache/drill/test/framework/common/DrillJavaTestBase.java b/framework/src/test/java/org/apache/drill/test/framework/common/DrillJavaTestBase.java
index 3bb7b1b..8c70f5c 100644
--- a/framework/src/test/java/org/apache/drill/test/framework/common/DrillJavaTestBase.java
+++ b/framework/src/test/java/org/apache/drill/test/framework/common/DrillJavaTestBase.java
@@ -4,7 +4,8 @@
 import org.apache.drill.test.framework.ConnectionPool;
 import org.apache.drill.test.framework.Utils;
 import org.apache.drill.test.framework.ssh.DrillCluster;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.AfterSuite;
@@ -20,7 +21,7 @@
 import java.util.Properties;
 
 public class DrillJavaTestBase {
-    private static final Logger LOG = Logger.getLogger(DrillJavaTestBase.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DrillJavaTestBase.class);
     protected ConnectionPool connectionPool;
     protected DrillCluster drillCluster;
 
diff --git a/framework/src/test/java/org/apache/drill/test/framework/resourcemanagement/QueueSelectionTests.java b/framework/src/test/java/org/apache/drill/test/framework/resourcemanagement/QueueSelectionTests.java
index 169e469..875405d 100644
--- a/framework/src/test/java/org/apache/drill/test/framework/resourcemanagement/QueueSelectionTests.java
+++ b/framework/src/test/java/org/apache/drill/test/framework/resourcemanagement/QueueSelectionTests.java
@@ -4,7 +4,8 @@
 import org.apache.drill.test.framework.*;
 import org.apache.drill.test.framework.common.DrillJavaTestBase;
 import org.apache.drill.test.framework.common.DrillTestNGDefaults;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.Assert;
 import org.testng.annotations.*;
 
@@ -23,7 +24,7 @@
 @SuppressWarnings("Duplicates")
 @Test(groups = FUNCTIONAL_GROUP)
 public class QueueSelectionTests extends DrillJavaTestBase {
-    private static final Logger LOG = Logger.getLogger(QueueSelectionTests.class);
+    private static final Logger LOG = LoggerFactory.getLogger(QueueSelectionTests.class);
 
     @BeforeClass(alwaysRun = true, description = "Invoked before all tests in the class")
     private void setup() throws IOException {