DRILL-7523: Update Calcite to 1.31.0
diff --git a/common/src/main/java/org/apache/drill/common/config/NestedConfig.java b/common/src/main/java/org/apache/drill/common/config/NestedConfig.java
index 79bb88c..3e07fc2 100644
--- a/common/src/main/java/org/apache/drill/common/config/NestedConfig.java
+++ b/common/src/main/java/org/apache/drill/common/config/NestedConfig.java
@@ -17,12 +17,17 @@
  */
 package org.apache.drill.common.config;
 
+import java.time.Duration;
+import java.time.Period;
+import java.time.temporal.TemporalAmount;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigList;
+import com.typesafe.config.ConfigMemorySize;
 import com.typesafe.config.ConfigMergeable;
 import com.typesafe.config.ConfigObject;
 import com.typesafe.config.ConfigOrigin;
@@ -235,4 +240,79 @@
   public Config withValue(String path, ConfigValue value) {
     return c.withValue(path, value);
   }
+
+  @Override
+  public boolean isResolved() {
+    return c.isResolved();
+  }
+
+  @Override
+  public Config resolveWith(Config config) {
+    return c.resolveWith(config);
+  }
+
+  @Override
+  public Config resolveWith(Config config, ConfigResolveOptions configResolveOptions) {
+    return c.resolveWith(config, configResolveOptions);
+  }
+
+  @Override
+  public boolean hasPathOrNull(String s) {
+    return c.hasPathOrNull(s);
+  }
+
+  @Override
+  public boolean getIsNull(String s) {
+    return c.getIsNull(s);
+  }
+
+  @Override
+  public <T extends Enum<T>> T getEnum(Class<T> aClass, String s) {
+    return c.getEnum(aClass, s);
+  }
+
+  @Override
+  public ConfigMemorySize getMemorySize(String s) {
+    return c.getMemorySize(s);
+  }
+
+  @Override
+  public long getDuration(String s, TimeUnit timeUnit) {
+    return c.getDuration(s, timeUnit);
+  }
+
+  @Override
+  public Duration getDuration(String s) {
+    return c.getDuration(s);
+  }
+
+  @Override
+  public Period getPeriod(String s) {
+    return c.getPeriod(s);
+  }
+
+  @Override
+  public TemporalAmount getTemporal(String s) {
+    return c.getTemporal(s);
+  }
+
+  @Override
+  public <T extends Enum<T>> List<T> getEnumList(Class<T> aClass, String s) {
+    return c.getEnumList(aClass, s);
+  }
+
+  @Override
+  public List<ConfigMemorySize> getMemorySizeList(String s) {
+    return c.getMemorySizeList(s);
+  }
+
+  @Override
+  public List<Long> getDurationList(String s, TimeUnit timeUnit) {
+    return c.getDurationList(s, timeUnit);
+  }
+
+  @Override
+  public List<Duration> getDurationList(String s) {
+    return c.getDurationList(s);
+  }
 }
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 775c248..4f152fd 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -35,6 +35,7 @@
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Types.class);
 
   public static final int MAX_VARCHAR_LENGTH = 65535;
+  public static final int DEFAULT_TIMESTAMP_PRECISION = 3;
   public static final int UNDEFINED = 0;
 
   public static final MajorType NULL = required(MinorType.NULL);
diff --git a/contrib/storage-cassandra/pom.xml b/contrib/storage-cassandra/pom.xml
index 1d5a5f5..972ccb5 100644
--- a/contrib/storage-cassandra/pom.xml
+++ b/contrib/storage-cassandra/pom.xml
@@ -47,7 +47,7 @@
           <artifactId>commons-logging</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.datastax.cassandra</groupId>
+          <groupId>com.datastax.oss</groupId>
           <artifactId>cassandra-driver-core</artifactId>
         </exclusion>
         <exclusion>
@@ -59,7 +59,12 @@
     <dependency>
       <groupId>com.scylladb</groupId>
       <artifactId>scylla-driver-core</artifactId>
-      <version>3.10.2-scylla-1</version>
+      <version>3.11.2.0</version>
+    </dependency>
+    <dependency>
+      <groupId>com.scylladb</groupId>
+      <artifactId>java-driver-core-shaded</artifactId>
+      <version>4.13.0.0</version>
     </dependency>
     <dependency>
       <groupId>org.ow2.asm</groupId>
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/calcite/adapter/cassandra/CalciteUtils.java b/contrib/storage-cassandra/src/main/java/org/apache/calcite/adapter/cassandra/CalciteUtils.java
index b952eee..9eaa950 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/calcite/adapter/cassandra/CalciteUtils.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/calcite/adapter/cassandra/CalciteUtils.java
@@ -17,7 +17,7 @@
  */
 package org.apache.calcite.adapter.cassandra;
 
-import com.datastax.driver.core.Session;
+import com.datastax.oss.driver.api.core.CqlSession;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptTable;
@@ -36,8 +36,8 @@
 import java.util.stream.Collectors;
 
 public class CalciteUtils {
-  private static final VertexDrelConverterRule VERTEX_DREL_CONVERTER_RULE =
-      new VertexDrelConverterRule(CassandraRel.CONVENTION);
+  private static final RelOptRule VERTEX_DREL_CONVERTER_RULE =
+    VertexDrelConverterRule.create(CassandraRel.CONVENTION);
 
   private static final RelOptRule ENUMERABLE_INTERMEDIATE_PREL_CONVERTER_RULE =
       new EnumerableIntermediatePrelConverterRule(
@@ -58,7 +58,7 @@
     return rules;
   }
 
-  public static Session getSession(SchemaPlus schema) {
+  public static CqlSession getSession(SchemaPlus schema) {
     return schema.unwrap(CassandraDrillSchema.class).getDelegatingSchema().session;
   }
 }
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/CassandraColumnConverterFactory.java b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/CassandraColumnConverterFactory.java
index 861b83f..a3a7dff 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/CassandraColumnConverterFactory.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/CassandraColumnConverterFactory.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.cassandra;
 
-import com.datastax.driver.core.Duration;
+import com.datastax.oss.driver.api.core.data.CqlDuration;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.record.ColumnConverter;
 import org.apache.drill.exec.record.ColumnConverterFactory;
@@ -67,7 +67,7 @@
     switch (readerSchema.type()) {
       case INTERVAL:
         return new ColumnConverter.ScalarColumnConverter(value -> {
-          Duration duration = (Duration) value;
+          CqlDuration duration = (CqlDuration) value;
           writer.setPeriod(Period.parse(duration.toString(), FORMATTER));
         });
       case BIGINT:
@@ -128,7 +128,7 @@
 
     @Override
     protected TypeProtos.MinorType getScalarMinorType(Class<?> clazz) {
-      if (clazz == Duration.class) {
+      if (clazz == CqlDuration.class) {
         return TypeProtos.MinorType.INTERVAL;
       } else if (clazz == Inet4Address.class
         || clazz == UUID.class) {
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/CassandraEnumerablePrelContext.java b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/CassandraEnumerablePrelContext.java
index 91e5608..46bbbcd 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/CassandraEnumerablePrelContext.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/CassandraEnumerablePrelContext.java
@@ -24,6 +24,7 @@
 import org.apache.calcite.linq4j.tree.Expressions;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.convert.ConverterRule;
 import org.apache.calcite.rel.core.TableScan;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.drill.exec.planner.common.DrillRelOptUtil;
@@ -48,7 +49,7 @@
   @Override
   public String generateCode(RelOptCluster cluster, RelNode relNode) {
     RelNode enumerableRel =
-        CassandraToEnumerableConverterRule.INSTANCE.convert(relNode);
+      CassandraToEnumerableConverterRule.DEFAULT_CONFIG.toRule(ConverterRule.class).convert(relNode);
 
     ClassDeclaration classDeclaration = new EnumerableRelImplementor(cluster.getRexBuilder(), Collections.emptyMap())
         .implementRoot((EnumerableRel) enumerableRel, EnumerableRel.Prefer.ARRAY);
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/DrillCassandraLimitRule.java b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/DrillCassandraLimitRule.java
index 6c4baf4..eafc708 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/DrillCassandraLimitRule.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/plan/DrillCassandraLimitRule.java
@@ -29,8 +29,9 @@
   public static final DrillCassandraLimitRule INSTANCE = new DrillCassandraLimitRule();
 
   private DrillCassandraLimitRule() {
-    super(DrillLimitRelBase.class, DrillRel.DRILL_LOGICAL, CassandraRel.CONVENTION,
-        "DrillCassandraLimitRule");
+    super(Config.INSTANCE
+      .withConversion(DrillLimitRelBase.class, DrillRel.DRILL_LOGICAL,
+        CassandraRel.CONVENTION, "DrillCassandraLimitRule"));
   }
 
   public RelNode convert(RelNode relNode) {
diff --git a/contrib/storage-elasticsearch/pom.xml b/contrib/storage-elasticsearch/pom.xml
index e0efa39..87a4562 100644
--- a/contrib/storage-elasticsearch/pom.xml
+++ b/contrib/storage-elasticsearch/pom.xml
@@ -68,7 +68,7 @@
     <dependency>
       <groupId>org.elasticsearch.client</groupId>
       <artifactId>elasticsearch-rest-high-level-client</artifactId>
-      <version>7.0.1</version>
+      <version>7.17.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/CalciteUtils.java b/contrib/storage-elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/CalciteUtils.java
index e7b5fdc..fcae5f7 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/CalciteUtils.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/calcite/adapter/elasticsearch/CalciteUtils.java
@@ -45,8 +45,8 @@
       relOptRule -> BANNED_RULES.stream()
           .noneMatch(banned -> relOptRule.toString().startsWith(banned));
 
-  public static final VertexDrelConverterRule ELASTIC_DREL_CONVERTER_RULE =
-      new VertexDrelConverterRule(ElasticsearchRel.CONVENTION);
+  public static final RelOptRule ELASTIC_DREL_CONVERTER_RULE =
+    VertexDrelConverterRule.create(ElasticsearchRel.CONVENTION);
 
   public static final RelOptRule ENUMERABLE_INTERMEDIATE_PREL_CONVERTER_RULE =
       new EnumerableIntermediatePrelConverterRule(
diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchFilterRule.java b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchFilterRule.java
index 794f2e3..026280c 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchFilterRule.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchFilterRule.java
@@ -36,8 +36,9 @@
   private final Convention out;
 
   private ElasticsearchFilterRule() {
-    super(Filter.class, Convention.NONE, ElasticsearchRel.CONVENTION,
-        "DrillElasticsearchFilterRule");
+    super(Config.INSTANCE
+      .withConversion(Filter.class, Convention.NONE, ElasticsearchRel.CONVENTION,
+        "DrillElasticsearchFilterRule"));
     this.out = ElasticsearchRel.CONVENTION;
   }
 
diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchProjectRule.java b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchProjectRule.java
index f64cac6..047e5c1 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchProjectRule.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/plan/ElasticsearchProjectRule.java
@@ -45,8 +45,9 @@
   public static final ElasticsearchProjectRule INSTANCE = new ElasticsearchProjectRule();
 
   private ElasticsearchProjectRule() {
-    super(Project.class, Convention.NONE, ElasticsearchRel.CONVENTION,
-        "DrillElasticsearchProjectRule");
+    super(Config.INSTANCE
+      .withConversion(Project.class, Convention.NONE, ElasticsearchRel.CONVENTION,
+        "DrillElasticsearchProjectRule"));
     this.out = ElasticsearchRel.CONVENTION;
   }
 
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
index f777d38..a7109b2 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticComplexTypesTest.java
@@ -29,8 +29,8 @@
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.indices.CreateIndexRequest;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
index 4edd3ed..dba28c0 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticInfoSchemaTest.java
@@ -28,8 +28,8 @@
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.indices.CreateIndexRequest;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
index c654d3f..99e520d 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchPlanTest.java
@@ -28,8 +28,8 @@
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.indices.CreateIndexRequest;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
diff --git a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
index 727e831..395dae5 100644
--- a/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
+++ b/contrib/storage-elasticsearch/src/test/java/org/apache/drill/exec/store/elasticsearch/ElasticSearchQueryTest.java
@@ -29,8 +29,8 @@
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.indices.CreateIndexRequest;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
index edba290..7d69d7b 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
@@ -615,7 +615,7 @@
 
     runHBaseSQLVerifyCount(sql, 5);
 
-    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*LESS_OR_EQUAL, a2.*GREATER_OR_EQUAL, b5.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -631,7 +631,7 @@
         + "WHERE\n"
         + "  (row_key >= 'b5' OR row_key <= 'a2') AND (t.f.c1 >= '1' OR t.f.c1 is null)";
 
-    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
+    final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*LESS_OR_EQUAL, a2.*GREATER_OR_EQUAL, b5.*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -734,7 +734,7 @@
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), RowFilter \\(EQUAL, b4\\).*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -753,7 +753,7 @@
 
     runHBaseSQLVerifyCount(sql, 2);
 
-    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
+    final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), RowFilter \\(EQUAL, b4\\).*\""};
     final String[] excludedPlan ={};
     final String sqlHBase = canonizeHBaseSQL(sql);
     PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
index 0963d2a..e64ba9a 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
@@ -117,7 +117,7 @@
         and thus Hive Scan can be chosen instead of Drill native scan because costings allegedly lower for Hive.
         To ensure Drill native scan will be chosen, reduce Hive scan importance to 0.
        */
-      call.getPlanner().setImportance(hiveScanRel, 0.0);
+      call.getPlanner().prune(hiveScanRel);
     } catch (final Exception e) {
       logger.warn("Failed to convert HiveScan to HiveDrillNativeParquetScan", e);
     }
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
index aeeb47c..018f8b3 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveViewTable.java
@@ -21,14 +21,13 @@
 import java.util.List;
 import java.util.stream.Stream;
 
-import org.apache.calcite.plan.RelOptTable;
 import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
 import org.apache.drill.exec.dotdrill.View;
 import org.apache.drill.exec.planner.logical.DrillViewTable;
 import org.apache.drill.exec.planner.sql.SchemaUtilites;
+import org.apache.drill.exec.planner.sql.conversion.DrillViewExpander;
 import org.apache.drill.exec.planner.types.DrillRelDataTypeSystem;
 import org.apache.drill.exec.planner.types.HiveToRelDataTypeConverter;
 import org.apache.drill.exec.store.SchemaConfig;
@@ -62,17 +61,16 @@
    * table.
    *
    * @param context - to rel conversion context
-   * @param rowType - data type of requested columns
    * @param workspaceSchemaPath - path to view in drill, for example: ["hive"]
    * @param tokenSchemaTree - schema created for impersonated user
    * @return - relational representation of expanded Hive view
    */
   @Override
-  protected RelNode expandViewForImpersonatedUser(RelOptTable.ToRelContext context, RelDataType rowType,
+  protected RelNode expandViewForImpersonatedUser(DrillViewExpander context,
                                                   List<String> workspaceSchemaPath, SchemaPlus tokenSchemaTree) {
     SchemaPlus drillHiveSchema = SchemaUtilites.findSchema(tokenSchemaTree, workspaceSchemaPath);
     workspaceSchemaPath = ImmutableList.of();
-    return super.expandViewForImpersonatedUser(context, rowType, workspaceSchemaPath, drillHiveSchema);
+    return super.expandViewForImpersonatedUser(context, workspaceSchemaPath, drillHiveSchema);
   }
 
   /**
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/schema/TestSchemaConversion.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/schema/TestSchemaConversion.java
index 62f3451..fe9f426 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/schema/TestSchemaConversion.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/schema/TestSchemaConversion.java
@@ -48,7 +48,7 @@
   public void testPrimitiveSchema() {
     verifyConversion("int", Types.optional(TypeProtos.MinorType.INT));
     verifyConversion("varchar(123)", TypeProtos.MajorType.newBuilder().setMinorType(TypeProtos.MinorType.VARCHAR).setPrecision(123).build());
-    verifyConversion("timestamp", Types.optional(TypeProtos.MinorType.TIMESTAMP));
+    verifyConversion("timestamp", Types.withPrecision(TypeProtos.MinorType.TIMESTAMP, TypeProtos.DataMode.OPTIONAL, 3));
   }
 
   @Test
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DefaultJdbcDialect.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DefaultJdbcDialect.java
index 74769d2..c8ef31e 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DefaultJdbcDialect.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DefaultJdbcDialect.java
@@ -64,7 +64,7 @@
   public String generateSql(RelOptCluster cluster, RelNode input) {
     final JdbcImplementor jdbcImplementor = new JdbcImplementor(dialect,
         (JavaTypeFactory) cluster.getTypeFactory());
-    final JdbcImplementor.Result result = jdbcImplementor.visitChild(0,
+    final JdbcImplementor.Result result = jdbcImplementor.visitRoot(
       input.accept(SubsetRemover.INSTANCE));
     return result.asStatement().toSqlString(dialect).getSql();
   }
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DrillJdbcConvention.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DrillJdbcConvention.java
index 07b12b7..3639628 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DrillJdbcConvention.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/DrillJdbcConvention.java
@@ -72,7 +72,7 @@
     ImmutableSet.Builder<RelOptRule> builder = ImmutableSet.<RelOptRule>builder()
       .addAll(calciteJdbcRules)
       .add(new JdbcIntermediatePrelConverterRule(this, username))
-      .add(new VertexDrelConverterRule(this))
+      .add(VertexDrelConverterRule.create(this))
       .add(RuleInstance.FILTER_SET_OP_TRANSPOSE_RULE)
       .add(RuleInstance.PROJECT_REMOVE_RULE);
     for (RelTrait inputTrait : inputTraits) {
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcDialect.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcDialect.java
index 4a58bfc..59816f3 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcDialect.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcDialect.java
@@ -67,7 +67,7 @@
   public String generateSql(RelOptCluster cluster, RelNode input) {
     final JdbcImplementor jdbcImplementor = new ClickhouseJdbcImplementor(dialect,
       (JavaTypeFactory) cluster.getTypeFactory());
-    final JdbcImplementor.Result result = jdbcImplementor.visitChild(0,
+    final JdbcImplementor.Result result = jdbcImplementor.visitRoot(
       input.accept(SubsetRemover.INSTANCE));
     return result.asStatement().toSqlString(dialect).getSql();
   }
diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcImplementor.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcImplementor.java
index c3af184..dd9e862 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcImplementor.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/clickhouse/ClickhouseJdbcImplementor.java
@@ -17,16 +17,22 @@
  */
 package org.apache.drill.exec.store.jdbc.clickhouse;
 
+import org.apache.calcite.adapter.jdbc.JdbcTable;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.TableScan;
+import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.calcite.adapter.java.JavaTypeFactory;
 import org.apache.calcite.adapter.jdbc.JdbcImplementor;
-import org.apache.calcite.adapter.jdbc.JdbcTableScan;
 import org.apache.calcite.sql.SqlDialect;
 import org.apache.calcite.sql.SqlIdentifier;
 
 import java.util.Iterator;
 
+import static java.util.Objects.requireNonNull;
+
 public class ClickhouseJdbcImplementor extends JdbcImplementor {
   public ClickhouseJdbcImplementor(SqlDialect dialect,
                                    JavaTypeFactory typeFactory) {
@@ -34,8 +40,8 @@
   }
 
   @Override
-  public Result visit(JdbcTableScan scan) {
-    SqlIdentifier sqlIdentifier = scan.jdbcTable.tableName();
+  public Result visit(TableScan scan) {
+    SqlIdentifier sqlIdentifier = getSqlTargetTable(scan);
     Iterator<String> iter = sqlIdentifier.names.iterator();
     Preconditions.checkArgument(sqlIdentifier.names.size() == 3,
       "size of clickhouse table names:[%s] is not 3", sqlIdentifier.toString());
@@ -43,4 +49,14 @@
     sqlIdentifier.setNames(ImmutableList.copyOf(iter), null);
     return result(sqlIdentifier, ImmutableList.of(Clause.FROM), scan, null);
   }
+
+  private static SqlIdentifier getSqlTargetTable(RelNode e) {
+    // Use the foreign catalog, schema and table names, if they exist,
+    // rather than the qualified name of the shadow table in Calcite.
+    RelOptTable table = requireNonNull(e.getTable());
+    return table.maybeUnwrap(JdbcTable.class)
+      .map(JdbcTable::tableName)
+      .orElseGet(() ->
+        new SqlIdentifier(table.getQualifiedName(), SqlParserPos.ZERO));
+  }
 }
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithClickhouse.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithClickhouse.java
index d2ecb83..8b8f520 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithClickhouse.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithClickhouse.java
@@ -194,7 +194,7 @@
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("EXPR$0", "EXPR$1", "EXPR$2")
-        .baselineValues(4L, 88L, 1.618033988749895)
+        .baselineValues(4L, 88, 1.618033988749895)
         .go();
   }
 
@@ -207,7 +207,7 @@
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("EXPR$1", "EXPR$0", "EXPR$2")
-        .baselineValues(1.618033988749895, 88L, 4L)
+        .baselineValues(1.618033988749895, 88, 4L)
         .go();
   }
 
@@ -220,8 +220,8 @@
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("ID", "FIBONACCI_SUM", "golden_ratio")
-        .baselineValues(1, 88L, 1.618033988749895)
-        .baselineValues(2, 88L, 1.618033988749895)
+        .baselineValues(1, 88, 1.618033988749895)
+        .baselineValues(2, 88, 1.618033988749895)
         .go();
   }
 
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
index 3f9a5b5..e96da0d 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
@@ -48,12 +48,10 @@
 
 /**
  * JDBC storage plugin tests against MySQL.
- * Note: it requires libaio1.so library on Linux
  */
 @Category(JdbcStorageTest.class)
 public class TestJdbcPluginWithMySQLIT extends ClusterTest {
 
-  private static final String DOCKER_IMAGE_MYSQL = "mysql:5.7.27";
   private static final String DOCKER_IMAGE_MARIADB = "mariadb:10.6.0";
   private static JdbcDatabaseContainer<?> jdbcContainer;
 
@@ -63,12 +61,8 @@
     String osName = System.getProperty("os.name").toLowerCase();
     String mysqlDBName = "drill_mysql_test";
 
-    DockerImageName imageName;
-    if (osName.startsWith("linux") && "aarch64".equals(System.getProperty("os.arch"))) {
-      imageName = DockerImageName.parse(DOCKER_IMAGE_MARIADB).asCompatibleSubstituteFor("mysql");
-    } else {
-      imageName = DockerImageName.parse(DOCKER_IMAGE_MYSQL);
-    }
+    DockerImageName imageName = DockerImageName.parse(DOCKER_IMAGE_MARIADB)
+      .asCompatibleSubstituteFor("mysql");
 
     jdbcContainer = new MySQLContainer<>(imageName)
             .withExposedPorts(3306)
@@ -282,7 +276,7 @@
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("EXPR$0", "EXPR$1", "EXPR$2")
-        .baselineValues(4L, 88L, 1.618033988749895)
+        .baselineValues(4L, 88, BigDecimal.valueOf(1.618033988749895))
         .go();
   }
 
@@ -293,9 +287,9 @@
 
     testBuilder()
         .sqlQuery(query)
-        .unOrdered()
+        .ordered()
         .baselineColumns("EXPR$1", "EXPR$0", "EXPR$2")
-        .baselineValues(1.618033988749895, 88L, 4L)
+        .baselineValues(BigDecimal.valueOf(1.618033988749895), 88, 4L)
         .go();
   }
 
@@ -308,8 +302,8 @@
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("ID", "FIBONACCI_SUM", "golden_ratio")
-        .baselineValues(1, 88L, 1.618033988749895)
-        .baselineValues(2, 88L, 1.618033988749895)
+        .baselineValues(1, 88, BigDecimal.valueOf(1.618033988749895))
+        .baselineValues(2, 88, BigDecimal.valueOf(1.618033988749895))
         .go();
   }
 
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixConvention.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixConvention.java
index 48ddaad..b0b785d 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixConvention.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixConvention.java
@@ -70,7 +70,7 @@
     ImmutableSet.Builder<RelOptRule> builder = ImmutableSet.<RelOptRule>builder()
       .addAll(calciteJdbcRules)
       .add(new PhoenixIntermediatePrelConverterRule(this))
-      .add(new VertexDrelConverterRule(this))
+      .add(VertexDrelConverterRule.create(this))
       .add(RuleInstance.FILTER_SET_OP_TRANSPOSE_RULE)
       .add(RuleInstance.PROJECT_REMOVE_RULE);
     for (RelTrait inputTrait : inputTraits) {
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
index 74116ea..29a45d8 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixImplementor.java
@@ -67,7 +67,7 @@
   @Override
   public Result visit(Filter e) {
     final RelNode input = e.getInput();
-    Result x = visitChild(0, input);
+    Result x = visitRoot(input);
     parseCorrelTable(e, x);
     if (input instanceof Aggregate) {
       return super.visit(e);
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixJoinRule.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixJoinRule.java
index 19a2984..885b662 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixJoinRule.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixJoinRule.java
@@ -34,7 +34,8 @@
   private final JdbcConvention out;
 
   public PhoenixJoinRule(RelTrait in, JdbcConvention out) {
-    super(LogicalJoin.class, in, out, "PhoenixJoinRule");
+    super(Config.INSTANCE
+      .withConversion(LogicalJoin.class, in, out, "PhoenixJoinRule"));
     this.out = out;
   }
 
@@ -49,7 +50,7 @@
       newInputs.add(input);
     }
     try {
-      JdbcJoin jdbcJoin = new JdbcJoin(
+      return new JdbcJoin(
           join.getCluster(),
           join.getTraitSet().replace(out),
           newInputs.get(0),
@@ -57,7 +58,6 @@
           join.getCondition(),
           join.getVariablesSet(),
           join.getJoinType());
-      return jdbcJoin;
     } catch (InvalidRelException e) {
       return null;
     }
diff --git a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixPrel.java b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixPrel.java
index 47a71e6..cb289dc 100644
--- a/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixPrel.java
+++ b/contrib/storage-phoenix/src/main/java/org/apache/drill/exec/store/phoenix/rules/PhoenixPrel.java
@@ -55,7 +55,7 @@
     convention = (PhoenixConvention) input.getTraitSet().getTrait(ConventionTraitDef.INSTANCE);
     final SqlDialect dialect = convention.getPlugin().getDialect();
     final JdbcImplementor jdbcImplementor = new PhoenixImplementor(dialect, (JavaTypeFactory) getCluster().getTypeFactory());
-    final JdbcImplementor.Result result = jdbcImplementor.visitChild(0, input.accept(SubsetRemover.INSTANCE));
+    final JdbcImplementor.Result result = jdbcImplementor.visitRoot(input.accept(SubsetRemover.INSTANCE));
     sql = result.asStatement().toSqlString(dialect).getSql();
     rowType = input.getRowType();
   }
diff --git a/docs/dev/Calcite.md b/docs/dev/Calcite.md
deleted file mode 100644
index a733047..0000000
--- a/docs/dev/Calcite.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Drill-Calcite repository
-
-Currently, Drill uses Apache Calcite with additional changes, required for Drill.
-
-Repository with source code is placed [here](https://github.com/vvysotskyi/drill-calcite). For backward 
-compatibility, a couple of previous Drill Calcite branches were pushed into this repo.
-
-Drill committers who need write permissions to the repository, should notify its owner.
-
-# Process of updating Calcite version
-
-- Push required changes to the existing branch, or create new branch.
- *Though this repository contains Drill specific commits, it is forbidden to add additional specific commits which
-  were not merged to Apache Calcite master if it wasn't discussed in Drill community first.*
-- The last commit must be a commit which updates the version number.
-- Create and push tag for commit with the version update. Tag name should match the version, for example,
- `1.18.0-drill-r0`, `1.18.0-drill-r1`, `1.18.0-drill-r2` and so on.
-- Bump-up Drill Calcite version in Drill pom.xml file.
-- Update [wiki](https://github.com/vvysotskyi/drill-calcite/wiki) to contain relevant information after update
- (specific commits, current version, etc.)
-
-For more info about the update process or Drill-specific commits, please refer to
- [drill-calcite wiki](https://github.com/vvysotskyi/drill-calcite/wiki).
diff --git a/exec/java-exec/src/main/codegen/includes/compoundIdentifier.ftl b/exec/java-exec/src/main/codegen/includes/compoundIdentifier.ftl
index 1e8efb4..b091c5e 100644
--- a/exec/java-exec/src/main/codegen/includes/compoundIdentifier.ftl
+++ b/exec/java-exec/src/main/codegen/includes/compoundIdentifier.ftl
@@ -48,7 +48,7 @@
         (
           <LBRACKET>
           index = UnsignedIntLiteral()
-          <RBRACKET> 
+          <RBRACKET>
           {
               builder.addIndex(index, getPos());
           }
@@ -57,4 +57,54 @@
     {
       return builder.build();
     }
-}
\ No newline at end of file
+}
+
+/**
+ * Parses a compound identifier in the FROM clause.
+ */
+SqlIdentifier CompoundTableIdentifier() :
+{
+    final List<String> nameList = new ArrayList<String>();
+    final List<SqlParserPos> posList = new ArrayList<SqlParserPos>();
+}
+{
+    AddTableIdentifierSegment(nameList, posList)
+    (
+        LOOKAHEAD(2)
+        <DOT>
+          AddTableIdentifierSegment(nameList, posList)
+    )*
+    {
+        SqlParserPos pos = SqlParserPos.sum(posList);
+        return new SqlIdentifier(nameList, null, pos, posList);
+    }
+}
+
+/**
+ * Parses a comma-separated list of compound identifiers.
+ */
+void AddCompoundIdentifierTypeCommaList(List<SqlNode> list, List<SqlNode> extendList) :
+{
+}
+{
+    AddCompoundIdentifierType(list, extendList)
+    (<COMMA> AddCompoundIdentifierType(list, extendList))*
+}
+
+/**
+ * List of compound identifiers in parentheses. The position extends from the
+ * open parenthesis to the close parenthesis.
+ */
+Pair<SqlNodeList, SqlNodeList> ParenthesizedCompoundIdentifierList() :
+{
+    final Span s;
+    final List<SqlNode> list = new ArrayList<SqlNode>();
+    final List<SqlNode> extendList = new ArrayList<SqlNode>();
+}
+{
+    <LPAREN> { s = span(); }
+    AddCompoundIdentifierTypeCommaList(list, extendList)
+    <RPAREN> {
+        return Pair.of(new SqlNodeList(list, s.end(this)), new SqlNodeList(extendList, s.end(this)));
+    }
+}
diff --git a/exec/java-exec/src/main/codegen/includes/parserImpls.ftl b/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
index 4efbeb4..bb3b3c7 100644
--- a/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
+++ b/exec/java-exec/src/main/codegen/includes/parserImpls.ftl
@@ -34,6 +34,7 @@
     }
   }
 -->
+
 /**
  * Parses statement
  *   SHOW TABLES [{FROM | IN} db_name] [LIKE 'pattern' | WHERE expr]
@@ -166,9 +167,7 @@
     Pair<SqlNodeList, SqlNodeList> fieldList;
 }
 {
-    <LPAREN>
     fieldList = ParenthesizedCompoundIdentifierList()
-    <RPAREN>
     {
         for(SqlNode node : fieldList.left)
         {
@@ -688,25 +687,6 @@
    }
 }
 
-<#if !parser.includeCompoundIdentifier >
-/**
-* Parses a comma-separated list of simple identifiers.
-*/
-Pair<SqlNodeList, SqlNodeList> ParenthesizedCompoundIdentifierList() :
-{
-    List<SqlIdentifier> list = new ArrayList<SqlIdentifier>();
-    SqlIdentifier id;
-}
-{
-    id = CompoundIdentifier() {list.add(id);}
-    (
-   <COMMA> id = CompoundIdentifier() {list.add(id);}) *
-    {
-       return Pair.of(new SqlNodeList(list, getPos()), null);
-    }
-}
-</#if>
-
 /**
  * Parses a analyze statements:
  * <ul>
@@ -733,9 +713,7 @@
     (
         tableRef = CompoundIdentifier()
     |
-        <TABLE> { s = span(); } <LPAREN>
-        tableRef = TableFunctionCall(s.pos())
-        <RPAREN>
+        tableRef = TableFunctionCall()
     )
     [
         (
diff --git a/exec/java-exec/src/main/codegen/templates/ConvertToNullableHolder.java b/exec/java-exec/src/main/codegen/templates/ConvertToNullableHolder.java
index 875038c..cd59ff8 100644
--- a/exec/java-exec/src/main/codegen/templates/ConvertToNullableHolder.java
+++ b/exec/java-exec/src/main/codegen/templates/ConvertToNullableHolder.java
@@ -61,7 +61,7 @@
   <#elseif (minor.class == "IntervalDay")>
     output.days = input.days;
     output.milliseconds = input.milliseconds;
-  <#elseif minor.class.startsWith("Decimal")>
+  <#elseif minor.class.contains("Decimal")>
     output.scale = input.scale;
     output.precision = input.precision;
     <#if minor.class.startsWith("Decimal28") || minor.class.startsWith("Decimal38")>
@@ -78,6 +78,10 @@
     output.value = input.value;
   </#if>
 <#else>
+  <#if minor.class.contains("Decimal")>
+    output.scale = input.scale;
+    output.precision = input.precision;
+  </#if>
     output.start = input.start;
     output.end = input.end;
     output.buffer = input.buffer;
@@ -85,4 +89,4 @@
   }
 }
 </#list>
-</#list>
\ No newline at end of file
+</#list>
diff --git a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicSchema.java b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicSchema.java
index 2a7987b..0f4a8ca 100644
--- a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicSchema.java
@@ -53,6 +53,19 @@
   }
 
   @Override
+  public CalciteSchema add(String name, Schema schema) {
+    CalciteSchema calciteSchema =
+      new DynamicSchema(this, schema, name);
+    subSchemaMap.put(name, calciteSchema);
+    return calciteSchema;
+  }
+
+  @Override
+  protected TableEntry getImplicitTable(String tableName, boolean caseSensitive) {
+    return super.getImplicitTable(tableName, true);
+  }
+
+  @Override
   public void close() throws Exception {
     for (CalciteSchema cs : subSchemaMap.map().values()) {
       AutoCloseables.closeWithUserException(cs.plus().unwrap(AbstractSchema.class));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index 88eca6d..93cb664 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -910,7 +910,7 @@
     }
 
     private JVar getDateTimeKey(PathSegment segment, ClassGenerator<?> generator, Class<?> javaClass, String methodName) {
-      String strValue = (String) segment.getOriginalValue();
+      String strValue = segment.getNameSegment().getPath();
 
       JClass dateUtilityClass = generator.getModel().ref(org.apache.drill.exec.expr.fn.impl.DateUtility.class);
       JExpression newKeyObject = dateUtilityClass.staticInvoke(methodName).arg(JExpr.lit(strValue));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
index b639de8..2671b8c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
@@ -490,6 +490,23 @@
     }
   }
 
+  @FunctionTemplate(name = "castTIMESTAMP", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
+  public static class CastTimeToTimeStamp implements DrillSimpleFunc {
+    @Param
+    TimeHolder in;
+    @Output
+    TimeStampHolder out;
+
+    @Override
+    public void setup() {
+    }
+
+    @Override
+    public void eval() {
+      out.value = in.value;
+    }
+  }
+
   @FunctionTemplate(name = "castTIME", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
   public static class CastDateToTime implements DrillSimpleFunc {
     @Param
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
index 7bce81b..b3d2414 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
@@ -162,7 +162,8 @@
           (org.apache.drill.exec.record.metadata.TupleMetadata) schemaHolder.obj;
 
       if (!resolvedSchema.isEquivalent(currentSchema)) {
-        throw new UnsupportedOperationException("merge_schema function does not support schema changes.");
+        schemaHolder.obj = org.apache.drill.exec.physical.impl.scan.v3.FixedReceiver.Builder
+          .mergeSchemas(currentSchema, resolvedSchema);
       }
     }
 
@@ -215,7 +216,8 @@
           (org.apache.drill.exec.record.metadata.TupleMetadata) schemaHolder.obj;
 
       if (!resolvedSchema.isEquivalent(currentSchema)) {
-        throw new UnsupportedOperationException("merge_schema function does not support schema changes.");
+        schemaHolder.obj = org.apache.drill.exec.physical.impl.scan.v3.FixedReceiver.Builder
+          .mergeSchemas(currentSchema, resolvedSchema);
       }
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
index 5499003..97bdde6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
@@ -23,6 +23,7 @@
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.sql.conversion.DrillViewExpander;
 import org.apache.drill.exec.proto.UserBitShared.UserCredentials;
 import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
 
@@ -78,6 +79,7 @@
   private final UserCredentials queryUserCredentials;
   private final ObjectIntHashMap<String> userTokens = new ObjectIntHashMap<>();
   private final boolean impersonationEnabled;
+  private DrillViewExpander viewExpander;
 
   public ViewExpansionContext(QueryContext queryContext) {
     this(queryContext.getConfig(), queryContext);
@@ -150,6 +152,14 @@
     logger.debug("Released view expansion token issued for user '{}'", viewOwner);
   }
 
+  public void setViewExpander(DrillViewExpander viewExpander) {
+    this.viewExpander = viewExpander;
+  }
+
+  public DrillViewExpander getViewExpander() {
+    return viewExpander;
+  }
+
   /**
    * Represents token issued to a view owner for expanding the view.
    */
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/DrillRelBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/DrillRelBuilder.java
index 454c458..ebc313e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/DrillRelBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/DrillRelBuilder.java
@@ -30,6 +30,7 @@
 import org.apache.calcite.util.Util;
 
 public class DrillRelBuilder extends RelBuilder {
+  public static final int DISABLE_MERGE_PROJECT = -1;
   private final RelFactories.FilterFactory filterFactory;
 
   protected DrillRelBuilder(Context context, RelOptCluster cluster, RelOptSchema relOptSchema) {
@@ -59,7 +60,11 @@
   /** Creates a {@link RelBuilderFactory}, a partially-created DrillRelBuilder.
    * Just add a {@link RelOptCluster} and a {@link RelOptSchema} */
   public static RelBuilderFactory proto(final Context context) {
-    return (cluster, schema) -> new DrillRelBuilder(context, cluster, schema);
+    Config conf = Util.first(context.unwrap(Config.class), Config.DEFAULT)
+      .withBloat(DISABLE_MERGE_PROJECT)
+      .withSimplify(false);
+    Context drillContext = Contexts.chain(Contexts.of(conf), context);
+    return (cluster, schema) -> new DrillRelBuilder(drillContext, cluster, schema);
   }
 
   /** Creates a {@link RelBuilderFactory} that uses a given set of factories. */
@@ -67,13 +72,4 @@
     return proto(Contexts.of(factories));
   }
 
-  /**
-   * Disables combining of consecutive {@link org.apache.calcite.rel.core.Project} nodes.
-   * See comments under CALCITE-2470 for details.
-   * @return false
-   */
-  @Override
-  protected boolean shouldMergeProject() {
-    return false;
-  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
index 4a4f385..524400d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
@@ -19,6 +19,7 @@
 
 import org.apache.drill.exec.planner.logical.ConvertMetadataAggregateToDirectScanRule;
 import org.apache.drill.exec.planner.logical.DrillDistinctJoinToSemiJoinRule;
+import org.apache.drill.exec.planner.logical.DrillReduceExpressionsRule;
 import org.apache.drill.exec.planner.physical.MetadataAggPrule;
 import org.apache.drill.exec.planner.physical.MetadataControllerPrule;
 import org.apache.drill.exec.planner.physical.MetadataHandlerPrule;
@@ -54,7 +55,7 @@
 import org.apache.drill.exec.planner.logical.DrillPushProjectPastJoinRule;
 import org.apache.drill.exec.planner.logical.DrillPushRowKeyJoinToScanRule;
 import org.apache.drill.exec.planner.logical.DrillReduceAggregatesRule;
-import org.apache.drill.exec.planner.logical.DrillReduceExpressionsRule;
+import org.apache.drill.exec.planner.logical.ReduceAndSimplifyExpressionsRules;
 import org.apache.drill.exec.planner.logical.DrillRelFactories;
 import org.apache.drill.exec.planner.logical.DrillScanRule;
 import org.apache.drill.exec.planner.logical.DrillSortRule;
@@ -232,13 +233,6 @@
     }
   },
 
-  PRE_LOGICAL_PLANNING("Planning with Hep planner only for rules, which are failed for Volcano planner") {
-    @Override
-    public RuleSet getRules (OptimizerRulesContext context, Collection<StoragePlugin> plugins) {
-      return PlannerPhase.getSetOpTransposeRules();
-    }
-  },
-
   TRANSITIVE_CLOSURE("Transitive closure") {
     @Override
     public RuleSet getRules(OptimizerRulesContext context, Collection<StoragePlugin> plugins) {
@@ -269,12 +263,18 @@
   }
 
   static final RelOptRule DRILL_JOIN_TO_MULTIJOIN_RULE =
-      new JoinToMultiJoinRule(DrillJoinRel.class, DrillRelFactories.LOGICAL_BUILDER);
+    JoinToMultiJoinRule.Config.DEFAULT
+      .withOperandFor(DrillJoinRel.class)
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
+
   static final RelOptRule DRILL_LOPT_OPTIMIZE_JOIN_RULE =
-      new LoptOptimizeJoinRule(DrillRelBuilder.proto(
-          DrillRelFactories.DRILL_LOGICAL_JOIN_FACTORY,
-          DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY,
-          DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY));
+    LoptOptimizeJoinRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelBuilder.proto(
+        DrillRelFactories.DRILL_LOGICAL_JOIN_FACTORY,
+        DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY,
+        DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY))
+      .toRule();
 
   /**
    * Get a list of logical rules that can be turned on or off by session/system options.
@@ -298,7 +298,10 @@
 
     if (ps.isConstantFoldingEnabled()) {
       // TODO - DRILL-2218
-      userConfigurableRules.add(RuleInstance.PROJECT_INSTANCE);
+      userConfigurableRules.add(ReduceAndSimplifyExpressionsRules.PROJECT_INSTANCE_DRILL);
+      userConfigurableRules.add(ReduceAndSimplifyExpressionsRules.FILTER_INSTANCE_DRILL);
+      userConfigurableRules.add(ReduceAndSimplifyExpressionsRules.CALC_INSTANCE_DRILL);
+      userConfigurableRules.add(DrillReduceExpressionsRule.PROJECT_INSTANCE_DRILL);
       userConfigurableRules.add(DrillReduceExpressionsRule.FILTER_INSTANCE_DRILL);
       userConfigurableRules.add(DrillReduceExpressionsRule.CALC_INSTANCE_DRILL);
     }
@@ -407,6 +410,8 @@
       basicRules.add(RuleInstance.JOIN_TO_SEMI_JOIN_RULE);
     }
 
+    basicRules.addAll(getSetOpTransposeRules());
+
     return RuleSets.ofList(basicRules.build());
   }
 
@@ -614,7 +619,6 @@
 
   /**
    *  Get an immutable list of rules to transpose SetOp(Union) operator with other operators.<p>
-   *  Note: Used by Hep planner only (failed for Volcano planner - CALCITE-1271)
    *
    * @return SetOp(Union) transpose rules
    */
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/RuleInstance.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/RuleInstance.java
index bbcd075..66628bb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/RuleInstance.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/RuleInstance.java
@@ -20,14 +20,7 @@
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.plan.volcano.AbstractConverter;
-import org.apache.calcite.rel.core.Aggregate;
 import org.apache.calcite.rel.core.Join;
-import org.apache.calcite.rel.core.Project;
-import org.apache.calcite.rel.logical.LogicalAggregate;
-import org.apache.calcite.rel.logical.LogicalCalc;
-import org.apache.calcite.rel.logical.LogicalJoin;
-import org.apache.calcite.rel.logical.LogicalProject;
-import org.apache.calcite.rel.logical.LogicalUnion;
 import org.apache.calcite.rel.rules.AggregateExpandDistinctAggregatesRule;
 import org.apache.calcite.rel.rules.AggregateRemoveRule;
 import org.apache.calcite.rel.rules.FilterCorrelateRule;
@@ -55,16 +48,17 @@
  */
 public interface RuleInstance {
 
-  ReduceExpressionsRule PROJECT_INSTANCE =
-      new ReduceExpressionsRule.ProjectReduceExpressionsRule(LogicalProject.class, true,
-          DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule UNION_TO_DISTINCT_RULE =
+    UnionToDistinctRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  UnionToDistinctRule UNION_TO_DISTINCT_RULE =
-      new UnionToDistinctRule(LogicalUnion.class,
-          DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule SEMI_JOIN_PROJECT_RULE = new SemiJoinRule.ProjectToSemiJoinRule(
+    SemiJoinRule.ProjectToSemiJoinRule.ProjectToSemiJoinRuleConfig.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .withDescription("DrillSemiJoinRule:project")
+      .as(SemiJoinRule.ProjectToSemiJoinRule.ProjectToSemiJoinRuleConfig.class)) {
 
-  SemiJoinRule SEMI_JOIN_PROJECT_RULE = new SemiJoinRule.ProjectToSemiJoinRule(Project.class, Join.class, Aggregate.class,
-          DrillRelFactories.LOGICAL_BUILDER, "DrillSemiJoinRule:project") {
     public boolean matches(RelOptRuleCall call) {
       Preconditions.checkArgument(call.rel(1) instanceof Join);
       Join join = call.rel(1);
@@ -72,74 +66,105 @@
     }
   };
 
-  SemiJoinRule JOIN_TO_SEMI_JOIN_RULE = new SemiJoinRule.JoinToSemiJoinRule(Join.class, Aggregate.class,
-    DrillRelFactories.LOGICAL_BUILDER, "DrillJoinToSemiJoinRule") {
+  SemiJoinRule JOIN_TO_SEMI_JOIN_RULE = new SemiJoinRule.JoinToSemiJoinRule(
+    SemiJoinRule.JoinToSemiJoinRule.JoinToSemiJoinRuleConfig.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .withDescription("DrillJoinToSemiJoinRule")
+      .as(SemiJoinRule.JoinToSemiJoinRule.JoinToSemiJoinRuleConfig.class)) {
     public boolean matches(RelOptRuleCall call) {
       Join join = call.rel(0);
       return !(join.getCondition().isAlwaysTrue() || join.getCondition().isAlwaysFalse());
     }
   };
 
-  JoinPushExpressionsRule JOIN_PUSH_EXPRESSIONS_RULE =
-      new JoinPushExpressionsRule(Join.class,
-          DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule JOIN_PUSH_EXPRESSIONS_RULE =
+    JoinPushExpressionsRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  FilterMergeRule FILTER_MERGE_RULE =
-      new FilterMergeRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule FILTER_MERGE_RULE =
+    FilterMergeRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  FilterMergeRule DRILL_FILTER_MERGE_RULE =
-      new FilterMergeRule(DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY));
+  RelOptRule DRILL_FILTER_MERGE_RULE =
+    FilterMergeRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY))
+      .toRule();
 
-  FilterCorrelateRule FILTER_CORRELATE_RULE =
-      new FilterCorrelateRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule FILTER_CORRELATE_RULE =
+    FilterCorrelateRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  AggregateRemoveRule AGGREGATE_REMOVE_RULE =
-      new AggregateRemoveRule(LogicalAggregate.class, DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule AGGREGATE_REMOVE_RULE =
+    AggregateRemoveRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  AggregateExpandDistinctAggregatesRule AGGREGATE_EXPAND_DISTINCT_AGGREGATES_RULE =
-      new AggregateExpandDistinctAggregatesRule(LogicalAggregate.class, false,
-          DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule AGGREGATE_EXPAND_DISTINCT_AGGREGATES_RULE =
+    AggregateExpandDistinctAggregatesRule.Config.JOIN
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
   /**
    * Instance of the rule that works on logical joins only, and pushes to the
    * right.
    */
   RelOptRule JOIN_PUSH_THROUGH_JOIN_RULE_RIGHT =
-      new JoinPushThroughJoinRule("JoinPushThroughJoinRule:right", true,
-          LogicalJoin.class, DrillRelFactories.LOGICAL_BUILDER);
+    JoinPushThroughJoinRule.Config.RIGHT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
   /**
    * Instance of the rule that works on logical joins only, and pushes to the
    * left.
    */
   RelOptRule JOIN_PUSH_THROUGH_JOIN_RULE_LEFT =
-      new JoinPushThroughJoinRule("JoinPushThroughJoinRule:left", false,
-          LogicalJoin.class, DrillRelFactories.LOGICAL_BUILDER);
+    JoinPushThroughJoinRule.Config.LEFT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  ReduceExpressionsRule CALC_INSTANCE =
-      new ReduceExpressionsRule.CalcReduceExpressionsRule(LogicalCalc.class, true,
-          DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule CALC_INSTANCE =
+    ReduceExpressionsRule.CalcReduceExpressionsRule.CalcReduceExpressionsRuleConfig.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  FilterSetOpTransposeRule FILTER_SET_OP_TRANSPOSE_RULE =
-      new FilterSetOpTransposeRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule FILTER_SET_OP_TRANSPOSE_RULE =
+    FilterSetOpTransposeRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  ProjectSetOpTransposeRule PROJECT_SET_OP_TRANSPOSE_RULE =
-      new ProjectSetOpTransposeRule(DrillConditions.PRESERVE_ITEM, DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule PROJECT_SET_OP_TRANSPOSE_RULE =
+    ProjectSetOpTransposeRule.Config.DEFAULT
+      .withPreserveExprCondition(DrillConditions.PRESERVE_ITEM)
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  ProjectRemoveRule PROJECT_REMOVE_RULE =
-      new ProjectRemoveRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule PROJECT_REMOVE_RULE =
+    ProjectRemoveRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  ProjectToWindowRule PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW_RULE =
-      new ProjectToWindowRule.ProjectToLogicalProjectAndWindowRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule PROJECT_TO_LOGICAL_PROJECT_AND_WINDOW_RULE =
+    ProjectToWindowRule.ProjectToLogicalProjectAndWindowRule.ProjectToLogicalProjectAndWindowRuleConfig.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  SortRemoveRule SORT_REMOVE_RULE =
-      new SortRemoveRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule SORT_REMOVE_RULE =
+    SortRemoveRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  ProjectWindowTransposeRule PROJECT_WINDOW_TRANSPOSE_RULE =
-      new ProjectWindowTransposeRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule PROJECT_WINDOW_TRANSPOSE_RULE =
+    ProjectWindowTransposeRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  AbstractConverter.ExpandConversionRule EXPAND_CONVERSION_RULE =
-      new AbstractConverter.ExpandConversionRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule EXPAND_CONVERSION_RULE =
+    AbstractConverter.ExpandConversionRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
   /**
    * Instance of the rule that infers predicates from on a
@@ -147,19 +172,29 @@
    * {@link org.apache.calcite.rel.core.Filter}s if those predicates can be pushed
    * to its inputs.
    */
-  JoinPushTransitivePredicatesRule DRILL_JOIN_PUSH_TRANSITIVE_PREDICATES_RULE =
-      new JoinPushTransitivePredicatesRule(Join.class, DrillRelBuilder.proto(
-          DrillRelFactories.DRILL_LOGICAL_JOIN_FACTORY, DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY));
+  RelOptRule DRILL_JOIN_PUSH_TRANSITIVE_PREDICATES_RULE =
+    JoinPushTransitivePredicatesRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelBuilder.proto(
+        DrillRelFactories.DRILL_LOGICAL_JOIN_FACTORY, DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY))
+      .toRule();
 
-  FilterRemoveIsNotDistinctFromRule REMOVE_IS_NOT_DISTINCT_FROM_RULE =
-      new FilterRemoveIsNotDistinctFromRule(DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY));
+  RelOptRule REMOVE_IS_NOT_DISTINCT_FROM_RULE =
+    FilterRemoveIsNotDistinctFromRule.Config.DEFAULT
+      .withRelBuilderFactory(DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY))
+      .toRule();
 
-  SubQueryRemoveRule SUB_QUERY_FILTER_REMOVE_RULE =
-      new SubQueryRemoveRule.SubQueryFilterRemoveRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule SUB_QUERY_FILTER_REMOVE_RULE =
+      SubQueryRemoveRule.Config.FILTER
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .toRule();
 
-  SubQueryRemoveRule SUB_QUERY_PROJECT_REMOVE_RULE =
-      new SubQueryRemoveRule.SubQueryProjectRemoveRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule SUB_QUERY_PROJECT_REMOVE_RULE =
+    SubQueryRemoveRule.Config.PROJECT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
-  SubQueryRemoveRule SUB_QUERY_JOIN_REMOVE_RULE =
-      new SubQueryRemoveRule.SubQueryJoinRemoveRule(DrillRelFactories.LOGICAL_BUILDER);
+  RelOptRule SUB_QUERY_JOIN_REMOVE_RULE =
+    SubQueryRemoveRule.Config.JOIN
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillFilterRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillFilterRelBase.java
index 7125815..abd129b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillFilterRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillFilterRelBase.java
@@ -55,7 +55,8 @@
 
     // save the number of conjuncts that make up the filter condition such
     // that repeated calls to the costing function can use the saved copy
-    conjunctions = RelOptUtil.conjunctions(condition);
+    conjunctions = RelOptUtil.conjunctions(
+      RexUtil.expandSearch(cluster.getRexBuilder(), null, condition));
     numConjuncts = conjunctions.size();
     // assert numConjuncts >= 1;
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
index 6150bf3..03dd487 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillJoinRelBase.java
@@ -61,7 +61,7 @@
   public DrillJoinRelBase(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right, RexNode condition,
       JoinRelType joinType) {
     super(cluster, traits, left, right, condition,
-        CorrelationId.setOf(Collections.<String> emptySet()), joinType);
+        CorrelationId.setOf(Collections.emptySet()), joinType);
     this.joinRowFactor = PrelUtil.getPlannerSettings(cluster.getPlanner()).getRowCountEstimateFactor();
   }
 
@@ -104,12 +104,12 @@
       return joinRowFactor * this.getLeft().estimateRowCount(mq) * this.getRight().estimateRowCount(mq);
     }
 
-    LogicalJoin jr = LogicalJoin.create(this.getLeft(), this.getRight(), this.getCondition(),
-            this.getVariablesSet(), this.getJoinType());
+    LogicalJoin jr = LogicalJoin.create(this.getLeft(), this.getRight(), Collections.emptyList(),
+        this.getCondition(), this.getVariablesSet(), this.getJoinType());
 
     if (!DrillRelOptUtil.guessRows(this)         //Statistics present for left and right side of the join
         && jr.getJoinType() == JoinRelType.INNER) {
-      List<Pair<Integer, Integer>> joinConditions = DrillRelOptUtil.analyzeSimpleEquiJoin((Join)jr);
+      List<Pair<Integer, Integer>> joinConditions = DrillRelOptUtil.analyzeSimpleEquiJoin(jr);
       if (joinConditions.size() > 0) {
         List<Integer> leftSide =  new ArrayList<>();
         List<Integer> rightSide = new ArrayList<>();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillUnionRelBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillUnionRelBase.java
index 39f7ba4..968cf54 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillUnionRelBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillUnionRelBase.java
@@ -51,7 +51,6 @@
     return true;
   }
 
-  @Override
   public boolean isDistinct() {
     return !this.all;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillDefaultRelMetadataProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillDefaultRelMetadataProvider.java
index d34647c..fad0018 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillDefaultRelMetadataProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillDefaultRelMetadataProvider.java
@@ -31,5 +31,6 @@
           DrillRelMdDistinctRowCount.SOURCE,
           DrillRelMdSelectivity.SOURCE,
           DrillRelMdMaxRowCount.SOURCE,
+          DrillRelMdPredicates.SOURCE,
           DefaultRelMetadataProvider.INSTANCE));
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdDistinctRowCount.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdDistinctRowCount.java
index 9f2901b..25c2fe2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdDistinctRowCount.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdDistinctRowCount.java
@@ -218,7 +218,8 @@
       List<RexNode> leftFilters = new ArrayList<>();
       List<RexNode> rightFilters = new ArrayList<>();
       List<RexNode> joinFilters = new ArrayList();
-      List<RexNode> predList = RelOptUtil.conjunctions(predicate);
+      List<RexNode> predList = RelOptUtil.conjunctions(
+        RexUtil.expandSearch(joinRel.getCluster().getRexBuilder(), null, predicate));
       RelOptUtil.classifyFilters(joinRel, predList, joinType, joinType == JoinRelType.INNER,
           !joinType.generatesNullsOnLeft(), !joinType.generatesNullsOnRight(), joinFilters,
               leftFilters, rightFilters);
@@ -238,9 +239,10 @@
      * B) Otherwise, based on independence assumption CNDV(sX) = NDV(sX)
      */
     Set<ImmutableBitSet> joinFiltersSet = new HashSet<>();
-    for (RexNode filter : RelOptUtil.conjunctions(joinRel.getCondition())) {
+    for (RexNode filter : RelOptUtil.conjunctions(
+      RexUtil.expandSearch(joinRel.getCluster().getRexBuilder(), null, joinRel.getCondition()))) {
       final RelOptUtil.InputFinder inputFinder = RelOptUtil.InputFinder.analyze(filter);
-      joinFiltersSet.add(inputFinder.inputBitSet.build());
+      joinFiltersSet.add(inputFinder.build());
     }
     for (int idx = 0; idx < groupKey.length(); idx++) {
       if (groupKey.get(idx)) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdPredicates.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdPredicates.java
new file mode 100644
index 0000000..c7bbc7c
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdPredicates.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.cost;
+
+import org.apache.calcite.plan.RelOptPredicateList;
+import org.apache.calcite.plan.RelOptUtil;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.metadata.BuiltInMetadata;
+import org.apache.calcite.rel.metadata.ReflectiveRelMetadataProvider;
+import org.apache.calcite.rel.metadata.RelMdPredicates;
+import org.apache.calcite.rel.metadata.RelMetadataProvider;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.util.Util;
+
+public class DrillRelMdPredicates extends RelMdPredicates {
+
+  public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider
+    .reflectiveSource(new DrillRelMdPredicates(), BuiltInMetadata.Predicates.Handler.class);
+
+  /**
+   * Adds the Filter's condition to the predicates pulled up from its input.
+   */
+  public RelOptPredicateList getPredicates(Filter filter, RelMetadataQuery mq) {
+    final RelNode input = filter.getInput();
+    final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
+    final RelOptPredicateList inputInfo = mq.getPulledUpPredicates(input);
+
+    return Util.first(inputInfo, RelOptPredicateList.EMPTY)
+      .union(rexBuilder,
+        RelOptPredicateList.of(rexBuilder,
+          RexUtil.retainDeterministic(
+            RelOptUtil.conjunctions(RexUtil.expandSearch(rexBuilder, null, filter.getCondition())))));
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdSelectivity.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdSelectivity.java
index 95921e0..5732f91 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdSelectivity.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/cost/DrillRelMdSelectivity.java
@@ -169,7 +169,8 @@
       return sel;
     }
 
-    List<RexNode> conjuncts1 = RelOptUtil.conjunctions(predicate);
+    List<RexNode> conjuncts1 = RelOptUtil.conjunctions(
+      RexUtil.expandSearch(rexBuilder, null, predicate));
 
     // a Set that holds range predicates that are combined based on whether they are defined on the same column
     Set<RexNode> combinedRangePredicates = new HashSet<>();
@@ -380,7 +381,8 @@
       List<RexNode> leftFilters = new ArrayList<>();
       List<RexNode> rightFilters = new ArrayList<>();
       List<RexNode> joinFilters = new ArrayList<>();
-      List<RexNode> predList = RelOptUtil.conjunctions(predicate);
+      List<RexNode> predList = RelOptUtil.conjunctions(
+        RexUtil.expandSearch(rexBuilder, null, predicate));
 
       RelOptUtil.classifyFilters(
           rel,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexCallContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexCallContext.java
index 45251c6..54b3fbb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexCallContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexCallContext.java
@@ -36,8 +36,6 @@
 
   DbGroupScan getGroupScan();
 
-  List<RelCollation> getCollationList();
-
   RelCollation getCollation();
 
   boolean hasLowerProject();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexLogicalPlanCallContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexLogicalPlanCallContext.java
index 3a6ea83..697150f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexLogicalPlanCallContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexLogicalPlanCallContext.java
@@ -98,12 +98,6 @@
     return scan;
   }
 
-  public List<RelCollation> getCollationList() {
-    if (sort != null) {
-      return sort.getCollationList();
-    }
-    return null;
-  }
 
   public RelCollation getCollation() {
     if (sort != null) {
@@ -174,5 +168,5 @@
 
   public RelNode getExchange() { return null; }
 
-  public List<DistributionField> getDistributionFields() { return Collections.EMPTY_LIST; }
+  public List<DistributionField> getDistributionFields() { return Collections.emptyList(); }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexPhysicalPlanCallContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexPhysicalPlanCallContext.java
index 91ff02c..0e842fd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexPhysicalPlanCallContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexPhysicalPlanCallContext.java
@@ -104,14 +104,6 @@
     return scan;
   }
 
-
-  public List<RelCollation> getCollationList() {
-    if (sort != null) {
-      return sort.getCollationList();
-    }
-    return null;
-  }
-
   public RelCollation getCollation() {
     if (sort != null) {
       return sort.getCollation();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexSelector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexSelector.java
index d3a0b13..b071cb6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexSelector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/IndexSelector.java
@@ -126,10 +126,7 @@
   }
 
   private boolean requiredCollation() {
-    if (indexContext.getCollationList() != null && indexContext.getCollationList().size() > 0) {
-      return true;
-    }
-    return false;
+    return indexContext.getCollation() != null;
   }
 
   private boolean buildAndCheckCollation(IndexProperties indexProps) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
index 2e1c854..09045f5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
@@ -76,13 +76,16 @@
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.planner.sql.TypeInferenceUtils;
 import org.apache.drill.exec.vector.DateUtilities;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.util.Calendar;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneOffset;
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 
 public class DrillConstExecutor implements RexExecutor {
@@ -172,13 +175,13 @@
           case INT: {
             int value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
                 ((NullableIntHolder) valueHolder).value : ((IntHolder) valueHolder).value;
-            return rexBuilder.makeLiteral(new BigDecimal(value),
+            return rexBuilder.makeLiteral(BigDecimal.valueOf(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTEGER, newCall.getType().isNullable()), false);
           }
           case BIGINT: {
             long value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
                 ((NullableBigIntHolder) valueHolder).value : ((BigIntHolder) valueHolder).value;
-            return rexBuilder.makeLiteral(new BigDecimal(value),
+            return rexBuilder.makeLiteral(BigDecimal.valueOf(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BIGINT, newCall.getType().isNullable()), false);
           }
           case FLOAT4: {
@@ -191,7 +194,7 @@
               return rexBuilder.makeLiteral(Float.toString(value));
             }
 
-            return rexBuilder.makeLiteral(new BigDecimal(value),
+            return rexBuilder.makeLiteral(BigDecimal.valueOf(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.FLOAT, newCall.getType().isNullable()), false);
           }
           case FLOAT8: {
@@ -204,7 +207,7 @@
               return rexBuilder.makeLiteral(Double.toString(value));
             }
 
-            return rexBuilder.makeLiteral(new BigDecimal(value),
+            return rexBuilder.makeLiteral(BigDecimal.valueOf(value),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DOUBLE, newCall.getType().isNullable()), false);
           }
           case VARCHAR: {
@@ -222,10 +225,12 @@
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BOOLEAN, newCall.getType().isNullable()), false);
           }
           case DATE: {
-            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableDateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((DateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
-            return rexBuilder.makeLiteral(DateString.fromCalendarFields(value),
+            long millis = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableDateHolder) valueHolder).value :
+                ((DateHolder) valueHolder).value;
+            long days = millis / TimeUnit.DAYS.toMillis(1);
+            LocalDate localDate = LocalDate.ofEpochDay(days);
+            return rexBuilder.makeLiteral(new DateString(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth()),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DATE, newCall.getType().isNullable()), false);
           }
           case DECIMAL9: {
@@ -327,25 +332,27 @@
                 false);
           }
           case TIME: {
-            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableTimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((TimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
+            int value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableTimeHolder) valueHolder).value :
+                ((TimeHolder) valueHolder).value;
+            LocalTime time = LocalDateTime.ofInstant(Instant.ofEpochMilli(value), ZoneOffset.UTC).toLocalTime();
             RelDataType type = typeFactory.createSqlType(SqlTypeName.TIME, newCall.getType().getPrecision());
             RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
-            return rexBuilder.makeLiteral(TimeString.fromCalendarFields(value), typeWithNullability, false);
+            TimeString timeString = new TimeString(time.getHour(), time.getMinute(), time.getSecond()).withNanos(time.getNano());
+            return rexBuilder.makeLiteral(timeString, typeWithNullability, false);
           }
           case TIMESTAMP: {
-            Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new DateTime(((NullableTimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) :
-                new DateTime(((TimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
+            long value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
+                ((NullableTimeStampHolder) valueHolder).value :
+                ((TimeStampHolder) valueHolder).value;
             RelDataType type = typeFactory.createSqlType(SqlTypeName.TIMESTAMP, newCall.getType().getPrecision());
             RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
-            return rexBuilder.makeLiteral(TimestampString.fromCalendarFields(value), typeWithNullability, false);
+            return rexBuilder.makeLiteral(TimestampString.fromMillisSinceEpoch(value), typeWithNullability, false);
           }
           case INTERVALYEAR: {
             BigDecimal value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ?
-                new BigDecimal(((NullableIntervalYearHolder) valueHolder).value) :
-                new BigDecimal(((IntervalYearHolder) valueHolder).value);
+                BigDecimal.valueOf(((NullableIntervalYearHolder) valueHolder).value) :
+                BigDecimal.valueOf(((IntervalYearHolder) valueHolder).value);
             return rexBuilder.makeLiteral(value,
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_YEAR_MONTH, newCall.getType().isNullable()), false);
           }
@@ -362,7 +369,7 @@
               milliseconds = intervalDayOut.milliseconds;
             }
             return rexBuilder.makeLiteral(
-                new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds),
+                BigDecimal.valueOf(days * (long) DateUtilities.daysToStandardMillis + milliseconds),
                 TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY,
                     newCall.getType().isNullable()), false);
           }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterAggregateTransposeRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterAggregateTransposeRule.java
index 6bb409e..c3be0b6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterAggregateTransposeRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterAggregateTransposeRule.java
@@ -39,7 +39,8 @@
       DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY, DrillRelFactories.DRILL_LOGICAL_AGGREGATE_FACTORY));
 
   private DrillFilterAggregateTransposeRule(RelBuilderFactory relBuilderFactory) {
-    super(Filter.class, relBuilderFactory, Aggregate.class);
+    super(Config.DEFAULT.withRelBuilderFactory(relBuilderFactory)
+      .as(Config.class));
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterJoinRules.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterJoinRules.java
index 5e24103..06dd3db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterJoinRules.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterJoinRules.java
@@ -17,17 +17,31 @@
  */
 package org.apache.drill.exec.planner.logical;
 
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptUtil;
-import org.apache.calcite.rel.core.Join;
+import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.calcite.rel.rules.FilterJoinRule;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.drill.exec.planner.DrillRelBuilder;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
+import static java.util.Objects.requireNonNull;
+import static org.apache.calcite.plan.RelOptUtil.collapseExpandedIsNotDistinctFromExpr;
+
 public class DrillFilterJoinRules {
   /** Predicate that always returns true for any filter in OUTER join, and only true
    * for EQUAL or IS_DISTINCT_FROM over RexInputRef in INNER join. With this predicate,
@@ -37,26 +51,31 @@
    *           OUTER JOIN,   Keep any filter in JOIN.
   */
   public static final FilterJoinRule.Predicate EQUAL_IS_DISTINCT_FROM =
-      new FilterJoinRule.Predicate() {
-        public boolean apply(Join join, JoinRelType joinType, RexNode exp) {
-          if (joinType != JoinRelType.INNER) {
-            return true;  // In OUTER join, we could not pull-up the filter.
-                          // All we can do is keep the filter with JOIN, and
-                          // then decide whether the filter could be pushed down
-                          // into LEFT/RIGHT.
-          }
+    (join, joinType, exp) -> {
+      if (joinType != JoinRelType.INNER) {
+        return true;  // In an OUTER join, we cannot pull up the filter.
+                      // All we can do is keep the filter with the JOIN, and
+                      // then decide whether the filter can be pushed down
+                      // into the LEFT/RIGHT inputs.
+      }
 
-          List<RexNode> tmpLeftKeys = new ArrayList<>();
-          List<RexNode> tmpRightKeys = new ArrayList<>();
-          List<RelDataTypeField> sysFields = new ArrayList<>();
-          List<Integer> filterNulls = new ArrayList<>();
+      List<RexNode> tmpLeftKeys = new ArrayList<>();
+      List<RexNode> tmpRightKeys = new ArrayList<>();
+      List<RelDataTypeField> sysFields = new ArrayList<>();
+      List<Integer> filterNulls = new ArrayList<>();
 
-          RexNode remaining = RelOptUtil.splitJoinCondition(sysFields, join.getLeft(), join.getRight(),
-              exp, tmpLeftKeys, tmpRightKeys, filterNulls, null);
+      List<RelNode> inputs = Arrays.asList(join.getLeft(), join.getRight());
+      final List<RexNode> nonEquiList = new ArrayList<>();
 
-          return remaining.isAlwaysTrue();
-        }
-      };
+      splitJoinCondition(sysFields, inputs, exp, Arrays.asList(tmpLeftKeys, tmpRightKeys),
+        filterNulls, null, nonEquiList);
+
+      // Combine the remaining (non-equi) conditions into a single AND'ed expression.
+      RexNode remaining = RexUtil.composeConjunction(
+        inputs.get(0).getCluster().getRexBuilder(), nonEquiList);
+
+      return remaining.isAlwaysTrue();
+    };
 
   /** Predicate that always returns true for any filter in OUTER join, and only true
    * for strict EQUAL or IS_DISTINCT_FROM conditions (without any mathematical operations) over RexInputRef in INNER join.
@@ -66,36 +85,277 @@
    *           OUTER JOIN,   Keep any filter in JOIN.
   */
   public static final FilterJoinRule.Predicate STRICT_EQUAL_IS_DISTINCT_FROM =
-      new FilterJoinRule.Predicate() {
-        public boolean apply(Join join, JoinRelType joinType, RexNode exp) {
-          if (joinType != JoinRelType.INNER) {
-            return true;
-          }
+    (join, joinType, exp) -> {
+      if (joinType != JoinRelType.INNER) {
+        return true;
+      }
 
-          List<Integer> tmpLeftKeys = new ArrayList<>();
-          List<Integer> tmpRightKeys = new ArrayList<>();
-          List<Boolean> filterNulls = new ArrayList<>();
+      List<Integer> tmpLeftKeys = new ArrayList<>();
+      List<Integer> tmpRightKeys = new ArrayList<>();
+      List<Boolean> filterNulls = new ArrayList<>();
 
-          RexNode remaining =
-              RelOptUtil.splitJoinCondition(join.getLeft(), join.getRight(), exp, tmpLeftKeys, tmpRightKeys, filterNulls);
+      RexNode remaining =
+          RelOptUtil.splitJoinCondition(join.getLeft(), join.getRight(), exp, tmpLeftKeys, tmpRightKeys, filterNulls);
 
-          return remaining.isAlwaysTrue();
-        }
-      };
-
+      return remaining.isAlwaysTrue();
+    };
 
   /** Rule that pushes predicates from a Filter into the Join below them. */
-  public static final FilterJoinRule FILTER_INTO_JOIN =
-      new FilterJoinRule.FilterIntoJoinRule(true, DrillRelFactories.LOGICAL_BUILDER, EQUAL_IS_DISTINCT_FROM);
+  public static final RelOptRule FILTER_INTO_JOIN =
+    FilterJoinRule.FilterIntoJoinRule.FilterIntoJoinRuleConfig.DEFAULT
+      .withPredicate(EQUAL_IS_DISTINCT_FROM)
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
   /** The same as above, but with Drill's operators. */
-  public static final FilterJoinRule DRILL_FILTER_INTO_JOIN =
-      new FilterJoinRule.FilterIntoJoinRule(true,
-          DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY,
-              DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY), STRICT_EQUAL_IS_DISTINCT_FROM);
+  public static final RelOptRule DRILL_FILTER_INTO_JOIN =
+    FilterJoinRule.FilterIntoJoinRule.FilterIntoJoinRuleConfig.DEFAULT
+      .withPredicate(STRICT_EQUAL_IS_DISTINCT_FROM)
+      .withRelBuilderFactory(
+        DrillRelBuilder.proto(DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY,
+          DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY))
+      .toRule();
 
   /** Rule that pushes predicates in a Join into the inputs to the join. */
-  public static final FilterJoinRule JOIN_PUSH_CONDITION =
-      new FilterJoinRule.JoinConditionPushRule(DrillRelFactories.LOGICAL_BUILDER, EQUAL_IS_DISTINCT_FROM);
+  public static final RelOptRule JOIN_PUSH_CONDITION =
+    FilterJoinRule.JoinConditionPushRule.JoinConditionPushRuleConfig.DEFAULT
+      .withPredicate(EQUAL_IS_DISTINCT_FROM)
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
 
+  private static void splitJoinCondition(
+    List<RelDataTypeField> sysFieldList,
+    List<RelNode> inputs,
+    RexNode condition,
+    List<List<RexNode>> joinKeys,
+    List<Integer> filterNulls,
+    List<SqlOperator> rangeOp,
+    List<RexNode> nonEquiList) {
+    final int sysFieldCount = sysFieldList.size();
+    final RelOptCluster cluster = inputs.get(0).getCluster();
+    final RexBuilder rexBuilder = cluster.getRexBuilder();
+    final RelDataTypeFactory typeFactory = cluster.getTypeFactory();
+
+    final ImmutableBitSet[] inputsRange = new ImmutableBitSet[inputs.size()];
+    int totalFieldCount = 0;
+    for (int i = 0; i < inputs.size(); i++) {
+      final int firstField = totalFieldCount + sysFieldCount;
+      totalFieldCount = firstField + inputs.get(i).getRowType().getFieldCount();
+      inputsRange[i] = ImmutableBitSet.range(firstField, totalFieldCount);
+    }
+
+    // adjustment array
+    int[] adjustments = new int[totalFieldCount];
+    for (int i = 0; i < inputs.size(); i++) {
+      final int adjustment = inputsRange[i].nextSetBit(0);
+      for (int j = adjustment; j < inputsRange[i].length(); j++) {
+        adjustments[j] = -adjustment;
+      }
+    }
+
+    if (condition.getKind() == SqlKind.AND) {
+      for (RexNode operand : ((RexCall) condition).getOperands()) {
+        splitJoinCondition(
+          sysFieldList,
+          inputs,
+          operand,
+          joinKeys,
+          filterNulls,
+          rangeOp,
+          nonEquiList);
+      }
+      return;
+    }
+
+    if (condition instanceof RexCall) {
+      RexNode leftKey = null;
+      RexNode rightKey = null;
+      int leftInput = 0;
+      int rightInput = 0;
+      List<RelDataTypeField> leftFields = null;
+      List<RelDataTypeField> rightFields = null;
+      boolean reverse = false;
+
+      final RexCall call =
+        collapseExpandedIsNotDistinctFromExpr((RexCall) condition, rexBuilder);
+      SqlKind kind = call.getKind();
+
+      // Only consider range operators if we haven't already seen one
+      if ((kind == SqlKind.EQUALS)
+        || (filterNulls != null
+        && kind == SqlKind.IS_NOT_DISTINCT_FROM)
+        || (rangeOp != null
+        && rangeOp.isEmpty()
+        && (kind == SqlKind.GREATER_THAN
+        || kind == SqlKind.GREATER_THAN_OR_EQUAL
+        || kind == SqlKind.LESS_THAN
+        || kind == SqlKind.LESS_THAN_OR_EQUAL))) {
+        final List<RexNode> operands = call.getOperands();
+        RexNode op0 = operands.get(0);
+        RexNode op1 = operands.get(1);
+
+        final ImmutableBitSet projRefs0 = RelOptUtil.InputFinder.bits(op0);
+        final ImmutableBitSet projRefs1 = RelOptUtil.InputFinder.bits(op1);
+
+        boolean foundBothInputs = false;
+        for (int i = 0; i < inputs.size() && !foundBothInputs; i++) {
+          if (projRefs0.intersects(inputsRange[i])
+            && projRefs0.union(inputsRange[i]).equals(inputsRange[i])) {
+            if (leftKey == null) {
+              leftKey = op0;
+              leftInput = i;
+              leftFields = inputs.get(leftInput).getRowType().getFieldList();
+            } else {
+              rightKey = op0;
+              rightInput = i;
+              rightFields = inputs.get(rightInput).getRowType().getFieldList();
+              reverse = true;
+              foundBothInputs = true;
+            }
+          } else if (projRefs1.intersects(inputsRange[i])
+            && projRefs1.union(inputsRange[i]).equals(inputsRange[i])) {
+            if (leftKey == null) {
+              leftKey = op1;
+              leftInput = i;
+              leftFields = inputs.get(leftInput).getRowType().getFieldList();
+            } else {
+              rightKey = op1;
+              rightInput = i;
+              rightFields = inputs.get(rightInput).getRowType().getFieldList();
+              foundBothInputs = true;
+            }
+          }
+        }
+
+        if ((leftKey != null) && (rightKey != null)) {
+          // replace right Key input ref
+          rightKey =
+            rightKey.accept(
+              new RelOptUtil.RexInputConverter(
+                rexBuilder,
+                rightFields,
+                rightFields,
+                adjustments));
+
+          // left key only needs to be adjusted if there are system
+          // fields, but do it for uniformity
+          leftKey =
+            leftKey.accept(
+              new RelOptUtil.RexInputConverter(
+                rexBuilder,
+                leftFields,
+                leftFields,
+                adjustments));
+
+          RelDataType leftKeyType = leftKey.getType();
+          RelDataType rightKeyType = rightKey.getType();
+
+          if (leftKeyType != rightKeyType) {
+            // perform casting
+            RelDataType targetKeyType =
+              typeFactory.leastRestrictive(
+                Arrays.asList(leftKeyType, rightKeyType));
+
+            if (targetKeyType == null) {
+              throw new AssertionError("Cannot find common type for join keys "
+                + leftKey + " (type " + leftKeyType + ") and " + rightKey
+                + " (type " + rightKeyType + ")");
+            }
+
+            if (leftKeyType != targetKeyType) {
+              leftKey =
+                rexBuilder.makeCast(targetKeyType, leftKey);
+            }
+
+            if (rightKeyType != targetKeyType) {
+              rightKey =
+                rexBuilder.makeCast(targetKeyType, rightKey);
+            }
+          }
+        }
+      }
+
+      if ((rangeOp == null)
+        && ((leftKey == null) || (rightKey == null))) {
+        // no equality join keys found yet:
+        // try transforming the condition to
+        // equality "join" conditions, e.g.
+        //     f(LHS) > 0 ===> ( f(LHS) > 0 ) = TRUE,
+        // and make the RHS produce TRUE, but only if we're strictly
+        // looking for equi-joins
+        final ImmutableBitSet projRefs = RelOptUtil.InputFinder.bits(condition);
+        leftKey = null;
+        rightKey = null;
+
+        boolean foundInput = false;
+        for (int i = 0; i < inputs.size() && !foundInput; i++) {
+          if (inputsRange[i].contains(projRefs)) {
+            leftInput = i;
+            leftFields = inputs.get(leftInput).getRowType().getFieldList();
+
+            leftKey = condition.accept(
+              new RelOptUtil.RexInputConverter(
+                rexBuilder,
+                leftFields,
+                leftFields,
+                adjustments));
+
+            rightKey = rexBuilder.makeLiteral(true);
+
+            // effectively performing an equality comparison
+            kind = SqlKind.EQUALS;
+
+            foundInput = true;
+          }
+        }
+      }
+
+      if ((leftKey != null) && (rightKey != null)) {
+        // found suitable join keys
+        // add them to key list, ensuring that if there is a
+        // non-equi join predicate, it appears at the end of the
+        // key list; also mark the null filtering property
+        addJoinKey(
+          joinKeys.get(leftInput),
+          leftKey,
+          (rangeOp != null) && !rangeOp.isEmpty());
+        addJoinKey(
+          joinKeys.get(rightInput),
+          rightKey,
+          (rangeOp != null) && !rangeOp.isEmpty());
+        if (filterNulls != null
+          && kind == SqlKind.EQUALS) {
+          // nulls are considered not matching for equality comparison
+          // add the position of the most recently inserted key
+          filterNulls.add(joinKeys.get(leftInput).size() - 1);
+        }
+        if (rangeOp != null
+          && kind != SqlKind.EQUALS
+          && kind != SqlKind.IS_DISTINCT_FROM) {
+          SqlOperator op = call.getOperator();
+          if (reverse) {
+            op = requireNonNull(op.reverse());
+          }
+          rangeOp.add(op);
+        }
+        return;
+      } // else fall through and add this condition as nonEqui condition
+    }
+
+    // Either the condition is not a RexCall, or no suitable equi-join
+    // keys were found above and we fell through.
+    // Record this condition as a non-equi-join condition.
+    nonEquiList.add(condition);
+  }
+
+  private static void addJoinKey(
+    List<RexNode> joinKeyList,
+    RexNode key,
+    boolean preserveLastElementInList) {
+    if (!joinKeyList.isEmpty() && preserveLastElementInList) {
+      joinKeyList.add(joinKeyList.size() - 1, key);
+    } else {
+      joinKeyList.add(key);
+    }
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillMergeProjectRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillMergeProjectRule.java
index 46cbdb4..f67f6eb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillMergeProjectRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillMergeProjectRule.java
@@ -35,6 +35,7 @@
 import org.apache.drill.exec.planner.physical.PrelFactories;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 /**
@@ -174,14 +175,14 @@
     // replace the two projects with a combined projection
     if (topProject instanceof DrillProjectRel) {
       RelNode newProjectRel = DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY.createProject(
-          bottomProject.getInput(), newProjects,
+          bottomProject.getInput(), Collections.emptyList(), newProjects,
           topProject.getRowType().getFieldNames());
 
       return (Project) newProjectRel;
     }
     else {
       RelNode newProjectRel = PrelFactories.PROJECT_FACTORY.createProject(
-          bottomProject.getInput(), newProjects,
+          bottomProject.getInput(), Collections.emptyList(), newProjects,
           topProject.getRowType().getFieldNames());
 
       return (Project) newProjectRel;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index 7cd69c8..e6e7a78 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -25,9 +25,13 @@
 
 import org.apache.calcite.avatica.util.TimeUnit;
 import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexUtil;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.type.BasicSqlType;
 import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.util.DateString;
+import org.apache.calcite.util.TimeString;
+import org.apache.calcite.util.TimestampString;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.exceptions.UserException;
@@ -266,7 +270,10 @@
         if (call.getOperator() == SqlStdOperatorTable.MINUS_DATE) {
           return doFunction(call, "-");
         }
-
+      case INTERNAL:
+        if (call.getOperator() == SqlStdOperatorTable.SEARCH) {
+          return RexUtil.expandSearch(getRexBuilder(), null, call).accept(this);
+        }
         // fall through
       default:
         throw notImplementedException(syntax, call);
@@ -309,6 +316,9 @@
           }
           return left.getChild(((BigDecimal) literal.getValue()).intValue());
         case CHAR:
+        case TIMESTAMP:
+        case TIME:
+        case DATE:
           if (isMap) {
             return handleMapCharKey(literal, operand, dataType, left);
           }
@@ -385,15 +395,19 @@
       TypeProtos.DataMode mode = operand.getType().isNullable()
           ? TypeProtos.DataMode.OPTIONAL : TypeProtos.DataMode.REQUIRED;
       TypeProtos.MajorType type;
+      Object value = literal.getValue2();
       switch (mapType.getKeyType().getSqlTypeName()) {
         case TIMESTAMP:
           type = Types.withMode(MinorType.TIMESTAMP, mode);
+          value = literal.getValueAs(TimestampString.class);
           break;
         case DATE:
           type = Types.withMode(MinorType.DATE, mode);
+          value = literal.getValueAs(DateString.class);
           break;
         case TIME:
           type = Types.withMode(MinorType.TIME, mode);
+          value = literal.getValueAs(TimeString.class);
           break;
         case INTERVAL_DAY:
           type = Types.withMode(MinorType.INTERVALDAY, mode);
@@ -408,7 +422,7 @@
           type = Types.withMode(MinorType.VARCHAR, mode);
           break;
       }
-      return parentPath.getChild(literal.getValue2().toString(), literal.getValue2(), type);
+      return parentPath.getChild(value.toString(), value, type);
     }
 
     private LogicalExpression doFunction(RexCall call, String funcName) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillProjectLateralJoinTransposeRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillProjectLateralJoinTransposeRule.java
index 5cb984a..0c01659 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillProjectLateralJoinTransposeRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillProjectLateralJoinTransposeRule.java
@@ -30,7 +30,9 @@
   public static final DrillProjectLateralJoinTransposeRule INSTANCE = new DrillProjectLateralJoinTransposeRule(PushProjector.ExprCondition.TRUE, RelFactories.LOGICAL_BUILDER);
 
   public DrillProjectLateralJoinTransposeRule(PushProjector.ExprCondition preserveExprCondition, RelBuilderFactory relFactory) {
-    super(preserveExprCondition, relFactory);
+    super(Config.DEFAULT.withRelBuilderFactory(relFactory)
+      .as(Config.class)
+      .withPreserveExprCondition(preserveExprCondition));
   }
 
   @Override
@@ -40,11 +42,7 @@
 
     // No need to call ProjectCorrelateTransposeRule if the current lateralJoin contains excludeCorrelationColumn set to true.
     // This is needed as the project push into Lateral join rule changes the output row type which will fail assertions in ProjectCorrelateTransposeRule.
-    if (correlate instanceof DrillLateralJoinRel &&
-        ((DrillLateralJoinRel)correlate).excludeCorrelateColumn) {
-      return false;
-    }
-
-    return true;
+    return !(correlate instanceof DrillLateralJoinRel) ||
+      !((DrillLateralJoinRel) correlate).excludeCorrelateColumn;
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastFilterRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastFilterRule.java
index 6ea2c92..f9620c7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastFilterRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastFilterRule.java
@@ -28,7 +28,10 @@
   public final static RelOptRule INSTANCE = new DrillPushProjectPastFilterRule(DrillConditions.PRESERVE_ITEM);
 
   protected DrillPushProjectPastFilterRule(PushProjector.ExprCondition preserveExprCondition) {
-    super(LogicalProject.class, LogicalFilter.class,
-        DrillRelFactories.LOGICAL_BUILDER, preserveExprCondition);
+    super(Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .as(Config.class)
+      .withOperandFor(LogicalProject.class, LogicalFilter.class)
+      .withPreserveExprCondition(preserveExprCondition));
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastJoinRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastJoinRule.java
index 972a141..206f53c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastJoinRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectPastJoinRule.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.planner.logical;
 
+import org.apache.calcite.rel.core.Join;
+import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rel.rules.ProjectJoinTransposeRule;
 import org.apache.calcite.rel.rules.PushProjector;
 import org.apache.calcite.plan.RelOptRule;
@@ -26,7 +28,11 @@
   public static final RelOptRule INSTANCE = new DrillPushProjectPastJoinRule(DrillConditions.PRESERVE_ITEM);
 
   protected DrillPushProjectPastJoinRule(PushProjector.ExprCondition preserveExprCondition) {
-    super(preserveExprCondition, DrillRelFactories.LOGICAL_BUILDER);
+    super(Config.DEFAULT
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .as(Config.class)
+      .withOperandFor(Project.class, Join.class)
+      .withPreserveExprCondition(preserveExprCondition));
   }
 
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillReduceExpressionsRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillReduceExpressionsRule.java
index f053f3f..7dd2945 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillReduceExpressionsRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillReduceExpressionsRule.java
@@ -17,18 +17,41 @@
  */
 package org.apache.drill.exec.planner.logical;
 
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelOptPredicateList;
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.rel.RelCollations;
-import org.apache.calcite.rel.core.Calc;
-import org.apache.calcite.rel.core.Filter;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.SingleRel;
+import org.apache.calcite.rel.core.Calc;
+import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rel.logical.LogicalSort;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.rules.ReduceExpressionsRule;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexExecutor;
+import org.apache.calcite.rex.RexInputRef;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexLocalRef;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexProgram;
+import org.apache.calcite.rex.RexProgramBuilder;
+import org.apache.calcite.rex.RexShuttle;
+import org.apache.calcite.rex.RexSimplify;
+import org.apache.calcite.rex.RexUnknownAs;
+import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.util.Util;
+import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
 import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.List;
 
-public class DrillReduceExpressionsRule {
+public abstract class DrillReduceExpressionsRule
+  extends ReduceExpressionsRule<ReduceExpressionsRule.Config> {
 
   public static final DrillReduceFilterRule FILTER_INSTANCE_DRILL =
       new DrillReduceFilterRule();
@@ -36,10 +59,21 @@
   public static final DrillReduceCalcRule CALC_INSTANCE_DRILL =
       new DrillReduceCalcRule();
 
-  private static class DrillReduceFilterRule extends ReduceExpressionsRule.FilterReduceExpressionsRule {
+  public static final DrillReduceProjectRule PROJECT_INSTANCE_DRILL =
+      new DrillReduceProjectRule();
+
+  protected DrillReduceExpressionsRule(Config config) {
+    super(config);
+  }
+
+  private static class DrillReduceFilterRule extends FilterReduceExpressionsRule {
 
     DrillReduceFilterRule() {
-      super(Filter.class, true, DrillRelFactories.LOGICAL_BUILDER);
+      super(FilterReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Filter.class)
+        .withMatchNullability(false)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .as(FilterReduceExpressionsRuleConfig.class));
     }
 
     /**
@@ -53,12 +87,106 @@
       return createEmptyEmptyRelHelper(filter);
     }
 
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final Filter filter = call.rel(0);
+      final List<RexNode> expList =
+        Lists.newArrayList(filter.getCondition());
+      RexNode newConditionExp;
+      boolean reduced;
+      final RelMetadataQuery mq = call.getMetadataQuery();
+      final RelOptPredicateList predicates =
+        mq.getPulledUpPredicates(filter.getInput());
+      if (reduceExpressionsNoSimplify(filter, expList, predicates, true,
+        config.treatDynamicCallsAsConstant())) {
+        assert expList.size() == 1;
+        newConditionExp = expList.get(0);
+        reduced = true;
+      } else {
+        // No reduction was possible; keep the original
+        // condition unchanged. The checks below still
+        // apply, since the original predicate might
+        // already be a constant.
+        newConditionExp = filter.getCondition();
+        reduced = false;
+      }
+
+      // Even if no reduction, let's still test the original
+      // predicate to see if it was already a constant,
+      // in which case we don't need any runtime decision
+      // about filtering.
+      if (newConditionExp.isAlwaysTrue()) {
+        call.transformTo(
+          filter.getInput());
+      } else if (newConditionExp instanceof RexLiteral
+        || RexUtil.isNullLiteral(newConditionExp, true)) {
+        call.transformTo(createEmptyRelOrEquivalent(call, filter));
+      } else if (reduced) {
+        call.transformTo(call.builder()
+          .push(filter.getInput())
+          .filter(newConditionExp).build());
+      } else {
+        if (newConditionExp instanceof RexCall) {
+          boolean reverse = newConditionExp.getKind() == SqlKind.NOT;
+          if (reverse) {
+            newConditionExp = ((RexCall) newConditionExp).getOperands().get(0);
+          }
+          reduceNotNullableFilter(call, filter, newConditionExp, reverse);
+        }
+        return;
+      }
+
+      // New plan is absolutely better than old plan.
+      call.getPlanner().prune(filter);
+    }
+
+    private void reduceNotNullableFilter(
+      RelOptRuleCall call,
+      Filter filter,
+      RexNode rexNode,
+      boolean reverse) {
+      // If the expression is a IS [NOT] NULL on a non-nullable
+      // column, then we can either remove the filter or replace
+      // it with an Empty.
+      boolean alwaysTrue;
+      switch (rexNode.getKind()) {
+        case IS_NULL:
+        case IS_UNKNOWN:
+          alwaysTrue = false;
+          break;
+        case IS_NOT_NULL:
+          alwaysTrue = true;
+          break;
+        default:
+          return;
+      }
+      if (reverse) {
+        alwaysTrue = !alwaysTrue;
+      }
+      RexNode operand = ((RexCall) rexNode).getOperands().get(0);
+      if (operand instanceof RexInputRef) {
+        RexInputRef inputRef = (RexInputRef) operand;
+        if (!inputRef.getType().isNullable()) {
+          if (alwaysTrue) {
+            call.transformTo(filter.getInput());
+          } else {
+            call.transformTo(createEmptyRelOrEquivalent(call, filter));
+          }
+          // New plan is absolutely better than old plan.
+          call.getPlanner().prune(filter);
+        }
+      }
+    }
   }
 
-  private static class DrillReduceCalcRule extends ReduceExpressionsRule.CalcReduceExpressionsRule {
+  private static class DrillReduceCalcRule extends CalcReduceExpressionsRule {
 
     DrillReduceCalcRule() {
-      super(Calc.class, true, DrillRelFactories.LOGICAL_BUILDER);
+      super(CalcReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Calc.class)
+        .withMatchNullability(true)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .as(CalcReduceExpressionsRuleConfig.class));
     }
 
     /**
@@ -72,6 +200,116 @@
       return createEmptyEmptyRelHelper(input);
     }
 
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      Calc calc = call.rel(0);
+      RexProgram program = calc.getProgram();
+      final List<RexNode> exprList = program.getExprList();
+
+      // Form a list of expressions with sub-expressions fully expanded.
+      final List<RexNode> expandedExprList = new ArrayList<>();
+      final RexShuttle shuttle =
+        new RexShuttle() {
+          @Override
+          public RexNode visitLocalRef(RexLocalRef localRef) {
+            return expandedExprList.get(localRef.getIndex());
+          }
+        };
+      for (RexNode expr : exprList) {
+        expandedExprList.add(expr.accept(shuttle));
+      }
+      final RelOptPredicateList predicates = RelOptPredicateList.EMPTY;
+      if (reduceExpressionsNoSimplify(calc, expandedExprList, predicates, false,
+        config.treatDynamicCallsAsConstant())) {
+        final RexProgramBuilder builder =
+          new RexProgramBuilder(
+            calc.getInput().getRowType(),
+            calc.getCluster().getRexBuilder());
+        final List<RexLocalRef> list = new ArrayList<>();
+        for (RexNode expr : expandedExprList) {
+          list.add(builder.registerInput(expr));
+        }
+        if (program.getCondition() != null) {
+          final int conditionIndex =
+            program.getCondition().getIndex();
+          final RexNode newConditionExp =
+            expandedExprList.get(conditionIndex);
+          if (newConditionExp.isAlwaysTrue()) {
+            // condition is always TRUE - drop it.
+          } else if (newConditionExp instanceof RexLiteral
+            || RexUtil.isNullLiteral(newConditionExp, true)) {
+            // condition is always NULL or FALSE - replace calc
+            // with empty.
+            call.transformTo(createEmptyRelOrEquivalent(call, calc));
+            return;
+          } else {
+            builder.addCondition(list.get(conditionIndex));
+          }
+        }
+        int k = 0;
+        for (RexLocalRef projectExpr : program.getProjectList()) {
+          final int index = projectExpr.getIndex();
+          builder.addProject(
+            list.get(index).getIndex(),
+            program.getOutputRowType().getFieldNames().get(k++));
+        }
+        call.transformTo(
+          calc.copy(calc.getTraitSet(), calc.getInput(), builder.getProgram()));
+
+        // New plan is absolutely better than old plan.
+        call.getPlanner().prune(calc);
+      }
+    }
+  }
+
+  private static class DrillReduceProjectRule extends ProjectReduceExpressionsRule {
+
+    DrillReduceProjectRule() {
+      super(ProjectReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Project.class)
+        .withMatchNullability(true)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .as(ProjectReduceExpressionsRuleConfig.class));
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final Project project = call.rel(0);
+      final RelMetadataQuery mq = call.getMetadataQuery();
+      final RelOptPredicateList predicates =
+        mq.getPulledUpPredicates(project.getInput());
+      final List<RexNode> expList =
+        Lists.newArrayList(project.getProjects());
+      if (reduceExpressionsNoSimplify(project, expList, predicates, false,
+        config.treatDynamicCallsAsConstant())) {
+        assert !project.getProjects().equals(expList)
+          : "Reduced expressions should be different from original expressions";
+        call.transformTo(
+          call.builder()
+            .push(project.getInput())
+            .project(expList, project.getRowType().getFieldNames())
+            .build());
+
+        // New plan is absolutely better than old plan.
+        call.getPlanner().prune(project);
+      }
+    }
+  }
+
+  protected static boolean reduceExpressionsNoSimplify(RelNode rel, List<RexNode> expList,
+    RelOptPredicateList predicates, boolean unknownAsFalse, boolean treatDynamicCallsAsConstant) {
+    RelOptCluster cluster = rel.getCluster();
+    RexBuilder rexBuilder = cluster.getRexBuilder();
+    RexExecutor executor =
+      Util.first(cluster.getPlanner().getExecutor(), RexUtil.EXECUTOR);
+    RexSimplify simplify =
+      new RexSimplify(rexBuilder, predicates, executor);
+
+    // Simplify predicates in place
+    RexUnknownAs unknownAs = RexUnknownAs.falseIf(unknownAsFalse);
+
+    return ReduceExpressionsRule.reduceExpressionsInternal(rel, simplify, unknownAs,
+      expList, predicates, treatDynamicCallsAsConstant);
   }
 
   private static RelNode createEmptyEmptyRelHelper(SingleRel input) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
index bb0b798..574631f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillRelFactories.java
@@ -25,6 +25,7 @@
 import org.apache.calcite.rel.core.JoinInfo;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.calcite.rel.core.RelFactories;
+import org.apache.calcite.rel.hint.RelHint;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexUtil;
@@ -51,7 +52,6 @@
  */
 
 public class DrillRelFactories {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillRelFactories.class);
   public static final RelFactories.ProjectFactory DRILL_LOGICAL_PROJECT_FACTORY =
       new DrillProjectFactoryImpl();
 
@@ -94,14 +94,15 @@
    * {@link DrillProjectRel}.
    */
   private static class DrillProjectFactoryImpl implements RelFactories.ProjectFactory {
-    @Override
-    public RelNode createProject(RelNode child,
-                                 List<? extends RexNode> childExprs, List<String> fieldNames) {
-      final RelOptCluster cluster = child.getCluster();
-      final RelDataType rowType =
-          RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames, null);
 
-      return DrillProjectRel.create(cluster, child.getTraitSet().plus(DRILL_LOGICAL), child, childExprs, rowType);
+    @Override
+    public RelNode createProject(RelNode input, List<RelHint> hints, List<? extends RexNode> childExprs,
+      List<? extends String> fieldNames) {
+      RelOptCluster cluster = input.getCluster();
+      RelDataType rowType =
+        RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames, null);
+
+      return DrillProjectRel.create(cluster, input.getTraitSet().plus(DRILL_LOGICAL), input, childExprs, rowType);
     }
   }
 
@@ -123,17 +124,16 @@
   private static class DrillJoinFactoryImpl implements RelFactories.JoinFactory {
 
     @Override
-    public RelNode createJoin(RelNode left, RelNode right,
+    public RelNode createJoin(RelNode left, RelNode right, List<RelHint> hints,
                               RexNode condition, Set<CorrelationId> variablesSet,
                               JoinRelType joinType, boolean semiJoinDone) {
-      return new DrillJoinRel(left.getCluster(), left.getTraitSet().plus(DRILL_LOGICAL), left, right, condition, joinType);
-    }
-
-    @Override
-    public RelNode createJoin(RelNode left, RelNode right,
-                              RexNode condition, JoinRelType joinType,
-                              Set<String> variablesStopped, boolean semiJoinDone) {
-      return new DrillJoinRel(left.getCluster(), left.getTraitSet().plus(DRILL_LOGICAL), left, right, condition, joinType);
+      switch (joinType) {
+        case SEMI:
+          JoinInfo joinInfo = JoinInfo.of(left, right, condition);
+          return DrillSemiJoinRel.create(left, right, condition, joinInfo.leftKeys, joinInfo.rightKeys);
+        default:
+          return new DrillJoinRel(left.getCluster(), left.getTraitSet().plus(DRILL_LOGICAL), left, right, condition, joinType);
+      }
     }
   }
 
@@ -144,7 +144,7 @@
   private static class DrillAggregateFactoryImpl implements RelFactories.AggregateFactory {
 
     @Override
-    public RelNode createAggregate(RelNode input, ImmutableBitSet groupSet,
+    public RelNode createAggregate(RelNode input, List<RelHint> hints, ImmutableBitSet groupSet,
                                    com.google.common.collect.ImmutableList<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
       return new DrillAggregateRel(input.getCluster(), input.getTraitSet().plus(DRILL_LOGICAL), input, groupSet, groupSets, aggCalls);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillViewTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillViewTable.java
index a90a135..87b1355 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillViewTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillViewTable.java
@@ -17,10 +17,11 @@
  */
 package org.apache.drill.exec.planner.logical;
 
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.calcite.schema.SchemaPlus;
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
+import org.apache.drill.exec.planner.sql.conversion.DrillViewExpander;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.schema.Schema.TableType;
 import org.apache.calcite.schema.Statistic;
@@ -39,7 +40,6 @@
 import org.apache.drill.exec.ops.ViewExpansionContext;
 
 public class DrillViewTable implements TranslatableTable, DrillViewInfoProvider {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillViewTable.class);
 
   private final View view;
   private final String viewOwner;
@@ -63,6 +63,7 @@
 
   @Override
   public RelNode toRel(ToRelContext context, RelOptTable relOptTable) {
+    DrillViewExpander viewExpander = viewExpansionContext.getViewExpander();
     ViewExpansionContext.ViewExpansionToken token = null;
     try {
       RelDataType rowType = relOptTable.getRowType();
@@ -70,9 +71,9 @@
 
       if (viewExpansionContext.isImpersonationEnabled()) {
         token = viewExpansionContext.reserveViewExpansionToken(viewOwner);
-        rel = expandViewForImpersonatedUser(context, rowType, view.getWorkspaceSchemaPath(), token.getSchemaTree());
+        rel = expandViewForImpersonatedUser(viewExpander, view.getWorkspaceSchemaPath(), token.getSchemaTree());
       } else {
-        rel = context.expandView(rowType, view.getSql(), view.getWorkspaceSchemaPath(), ImmutableList.<String>of()).rel;
+        rel = viewExpander.expandView(rowType, view.getSql(), view.getWorkspaceSchemaPath(), Collections.emptyList()).rel;
       }
 
       // If the View's field list is not "*", create a cast.
@@ -89,11 +90,10 @@
   }
 
 
-  protected RelNode expandViewForImpersonatedUser(ToRelContext context,
-                                                  RelDataType rowType,
+  protected RelNode expandViewForImpersonatedUser(DrillViewExpander context,
                                                   List<String> workspaceSchemaPath,
                                                   SchemaPlus tokenSchemaTree) {
-    return context.expandView(rowType, view.getSql(), tokenSchemaTree, workspaceSchemaPath).rel;
+    return context.expandView(view.getSql(), tokenSchemaTree, workspaceSchemaPath).rel;
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/EnumerableDrillRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/EnumerableDrillRule.java
deleted file mode 100644
index a1559bd..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/EnumerableDrillRule.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.planner.logical;
-
-import org.apache.calcite.adapter.enumerable.EnumerableConvention;
-
-import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.convert.ConverterRule;
-
-/**
- * Rule that converts any Drill relational expression to enumerable format by adding a {@link DrillScreenRelBase}.
- */
-public class EnumerableDrillRule extends ConverterRule {
-
-  public static EnumerableDrillRule INSTANCE = new EnumerableDrillRule();
-
-
-  private EnumerableDrillRule() {
-    super(RelNode.class, DrillRel.DRILL_LOGICAL, EnumerableConvention.INSTANCE, "EnumerableDrillRule.");
-  }
-
-  @Override
-  public boolean isGuaranteed() {
-    return true;
-  }
-
-  @Override
-  public RelNode convert(RelNode rel) {
-    assert rel.getTraitSet().contains(DrillRel.DRILL_LOGICAL);
-    return new DrillScreenRel(rel.getCluster(), rel.getTraitSet().replace(getOutConvention()), rel);
-  }
-}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ReduceAndSimplifyExpressionsRules.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ReduceAndSimplifyExpressionsRules.java
new file mode 100644
index 0000000..8c6a9dd
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ReduceAndSimplifyExpressionsRules.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.rel.RelCollations;
+import org.apache.calcite.rel.core.Calc;
+import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.SingleRel;
+import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rel.logical.LogicalSort;
+import org.apache.calcite.rel.rules.ReduceExpressionsRule;
+
+import java.math.BigDecimal;
+
+public class ReduceAndSimplifyExpressionsRules {
+
+  public static final ReduceAndSimplifyFilterRule FILTER_INSTANCE_DRILL =
+      new ReduceAndSimplifyFilterRule();
+
+  public static final ReduceAndSimplifyCalcRule CALC_INSTANCE_DRILL =
+      new ReduceAndSimplifyCalcRule();
+
+  public static final ReduceAndSimplifyProjectRule PROJECT_INSTANCE_DRILL =
+      new ReduceAndSimplifyProjectRule();
+
+  private static class ReduceAndSimplifyFilterRule extends ReduceExpressionsRule.FilterReduceExpressionsRule {
+
+    ReduceAndSimplifyFilterRule() {
+      super(FilterReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Filter.class)
+        .withMatchNullability(false)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .withDescription("ReduceAndSimplifyFilterRule")
+        .as(FilterReduceExpressionsRuleConfig.class));
+    }
+
+    /**
+     * Drill's schema flexibility requires us to override the default behavior of Calcite
+     * to produce an EmptyRel in the case of a constant false filter. We need to propagate
+     * schema at runtime, so we cannot just produce a simple operator at planning time to
+     * expose the planning time known schema. Instead we have to insert a limit 0.
+     */
+    @Override
+    protected RelNode createEmptyRelOrEquivalent(RelOptRuleCall call, Filter filter) {
+      return createEmptyEmptyRelHelper(filter);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      try {
+        super.onMatch(call);
+      } catch (ClassCastException e) {
+        // noop
+      }
+    }
+  }
+
+  private static class ReduceAndSimplifyCalcRule extends ReduceExpressionsRule.CalcReduceExpressionsRule {
+
+    ReduceAndSimplifyCalcRule() {
+      super(CalcReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Calc.class)
+        .withMatchNullability(true)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .withDescription("ReduceAndSimplifyCalcRule")
+        .as(CalcReduceExpressionsRuleConfig.class));
+    }
+
+    /**
+     * Drill's schema flexibility requires us to override the default behavior of Calcite
+     * to produce an EmptyRel in the case of a constant false filter. We need to propagate
+     * schema at runtime, so we cannot just produce a simple operator at planning time to
+     * expose the planning time known schema. Instead we have to insert a limit 0.
+     */
+    @Override
+    protected RelNode createEmptyRelOrEquivalent(RelOptRuleCall call, Calc input) {
+      return createEmptyEmptyRelHelper(input);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      try {
+        super.onMatch(call);
+      } catch (ClassCastException e) {
+        // noop
+      }
+    }
+  }
+
+  private static class ReduceAndSimplifyProjectRule extends ReduceExpressionsRule.ProjectReduceExpressionsRule {
+
+    ReduceAndSimplifyProjectRule() {
+      super(ProjectReduceExpressionsRuleConfig.DEFAULT
+        .withOperandFor(Project.class)
+        .withMatchNullability(true)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .withDescription("ReduceAndSimplifyProjectRule")
+        .as(ProjectReduceExpressionsRuleConfig.class));
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      try {
+        super.onMatch(call);
+      } catch (ClassCastException e) {
+        // noop
+      }
+    }
+  }
+
+  private static RelNode createEmptyEmptyRelHelper(SingleRel input) {
+    return LogicalSort.create(input.getInput(), RelCollations.EMPTY,
+        input.getCluster().getRexBuilder().makeExactLiteral(BigDecimal.valueOf(0)),
+        input.getCluster().getRexBuilder().makeExactLiteral(BigDecimal.valueOf(0)));
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
index 10752ad..4a77039 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
@@ -40,7 +40,7 @@
 
   public SelectionBasedTableScan(RelOptCluster cluster, RelTraitSet traitSet,
                                  RelOptTable table, String digestFromSelection) {
-    super(cluster, traitSet, table);
+    super(cluster, traitSet, Collections.emptyList(), table);
     this.digestFromSelection = digestFromSelection;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
index 3596213..9e6917b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
@@ -26,6 +26,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.drill.exec.planner.common.DrillRelOptUtil;
 import org.apache.drill.exec.planner.logical.SelectionBasedTableScan;
 import org.apache.drill.exec.util.DrillFileSystemUtil;
@@ -609,7 +610,12 @@
         }
       }
 
-      return scan.isDistinct() || aggregate.getGroupCount() > 0;
+      return isDistinct(scan) || aggregate.getGroupCount() > 0;
+    }
+
+    private boolean isDistinct(RelNode relNode) {
+      final RelMetadataQuery mq = relNode.getCluster().getMetadataQuery();
+      return Boolean.TRUE.equals(mq.areRowsUnique(relNode));
     }
 
     /*
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
index de8f499..4e079aa 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
@@ -413,10 +413,19 @@
    */
   public Quoting getQuotingIdentifiers() {
     String quotingIdentifiersCharacter = options.getOption(QUOTING_IDENTIFIERS);
-    for (Quoting value : Quoting.values()) {
-      if (value.string.equals(quotingIdentifiersCharacter)) {
-        return value;
-      }
+    switch (quotingIdentifiersCharacter) {
+      case "\"":
+        return Quoting.DOUBLE_QUOTE;
+      case "`":
+        return Quoting.BACK_TICK;
+      case "[":
+        return Quoting.BRACKET;
+      default:
+        for (Quoting value : Quoting.values()) {
+          if (value.string.equals(quotingIdentifiersCharacter)) {
+            return value;
+          }
+        }
     }
     // this is never reached
     throw UserException.validationError()
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelFactories.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelFactories.java
index 321398b..b565834 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelFactories.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelFactories.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.planner.physical;
 
 
+import org.apache.calcite.rel.hint.RelHint;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.rel.RelNode;
@@ -39,14 +40,12 @@
    */
   private static class DrillProjectPrelFactory implements RelFactories.ProjectFactory {
     @Override
-    public RelNode createProject(RelNode child,
-                                 List<? extends RexNode> childExprs, List<String> fieldNames) {
-      final RelOptCluster cluster = child.getCluster();
-      final RelDataType rowType = RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames);
-      final RelNode project = new ProjectPrel(cluster, child.getTraitSet().plus(Prel.DRILL_PHYSICAL),
-          child, Lists.newArrayList(childExprs), rowType);
+    public RelNode createProject(RelNode child, List<RelHint> hints, List<? extends RexNode> childExprs, List<? extends String> fieldNames) {
+      RelOptCluster cluster = child.getCluster();
+      RelDataType rowType = RexUtil.createStructType(cluster.getTypeFactory(), childExprs, fieldNames, null);
 
-      return project;
+      return new ProjectPrel(cluster, child.getTraitSet().plus(Prel.DRILL_PHYSICAL),
+        child, Lists.newArrayList(childExprs), rowType);
     }
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SortConvertPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SortConvertPrule.java
index 714262d..a626c52 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SortConvertPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/SortConvertPrule.java
@@ -36,7 +36,8 @@
   //public static final RelOptRule INSTANCE_SRC_LOGICAL = new SortPrule("SortPrule:Src_Logical", DrillRel.DRILL_LOGICAL);
 
   private SortConvertPrule(String description, Convention srcConvention) {
-    super(Sort.class, srcConvention, Prel.DRILL_PHYSICAL, description);
+    super(Config.INSTANCE
+      .withConversion(Sort.class, srcConvention, Prel.DRILL_PHYSICAL, description));
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillCalciteSqlOperatorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillCalciteSqlOperatorWrapper.java
index a95b898..7487a08 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillCalciteSqlOperatorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillCalciteSqlOperatorWrapper.java
@@ -147,4 +147,9 @@
       return super.inferReturnType(opBinding);
     }
   }
+
+  @Override
+  public SqlOperator reverse() {
+    return operator.reverse();
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConformance.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConformance.java
index 4a6aefc..67c7957 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConformance.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConformance.java
@@ -57,4 +57,14 @@
   public boolean isHavingAlias() {
     return true;
   }
+
+  @Override
+  public boolean isOffsetLimitAllowed() {
+    return true;
+  }
+
+  @Override
+  public boolean isLimitStartCountAllowed() {
+    return true;
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConvertletTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConvertletTable.java
index 5797cf2..16d97c4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConvertletTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillConvertletTable.java
@@ -36,6 +36,7 @@
 import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.SqlNumericLiteral;
 import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.fun.SqlRandFunction;
 import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.type.SqlTypeName;
@@ -72,6 +73,7 @@
         .put(SqlStdOperatorTable.COALESCE, coalesceConvertlet())
         .put(SqlStdOperatorTable.TIMESTAMP_DIFF, timestampDiffConvertlet())
         .put(SqlStdOperatorTable.ROW, rowConvertlet())
+        .put(SqlStdOperatorTable.RAND, randConvertlet())
         .put(SqlStdOperatorTable.AVG, avgVarianceConvertlet(DrillConvertletTable::expandAvg))
         .put(SqlStdOperatorTable.STDDEV_POP, avgVarianceConvertlet(arg -> expandVariance(arg, true, true)))
         .put(SqlStdOperatorTable.STDDEV_SAMP, avgVarianceConvertlet(arg -> expandVariance(arg, false, true)))
@@ -153,6 +155,20 @@
     };
   }
 
+  private static SqlRexConvertlet randConvertlet() {
+    return (cx, call) -> {
+      List<RexNode> operands = call.getOperandList().stream()
+        .map(cx::convertExpression)
+        .collect(Collectors.toList());
+      return cx.getRexBuilder().makeCall(new SqlRandFunction() {
+        @Override
+        public boolean isDeterministic() {
+          return false;
+        }
+      }, operands);
+    };
+  }
+
   private static SqlRexConvertlet substringConvertlet() {
     return (cx, call) -> {
       List<RexNode> exprs = call.getOperandList().stream()
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillParserConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillParserConfig.java
deleted file mode 100644
index 575ad86..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillParserConfig.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.planner.sql;
-
-import org.apache.calcite.avatica.util.Casing;
-import org.apache.calcite.avatica.util.Quoting;
-import org.apache.calcite.sql.parser.SqlParser;
-import org.apache.calcite.sql.parser.SqlParserImplFactory;
-import org.apache.calcite.sql.validate.SqlConformance;
-import org.apache.drill.exec.planner.physical.PlannerSettings;
-import org.apache.drill.exec.planner.sql.parser.impl.DrillParserWithCompoundIdConverter;
-
-public class DrillParserConfig implements SqlParser.Config {
-
-  private final long identifierMaxLength;
-  private final Quoting quotingIdentifiers;
-  public final static SqlConformance DRILL_CONFORMANCE = new DrillConformance();
-
-  public DrillParserConfig(PlannerSettings settings) {
-    identifierMaxLength = settings.getIdentifierMaxLength();
-    quotingIdentifiers = settings.getQuotingIdentifiers();
-  }
-
-  @Override
-  public int identifierMaxLength() {
-    return (int) identifierMaxLength;
-  }
-
-  @Override
-  public Casing quotedCasing() {
-    return Casing.UNCHANGED;
-  }
-
-  @Override
-  public Casing unquotedCasing() {
-    return Casing.UNCHANGED;
-  }
-
-  @Override
-  public Quoting quoting() {
-    return quotingIdentifiers;
-  }
-
-  @Override
-  public boolean caseSensitive() {
-    return false;
-  }
-
-  @Override
-  public SqlConformance conformance() {
-    return DRILL_CONFORMANCE;
-  }
-
-  @Override
-  public boolean allowBangEqual() {
-    return conformance().isBangEqualAllowed();
-  }
-
-  @Override
-  public SqlParserImplFactory parserFactory() {
-    return DrillParserWithCompoundIdConverter.FACTORY;
-  }
-
-}
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlSelectBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlSelectBuilder.java
index 852bdcb..a665390 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlSelectBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlSelectBuilder.java
@@ -34,6 +34,7 @@
   private SqlNodeList orderBy;
   private SqlNode offset;
   private SqlNode fetch;
+  private SqlNodeList hints;
 
   public SqlSelectBuilder parserPosition(SqlParserPos parserPosition) {
     this.parserPosition = parserPosition;
@@ -90,8 +91,14 @@
     return this;
   }
 
+  public SqlSelectBuilder hints(SqlNodeList hints) {
+    this.hints = hints;
+    return this;
+  }
+
   public SqlSelect build() {
-    return new SqlSelect(parserPosition, keywordList, selectList, from, where, groupBy, having, windowDecls, orderBy, offset, fetch);
+    return new SqlSelect(parserPosition, keywordList, selectList, from,
+      where, groupBy, having, windowDecls, orderBy, offset, fetch, hints);
   }
 
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/TypeInferenceUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/TypeInferenceUtils.java
index c95eb1a..ced54a4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/TypeInferenceUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/TypeInferenceUtils.java
@@ -606,6 +606,7 @@
       TimeUnit qualifier = ((SqlLiteral) ((SqlCallBinding) opBinding).operand(0)).getValueAs(TimeUnit.class);
 
       SqlTypeName sqlTypeName;
+      int precision = 0;
 
       // follow up with type inference of reduced expression
       switch (qualifier) {
@@ -616,17 +617,19 @@
         case YEAR:
         case NANOSECOND:  // NANOSECOND is not supported by Calcite SqlTimestampAddFunction.
                           // Once it is fixed, NANOSECOND should be moved to the group below.
+          precision = 3;
           sqlTypeName = inputTypeName;
           break;
         case MICROSECOND:
         case MILLISECOND:
           // precision should be specified for MICROSECOND and MILLISECOND
-          return factory.createTypeWithNullability(
-              factory.createSqlType(SqlTypeName.TIMESTAMP, 3),
-              isNullable);
+          precision = 3;
+          sqlTypeName = SqlTypeName.TIMESTAMP;
+          break;
         case SECOND:
         case MINUTE:
         case HOUR:
+          precision = 3;
           if (inputTypeName == SqlTypeName.TIME) {
             sqlTypeName = SqlTypeName.TIME;
           } else {
@@ -639,7 +642,7 @@
 
       // preserves precision of input type if it was specified
       if (inputType.getSqlTypeName().allowsPrecNoScale()) {
-        RelDataType type = factory.createSqlType(sqlTypeName, inputType.getPrecision());
+        RelDataType type = factory.createSqlType(sqlTypeName, precision);
         return factory.createTypeWithNullability(type, isNullable);
       }
       return createCalciteTypeWithNullability(
@@ -984,10 +987,18 @@
   public static RelDataType convertToCalciteType(RelDataTypeFactory typeFactory,
                                                  TypeProtos.MajorType drillType, boolean isNullable) {
     SqlTypeName sqlTypeName = getCalciteTypeFromDrillType(drillType.getMinorType());
-    if (sqlTypeName == SqlTypeName.DECIMAL) {
-      return typeFactory.createTypeWithNullability(
+    switch (sqlTypeName) {
+      case DECIMAL:
+        return typeFactory.createTypeWithNullability(
           typeFactory.createSqlType(sqlTypeName, drillType.getPrecision(),
               drillType.getScale()), isNullable);
+      case TIME:
+      case TIMESTAMP:
+        int precision = drillType.hasPrecision()
+          ? drillType.getPrecision()
+          : typeFactory.getTypeSystem().getDefaultPrecision(sqlTypeName);
+        return typeFactory.createTypeWithNullability(
+          typeFactory.createSqlType(sqlTypeName, precision), isNullable);
     }
     return createCalciteTypeWithNullability(typeFactory, sqlTypeName, isNullable);
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillValidator.java
index f23f425..5de8a34 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillValidator.java
@@ -17,10 +17,6 @@
  */
 package org.apache.drill.exec.planner.sql.conversion;
 
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.calcite.jdbc.CalciteSchema;
 import org.apache.calcite.jdbc.DynamicRootSchema;
 import org.apache.calcite.rel.type.RelDataType;
@@ -42,13 +38,20 @@
 import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.List;
+
 class DrillValidator extends SqlValidatorImpl {
 
   private final boolean isImpersonationEnabled;
 
   DrillValidator(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader,
                  RelDataTypeFactory typeFactory, SqlConformance conformance, boolean isImpersonationEnabled) {
-    super(opTab, catalogReader, typeFactory, conformance);
+    super(opTab, catalogReader, typeFactory,
+      Config.DEFAULT.withConformance(conformance)
+        .withTypeCoercionEnabled(true)
+        .withIdentifierExpansion(true));
     this.isImpersonationEnabled = isImpersonationEnabled;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillViewExpander.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillViewExpander.java
index c2257b1..6e9e0da 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillViewExpander.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/DrillViewExpander.java
@@ -29,12 +29,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-class DrillViewExpander implements RelOptTable.ViewExpander {
+public class DrillViewExpander implements RelOptTable.ViewExpander {
   private static final Logger logger = LoggerFactory.getLogger(DrillViewExpander.class);
 
   private final SqlConverter sqlConverter;
 
-  DrillViewExpander(SqlConverter sqlConverter) {
+  public DrillViewExpander(SqlConverter sqlConverter) {
     this.sqlConverter = sqlConverter;
   }
 
@@ -46,8 +46,7 @@
     return convertToRel(queryString, parser);
   }
 
-  @Override
-  public RelRoot expandView(RelDataType rowType, String queryString, SchemaPlus rootSchema, List<String> schemaPath) {
+  public RelRoot expandView(String queryString, SchemaPlus rootSchema, List<String> schemaPath) {
     final DrillCalciteCatalogReader catalogReader = newCatalogReader(rootSchema, schemaPath);
     SchemaPlus schema = findSchema(queryString, rootSchema, schemaPath);
     SqlConverter parser = new SqlConverter(sqlConverter, schema, rootSchema, catalogReader);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/SqlConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/SqlConverter.java
index ce3cf4d..76452bb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/SqlConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/conversion/SqlConverter.java
@@ -18,10 +18,12 @@
 package org.apache.drill.exec.planner.sql.conversion;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
 
 import org.apache.calcite.adapter.java.JavaTypeFactory;
+import org.apache.calcite.avatica.util.Casing;
 import org.apache.calcite.jdbc.DynamicSchema;
 import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
 import org.apache.calcite.plan.ConventionTraitDef;
@@ -37,27 +39,29 @@
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexNode;
-import org.apache.calcite.runtime.Hook;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.SqlOperatorTable;
 import org.apache.calcite.sql.parser.SqlParseException;
 import org.apache.calcite.sql.parser.SqlParser;
 import org.apache.calcite.sql.util.ChainedSqlOperatorTable;
+import org.apache.calcite.sql.validate.SqlConformance;
 import org.apache.calcite.sql2rel.SqlToRelConverter;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.QueryContext;
+import org.apache.drill.exec.planner.DrillRelBuilder;
 import org.apache.drill.exec.planner.cost.DrillCostBase;
 import org.apache.drill.exec.planner.logical.DrillConstExecutor;
 import org.apache.drill.exec.planner.logical.DrillRelFactories;
 import org.apache.drill.exec.planner.physical.DrillDistributionTraitDef;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.exec.planner.sql.DrillConformance;
 import org.apache.drill.exec.planner.sql.DrillConvertletTable;
-import org.apache.drill.exec.planner.sql.DrillParserConfig;
 import org.apache.drill.exec.planner.sql.SchemaUtilites;
+import org.apache.drill.exec.planner.sql.parser.impl.DrillParserWithCompoundIdConverter;
 import org.apache.drill.exec.planner.sql.parser.impl.DrillSqlParseException;
 import org.apache.drill.exec.planner.types.DrillRelDataTypeSystem;
 import org.apache.drill.exec.rpc.user.UserSession;
@@ -75,6 +79,7 @@
  */
 public class SqlConverter {
   private static final Logger logger = LoggerFactory.getLogger(SqlConverter.class);
+  public final static SqlConformance DRILL_CONFORMANCE = new DrillConformance();
 
   private final JavaTypeFactory typeFactory;
   private final SqlParser.Config parserConfig;
@@ -93,31 +98,36 @@
   private final UserSession session;
   private final DrillConfig drillConfig;
   // Allow the default config to be modified using immutable configs
-  private SqlToRelConverter.Config sqlToRelConverterConfig;
+  private final SqlToRelConverter.Config sqlToRelConverterConfig;
   private RelOptCluster cluster;
   private VolcanoPlanner planner;
   private boolean useRootSchema = false;
 
-  static {
-    /*
-     * Sets value to false to avoid simplifying project expressions
-     * during creating new projects since it may cause changing data mode
-     * which causes to assertion errors during type validation
-     */
-    Hook.REL_BUILDER_SIMPLIFY.add(Hook.propertyJ(false));
-  }
-
   public SqlConverter(QueryContext context) {
     this.settings = context.getPlannerSettings();
     this.util = context;
     this.functions = context.getFunctionRegistry();
-    this.parserConfig = new DrillParserConfig(settings);
-    this.sqlToRelConverterConfig = SqlToRelConverter.configBuilder()
+    this.parserConfig = SqlParser.Config.DEFAULT
+      .withIdentifierMaxLength((int) settings.getIdentifierMaxLength())
+      .withQuoting(settings.getQuotingIdentifiers())
+      .withParserFactory(DrillParserWithCompoundIdConverter.FACTORY)
+      .withCaseSensitive(false)
+      .withConformance(DRILL_CONFORMANCE)
+      .withUnquotedCasing(Casing.UNCHANGED)
+      .withQuotedCasing(Casing.UNCHANGED);
+    this.sqlToRelConverterConfig = SqlToRelConverter.config()
         .withInSubQueryThreshold((int) settings.getInSubqueryThreshold())
-        .withConvertTableAccess(false)
+        .withRemoveSortInSubQuery(false)
+        .withRelBuilderConfigTransform(t -> t
+          .withSimplify(false)
+          .withAggregateUnique(true)
+          .withPruneInputOfAggregate(false)
+          .withDedupAggregateCalls(false)
+          .withSimplifyLimit(false)
+          .withBloat(DrillRelBuilder.DISABLE_MERGE_PROJECT)
+          .withSimplifyValues(false))
         .withExpand(false)
-        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
-        .build();
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER);
     this.isInnerQuery = false;
     this.isExpandedView = false;
     this.typeFactory = new JavaTypeFactoryImpl(DrillRelDataTypeSystem.DRILL_REL_DATATYPE_SYSTEM);
@@ -140,7 +150,6 @@
     this.costFactory = (settings.useDefaultCosting()) ? null : new DrillCostBase.DrillCostFactory();
     this.validator =
       new DrillValidator(opTab, catalog, typeFactory, parserConfig.conformance(), context.isImpersonationEnabled());
-    validator.setIdentifierExpansion(true);
     cluster = null;
   }
 
@@ -165,7 +174,6 @@
     this.temporarySchema = parent.temporarySchema;
     this.session = parent.session;
     this.drillConfig = parent.drillConfig;
-    validator.setIdentifierExpansion(true);
     this.cluster = parent.cluster;
   }
 
@@ -201,6 +209,7 @@
   public RelRoot toRel(final SqlNode validatedNode) {
     initCluster(initPlanner());
     DrillViewExpander viewExpander = new DrillViewExpander(this);
+    util.getViewExpansionContext().setViewExpander(viewExpander);
     final SqlToRelConverter sqlToRelConverter = new SqlToRelConverter(
         viewExpander, validator, catalog, cluster,
         DrillConvertletTable.INSTANCE, sqlToRelConverterConfig);
@@ -218,7 +227,7 @@
           .map(f -> builder.makeInputRef(relNode, f.left))
           .collect(Collectors.toList());
 
-      RelNode project = LogicalProject.create(rel.rel, expressions, rel.validatedRowType);
+      RelNode project = LogicalProject.create(rel.rel, Collections.emptyList(), expressions, rel.validatedRowType);
       rel = RelRoot.of(project, rel.validatedRowType, rel.kind);
     }
     return rel.withRel(sqlToRelConverter.flattenTypes(rel.rel, true));
@@ -228,6 +237,10 @@
     return validator.getValidatedNodeType(validatedNode);
   }
 
+  public DrillValidator getValidator() {
+    return validator;
+  }
+
   public JavaTypeFactory getTypeFactory() {
     return typeFactory;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
index 48e8558..dca4ece 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
@@ -225,11 +225,8 @@
     }
 
     try {
-      // HEP for rules, which are failed at the LOGICAL_PLANNING stage for Volcano planner
-      final RelNode setOpTransposeNode = transform(PlannerType.HEP, PlannerPhase.PRE_LOGICAL_PLANNING, relNode);
-
       // HEP Directory pruning.
-      final RelNode pruned = transform(PlannerType.HEP_BOTTOM_UP, PlannerPhase.DIRECTORY_PRUNING, setOpTransposeNode);
+      final RelNode pruned = transform(PlannerType.HEP_BOTTOM_UP, PlannerPhase.DIRECTORY_PRUNING, relNode);
       final RelTraitSet logicalTraits = pruned.getTraitSet().plus(DrillRel.DRILL_LOGICAL);
 
       final RelNode convertedRelNode;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DrillTableInfo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DrillTableInfo.java
index 09b8f02..c789261 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DrillTableInfo.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DrillTableInfo.java
@@ -19,6 +19,7 @@
 
 import org.apache.calcite.schema.Table;
 import org.apache.calcite.sql.SqlCall;
+import org.apache.calcite.sql.SqlCallBinding;
 import org.apache.calcite.sql.SqlFunction;
 import org.apache.calcite.sql.SqlIdentifier;
 import org.apache.calcite.sql.SqlKind;
@@ -90,7 +91,7 @@
         AbstractSchema drillSchema = SchemaUtilites.resolveToDrillSchema(
             config.getConverter().getDefaultSchema(), SchemaUtilites.getSchemaPath(tableIdentifier));
 
-        DrillTable table = (DrillTable) tableMacro.getTable(config.getConverter().getTypeFactory(), prepareTableMacroOperands(call.operand(0)));
+        DrillTable table = (DrillTable) tableMacro.getTable(new SqlCallBinding(config.getConverter().getValidator(), null, call.operand(0)));
         return new DrillTableInfo(table, drillSchema.getSchemaPath(), Util.last(tableIdentifier.names));
       }
       case IDENTIFIER: {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
index a7602e1..6aca6ba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/parser/CompoundIdentifierConverter.java
@@ -66,7 +66,7 @@
         .put(SqlAnalyzeTable.class, arrayOf(D, D, E, D))
         .put(SqlMetastoreAnalyzeTable.class, arrayOf(D, E, D, D, D))
         .put(SqlDropTableMetadata.class, arrayOf(D, D, D))
-        .put(SqlSelect.class, arrayOf(D, E, D, E, E, E, E, E, D, D))
+        .put(SqlSelect.class, arrayOf(D, E, D, E, E, E, E, E, D, D, D))
         .put(SqlCreateTable.class, arrayOf(D, D, D, E, D, D))
         .put(SqlCreateView.class, arrayOf(D, E, E, D))
         .put(DrillSqlDescribeTable.class, arrayOf(D, D, E))
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/torel/ConversionContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/torel/ConversionContext.java
index e4b9668..16b3ed8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/torel/ConversionContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/torel/ConversionContext.java
@@ -17,12 +17,12 @@
  */
 package org.apache.drill.exec.planner.torel;
 
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.calcite.prepare.Prepare;
-
 import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.hint.RelHint;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.logical.LogicalPlan;
 import org.apache.drill.common.logical.data.Filter;
@@ -53,22 +53,18 @@
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexNode;
-import org.apache.calcite.schema.SchemaPlus;
 
 public class ConversionContext implements ToRelContext {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConversionContext.class);
 
   private static final ConverterVisitor VISITOR = new ConverterVisitor();
 
   private final Map<Scan, FieldList> scanFieldLists;
   private final RelOptCluster cluster;
-  private final Prepare prepare;
 
   public ConversionContext(RelOptCluster cluster, LogicalPlan plan) {
     super();
     scanFieldLists = ScanFieldDeterminer.getFieldLists(plan);
     this.cluster = cluster;
-    this.prepare = null;
   }
 
   @Override
@@ -76,6 +72,10 @@
     return cluster;
   }
 
+  @Override
+  public List<RelHint> getTableHints() {
+    return Collections.emptyList();
+  }
 
   private FieldList getFieldList(Scan scan) {
     assert scanFieldLists.containsKey(scan);
@@ -116,11 +116,6 @@
     throw new UnsupportedOperationException();
   }
 
-  @Override
-  public RelRoot expandView(RelDataType rowType, String queryString, SchemaPlus rootSchema, List<String> schemaPath) {
-    throw new UnsupportedOperationException();
-  }
-
   private static class ConverterVisitor extends AbstractLogicalVisitor<RelNode, ConversionContext, InvalidRelException>{
 
     @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillRelDataTypeSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillRelDataTypeSystem.java
index cdaad29..54c43b1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillRelDataTypeSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillRelDataTypeSystem.java
@@ -34,6 +34,9 @@
       case VARCHAR:
       case VARBINARY:
         return Types.MAX_VARCHAR_LENGTH;
+      case TIMESTAMP:
+      case TIME:
+        return Types.DEFAULT_TIMESTAMP_PRECISION;
       default:
         return super.getDefaultPrecision(typeName);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRulesProviderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRulesProviderImpl.java
index b91b47e..e1b3c1c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRulesProviderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRulesProviderImpl.java
@@ -106,7 +106,7 @@
 
   @Override
   public RelOptRule vertexRule() {
-    return new VertexDrelConverterRule(convention);
+    return VertexDrelConverterRule.create(convention);
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/DrillJdbcRuleBase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/DrillJdbcRuleBase.java
index 2fc5183..18fe9ff 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/DrillJdbcRuleBase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/DrillJdbcRuleBase.java
@@ -55,7 +55,10 @@
   protected final JdbcConvention out;
 
   protected DrillJdbcRuleBase(Class<? extends RelNode> clazz, RelTrait in, JdbcConvention out, String description) {
-    super(clazz, (Predicate<RelNode>) input -> true, in, out, DrillRelFactories.LOGICAL_BUILDER, description);
+    super(Config.INSTANCE
+      .withConversion(clazz, (Predicate<RelNode>) input -> true, in, out, description)
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .as(Config.class));
     this.out = out;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/VertexDrelConverterRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/VertexDrelConverterRule.java
index f69ba7c..4b3994c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/VertexDrelConverterRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/enumerable/plan/VertexDrelConverterRule.java
@@ -18,18 +18,24 @@
 package org.apache.drill.exec.store.enumerable.plan;
 
 import org.apache.calcite.plan.Convention;
+import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.convert.ConverterRule;
 import org.apache.drill.exec.planner.logical.DrillRel;
 import org.apache.drill.exec.planner.logical.DrillRelFactories;
 
-import java.util.function.Predicate;
-
 public class VertexDrelConverterRule extends ConverterRule {
 
-  public VertexDrelConverterRule(Convention in) {
-    super(RelNode.class, (Predicate<RelNode>) input -> true, in, DrillRel.DRILL_LOGICAL,
-        DrillRelFactories.LOGICAL_BUILDER, "VertexDrelConverterRule" + in.getName());
+  private VertexDrelConverterRule(Config config) {
+    super(config);
+  }
+
+  public static RelOptRule create(Convention in) {
+    return Config.INSTANCE
+      .withRuleFactory(VertexDrelConverterRule::new)
+      .withConversion(RelNode.class, input -> true, in, DrillRel.DRILL_LOGICAL, "VertexDrelConverterRule" + in.getName())
+      .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+      .toRule();
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/StoragePluginTableScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/StoragePluginTableScan.java
index ce8ce9e..2be69ef 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/StoragePluginTableScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/StoragePluginTableScan.java
@@ -71,11 +71,6 @@
   }
 
   @Override
-  protected String computeDigest() {
-    return super.computeDigest();
-  }
-
-  @Override
   public boolean canImplement(PluginImplementor implementor) {
     return implementor.canImplement(this);
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rule/PluginConverterRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rule/PluginConverterRule.java
index 377eba3..dd7d699 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rule/PluginConverterRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rule/PluginConverterRule.java
@@ -43,7 +43,10 @@
 
   protected PluginConverterRule(Class<? extends RelNode> clazz,
       RelTrait in, Convention out, String description, PluginImplementor pluginImplementor) {
-    super(clazz, (Predicate<RelNode>) input -> true, in, out, DrillRelFactories.LOGICAL_BUILDER, description);
+    super(Config.INSTANCE
+        .withConversion(clazz, (Predicate<RelNode>) input -> true, in, out, description)
+        .withRelBuilderFactory(DrillRelFactories.LOGICAL_BUILDER)
+        .as(Config.class));
     this.pluginImplementor = pluginImplementor;
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/table/function/WithOptionsTableMacro.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/table/function/WithOptionsTableMacro.java
index 6531ba1..abcd9ef 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/table/function/WithOptionsTableMacro.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/table/function/WithOptionsTableMacro.java
@@ -45,8 +45,8 @@
   }
 
   @Override
-  public TranslatableTable apply(List<Object> arguments) {
-    DrillTable drillTable = function.apply(arguments);
+  public TranslatableTable apply(List<?> arguments) {
+    DrillTable drillTable = function.apply((List<Object>) arguments);
     if (drillTable == null) {
       throw UserException
         .validationError()
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/ServerMetaProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/ServerMetaProvider.java
index 64af4ad..f18bf0e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/ServerMetaProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/ServerMetaProvider.java
@@ -25,7 +25,6 @@
 import org.apache.calcite.sql.parser.SqlParser;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
-import org.apache.drill.exec.planner.sql.DrillParserConfig;
 import org.apache.drill.exec.proto.UserProtos.ConvertSupport;
 import org.apache.drill.exec.proto.UserProtos.CorrelationNamesSupport;
 import org.apache.drill.exec.proto.UserProtos.DateTimeLiteralsSupport;
@@ -54,7 +53,7 @@
  * Contains worker {@link Runnable} for returning server meta information
  */
 public class ServerMetaProvider {
-  private static ServerMeta DEFAULT = ServerMeta.newBuilder()
+  private static final ServerMeta DEFAULT = ServerMeta.newBuilder()
       .addAllConvertSupport(getSupportedConvertOps())
       .addAllDateTimeFunctions(Splitter.on(",").split(SqlJdbcFunctionCall.getTimeDateFunctions()))
       .addAllDateTimeLiteralsSupport(Arrays.asList(DateTimeLiteralsSupport.values()))
@@ -83,7 +82,7 @@
       .build();
 
 
-  private static final Iterable<ConvertSupport> getSupportedConvertOps() {
+  private static Iterable<ConvertSupport> getSupportedConvertOps() {
     // A set would be more appropriate but it's not possible to produce
     // duplicates, and an iterable is all we need.
     ImmutableList.Builder<ConvertSupport> supportedConvertedOps = ImmutableList.builder();
@@ -124,7 +123,9 @@
         final ServerMeta.Builder metaBuilder = ServerMeta.newBuilder(DEFAULT);
         PlannerSettings plannerSettings = new PlannerSettings(session.getOptions(), context.getFunctionImplementationRegistry());
 
-        DrillParserConfig config = new DrillParserConfig(plannerSettings);
+        SqlParser.Config config = SqlParser.Config.DEFAULT
+          .withIdentifierMaxLength((int) plannerSettings.getIdentifierMaxLength())
+          .withQuoting(plannerSettings.getQuotingIdentifiers());
 
         int identifierMaxLength = config.identifierMaxLength();
         Metadata metadata = SqlParser.create("", config).getMetadata();
@@ -166,4 +167,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index 8f5c8fd..fd5046e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -22,6 +22,7 @@
 
 import org.apache.calcite.jdbc.DynamicSchema;
 import org.apache.drill.exec.alias.AliasRegistryProvider;
+import org.apache.drill.exec.ops.ViewExpansionContext;
 import org.apache.drill.shaded.guava.com.google.common.base.Function;
 import io.netty.buffer.DrillBuf;
 import org.apache.calcite.schema.SchemaPlus;
@@ -81,6 +82,7 @@
   protected void testSqlPlan(String sqlCommands) throws Exception {
     final DrillbitContext dbContext = mock(DrillbitContext.class);
     final QueryContext context = mock(QueryContext.class);
+    ViewExpansionContext viewExpansionContext = mock(ViewExpansionContext.class);
 
     final String[] sqlStrings = sqlCommands.split(";");
     final LocalPersistentStoreProvider provider = new LocalPersistentStoreProvider(config);
@@ -136,6 +138,7 @@
         Matchers.<Function<DrillBuf, ValueHolder>>any()))
       .thenReturn(ValueHolderHelper.getVarDecimalHolder(allocator.buffer(4), "0.01"));
     when(context.getOption(anyString())).thenCallRealMethod();
+    when(context.getViewExpansionContext()).thenReturn(viewExpansionContext);
 
 
     for (final String sql : sqlStrings) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java b/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
index b359666..f749a1c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
@@ -26,8 +26,8 @@
 
   @Test
   public void testDRILL_FilterMerge() throws Exception {
-    String expectedPattern1 = "Filter(condition=[AND(OR(=($0, 1), =($0, 2), =($0, 3)), =($4, 'GRADUATE DEGREE'))])";
-    String expectedPattern2 = "Filter(condition=[AND(OR(=($0, 1), =($0, 2), =($0, 3)), LIKE($1, '%VP%'))])";
+    String expectedPattern1 = "Filter(condition=[AND(SEARCH($0, Sarg[1, 2, 3]), =($4, 'GRADUATE DEGREE'))])";
+    String expectedPattern2 = "Filter(condition=[AND(SEARCH($0, Sarg[1, 2, 3]), LIKE($1, '%VP%'))])";
     String excludedPattern = "Filter(condition=[OR(=($0, 1), =($0, 2), =($0, 3))])";
 
     test("use dfs.tmp");
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaWithTableFunction.java b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaWithTableFunction.java
index 5b448a4..1577c2e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaWithTableFunction.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaWithTableFunction.java
@@ -240,4 +240,17 @@
 
     run("select Year from table(%s(schema=>'path=(int)')) where Make = 'Ford'", table);
   }
+
+  @Test // DRILL-7526
+  public void testWithTypeAndSchema() throws Exception {
+    String query = "select Year from table(dfs.`store/text/data/cars.csvh`(type=> 'text', " +
+      "schema=>'inline=(`Year` int)')) where Make = 'Ford'";
+
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .baselineColumns("Year")
+      .baselineValues(1997)
+      .go();
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
index fd97564..724b518 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
@@ -36,7 +36,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception{
     testDistributed("queries/tpch/02.sql");
   }
@@ -143,7 +142,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception{
     testDistributed("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
index 5be86ce..5502410 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
@@ -38,7 +38,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception{
     testDistributed("queries/tpch/02.sql");
   }
@@ -84,7 +83,6 @@
   }
 
   @Test
-  @Ignore // cartesion problem
   public void tpch11() throws Exception{
     testDistributed("queries/tpch/11.sql");
   }
@@ -105,19 +103,16 @@
   }
 
   @Test
-  @Ignore // non-equality join
   public void tpch15() throws Exception{
     testDistributed("queries/tpch/15.sql");
   }
 
   @Test
-  @Ignore // invalid plan, due to Nulls value NOT IN sub-q
   public void tpch16() throws Exception{
     testDistributed("queries/tpch/16.sql");
   }
 
   @Test
-  @Ignore // non-equality join
   public void tpch17() throws Exception{
     testDistributed("queries/tpch/17.sql");
   }
@@ -149,7 +144,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception{
     testDistributed("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
index 1aa9de2..abd932c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
@@ -25,7 +25,6 @@
 
 @Category(PlannerTest.class)
 public class TestTpchExplain extends BaseTestQuery {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchExplain.class);
 
   private static final String EXPLAIN_PREFIX = "EXPLAIN PLAN FOR ";
 
@@ -42,7 +41,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception{
     doExplain("queries/tpch/02.sql");
   }
@@ -88,7 +86,6 @@
   }
 
   @Test
-  @Ignore // cartesion problem
   public void tpch11() throws Exception{
     doExplain("queries/tpch/11.sql");
   }
@@ -115,7 +112,6 @@
   }
 
   @Test
-  @Ignore // invalid plan, due to Nulls value NOT IN sub-q
   public void tpch16() throws Exception{
     doExplain("queries/tpch/16.sql");
   }
@@ -153,7 +149,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception{
     doExplain("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
index 4717993..3086f26 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
@@ -38,7 +38,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception{
     testLimitZero("queries/tpch/02.sql");
   }
@@ -84,7 +83,6 @@
   }
 
   @Test
-  @Ignore // Cartesian problem
   public void tpch11() throws Exception{
     testLimitZero("queries/tpch/11.sql");
   }
@@ -111,13 +109,11 @@
   }
 
   @Test
-  @Ignore // invalid plan, due to Nulls value NOT IN sub-q
   public void tpch16() throws Exception{
     testLimitZero("queries/tpch/16.sql");
   }
 
   @Test
-  @Ignore //
   public void tpch17() throws Exception{
     testLimitZero("queries/tpch/17.sql");
   }
@@ -144,7 +140,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception{
     testLimitZero("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
index f995ce6..083644e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
@@ -27,7 +27,6 @@
 
 @Category(PlannerTest.class)
 public class TestTpchPlanning extends PlanningBase {
-  //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchPlanning.class);
 
   @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(50000);
 
@@ -37,7 +36,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception {
     testSqlPlanFromFile("queries/tpch/02.sql");
   }
@@ -68,13 +66,11 @@
   }
 
   @Test
-  @Ignore // cannot plan exception (was DRILL-516)
   public void tpch08() throws Exception {
     testSqlPlanFromFile("queries/tpch/08.sql");
   }
 
   @Test
-  @Ignore // cannot plan exception (was DRILL-516)
   public void tpch09() throws Exception {
     testSqlPlanFromFile("queries/tpch/09.sql");
   }
@@ -85,7 +81,6 @@
   }
 
   @Test
-  @Ignore // cartesion problem
   public void tpch11() throws Exception {
     testSqlPlanFromFile("queries/tpch/11.sql");
   }
@@ -96,7 +91,6 @@
   }
 
   @Test
-  @Ignore // sporadic failures when part of the full build.
   public void tpch13() throws Exception {
     testSqlPlanFromFile("queries/tpch/13.sql");
   }
@@ -113,7 +107,6 @@
   }
 
   @Test
-  @Ignore // invalid plan, due to Nulls value NOT IN sub-q
   public void tpch16() throws Exception {
     testSqlPlanFromFile("queries/tpch/16.sql");
   }
@@ -135,7 +128,6 @@
   }
 
   @Test
-  @Ignore // DRILL-517
   public void tpch20() throws Exception {
     testSqlPlanFromFile("queries/tpch/20.sql");
   }
@@ -146,7 +138,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception {
     testSqlPlanFromFile("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
index 222d979..39ba988 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
@@ -41,7 +41,6 @@
   }
 
   @Test
-  @Ignore // DRILL-512
   public void tpch02() throws Exception{
     testSingleMode("queries/tpch/02.sql");
   }
@@ -87,7 +86,6 @@
   }
 
   @Test
-  @Ignore // Cartesian problem
   public void tpch11() throws Exception{
     testSingleMode("queries/tpch/11.sql");
   }
@@ -108,13 +106,11 @@
   }
 
   @Test
-  @Ignore //
   public void tpch15() throws Exception{
     testSingleMode("queries/tpch/15.sql");
   }
 
   @Test
-  @Ignore // invalid plan, due to Nulls value NOT IN sub-q
   public void tpch16() throws Exception{
     testSingleMode("queries/tpch/16.sql");
   }
@@ -146,7 +142,6 @@
   }
 
   @Test
-  @Ignore // DRILL-518
   public void tpch22() throws Exception{
     testSingleMode("queries/tpch/22.sql");
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
index 97a7a7d..8cf5984 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
@@ -153,33 +153,15 @@
 
   /**
    * Test with query against an empty file. Select clause has three expressions.
-   * 1.0 + 100.0 as constant expression, is resolved to required FLOAT8/VARDECIMAL
+   * 1.0 + 100.0 as constant expression, is resolved to required VARDECIMAL
    * cast(100 as varchar(100) is resolved to required varchar(100)
    * cast(columns as varchar(100)) is resolved to nullable varchar(100).
    */
   @Test
   public void testQueryConstExprEmptyJson() throws Exception {
     try {
-      alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, false);
-      SchemaBuilder schemaBuilder = new SchemaBuilder()
-          .add("key", TypeProtos.MinorType.FLOAT8)
-          .add("name", TypeProtos.MinorType.VARCHAR, 100)
-          .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
-      BatchSchema expectedSchema = new BatchSchemaBuilder()
-          .withSchemaBuilder(schemaBuilder)
-          .build();
-
-      testBuilder()
-          .sqlQuery("select 1.0 + 100.0 as key, "
-            + " cast(100 as varchar(100)) as name, "
-            + " cast(columns as varchar(100)) as name2 "
-            + " from cp.`%s` ", SINGLE_EMPTY_JSON)
-          .schemaBaseLine(expectedSchema)
-          .build()
-          .run();
-
       alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
-      schemaBuilder = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("key",
               TypeProtos.MajorType.newBuilder()
                   .setMinorType(TypeProtos.MinorType.VARDECIMAL)
@@ -189,7 +171,7 @@
                   .build())
           .add("name", TypeProtos.MinorType.VARCHAR, 100)
           .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
-      expectedSchema = new BatchSchemaBuilder()
+      BatchSchema expectedSchema = new BatchSchemaBuilder()
           .withSchemaBuilder(schemaBuilder)
           .build();
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java
index 1acb8fb..2ffbe4a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestWindowFunctions.java
@@ -50,7 +50,7 @@
         "from cp.`tpch/nation.parquet`";
 
     // Validate the plan
-    final String[] expectedPlan = {"Window.*partition \\{0\\} order by \\[\\].*\\[SUM\\(\\$0\\), COUNT\\(\\)",
+    final String[] expectedPlan = {"Window.*partition \\{0\\} aggs .*\\[SUM\\(\\$0\\), COUNT\\(\\)",
         "Scan.*columns=\\[`n_nationKey`\\].*"};
     final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\].*"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);
@@ -413,7 +413,7 @@
     String query = "select count(*) over (order by o_orderpriority) as cnt from dfs.`multilevel/parquet` where o_custkey < 100";
     try {
       // Validate the plan
-      final String[] expectedPlan = {"Window.*partition \\{\\} order by \\[0\\].*COUNT\\(\\)",
+      final String[] expectedPlan = {"Window.*order by \\[0\\].*COUNT\\(\\)",
           "Scan.*columns=\\[`o_custkey`, `o_orderpriority`\\]"};
       final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
       PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);
@@ -445,7 +445,7 @@
         "where n_nationkey = 1";
 
     // Validate the plan
-    final String[] expectedPlan1 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$0\\), COUNT\\(\\$0\\)",
+    final String[] expectedPlan1 = {"Window.*partition \\{0\\} aggs .*SUM\\(\\$0\\), COUNT\\(\\$0\\)",
         "Scan.*columns=\\[`n_nationkey`\\]"};
     final String[] excludedPatterns1 = {"Scan.*columns=\\[`\\*`\\]"};
     PlanTestBase.testPlanMatchingPatterns(avgQuery, expectedPlan1, excludedPatterns1);
@@ -462,7 +462,7 @@
         "where n_nationkey = 1";
 
     // Validate the plan
-    final String[] expectedPlan2 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$2\\), SUM\\(\\$1\\), COUNT\\(\\$1\\)",
+    final String[] expectedPlan2 = {"Window.*partition \\{0\\} aggs .*SUM\\(\\$2\\), SUM\\(\\$1\\), COUNT\\(\\$1\\)",
         "Scan.*columns=\\[`n_nationkey`\\]"};
     final String[] excludedPatterns2 = {"Scan.*columns=\\[`\\*`\\]"};
     PlanTestBase.testPlanMatchingPatterns(varianceQuery, expectedPlan2, excludedPatterns2);
@@ -481,7 +481,7 @@
         "from cp.`jsoninput/large_int.json` limit 1";
 
     // Validate the plan
-    final String[] expectedPlan1 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$1\\)",
+    final String[] expectedPlan1 = {"Window.*partition \\{0\\} aggs .*SUM\\(\\$1\\)",
         "Scan.*columns=\\[`col_varchar`, `col_int`\\]"};
     final String[] excludedPatterns1 = {"Scan.*columns=\\[`\\*`\\]"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan1, excludedPatterns1);
@@ -497,7 +497,7 @@
         "from cp.`jsoninput/large_int.json` limit 1";
 
     // Validate the plan
-    final String[] expectedPlan2 = {"Window.*partition \\{0\\} order by \\[\\].*SUM\\(\\$1\\), COUNT\\(\\$1\\)",
+    final String[] expectedPlan2 = {"Window.*partition \\{0\\} aggs .*SUM\\(\\$1\\), COUNT\\(\\$1\\)",
         "Scan.*columns=\\[`col_varchar`, `col_int`\\]"};
     final String[] excludedPatterns2 = {"Scan.*columns=\\[`\\*`\\]"};
     PlanTestBase.testPlanMatchingPatterns(avgQuery, expectedPlan2, excludedPatterns2);
@@ -549,7 +549,7 @@
         " from cp.`tpch/lineitem.parquet` group by l_partkey, l_suppkey order by 1 desc limit 1";
 
     // Validate the plan
-    final String[] expectedPlan = {"Window.*partition \\{\\} order by \\[1\\].*DENSE_RANK\\(\\)",
+    final String[] expectedPlan = {"Window.*order by \\[1\\].*DENSE_RANK\\(\\)",
         "Scan.*columns=\\[`l_partkey`, `l_suppkey`\\]"};
     final String[] excludedPatterns = {"Scan.*columns=\\[`\\*`\\]"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPatterns);
@@ -684,7 +684,7 @@
     test("alter session set `planner.slice_target` = 1");
 
     // Validate the plan
-    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{\\}.*\n" +
+    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(aggs .*\n" +
         ".*UnionExchange"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
 
@@ -747,7 +747,7 @@
         "from cp.`%s`", root);
 
     // Validate the plan
-    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{0\\} order by \\[\\].*\\[SUM\\(\\$1\\), SUM\\(\\$2\\)\\]"};
+    final String[] expectedPlan = {"Window\\(window#0=\\[window\\(partition \\{0\\} aggs .*\\[SUM\\(\\$1\\), SUM\\(\\$2\\)\\]"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, new String[]{});
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
index 8dc1093..1abe4f1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
@@ -1462,7 +1462,7 @@
   @Test
   public void testILike() throws Exception {
     testBuilder()
-        .sqlQuery("select n_name from cp.`tpch/nation.parquet` where ilike(n_name, '%united%') = true")
+        .sqlQuery("select n_name from cp.`tpch/nation.parquet` where `ilike`(n_name, '%united%') = true")
         .unOrdered()
         .baselineColumns("n_name")
         .baselineValues("UNITED STATES")
@@ -1474,7 +1474,7 @@
   @Test
   public void testILikeEscape() throws Exception {
     testBuilder()
-        .sqlQuery("select a from (select concat(r_name , '_region') a from cp.`tpch/region.parquet`) where ilike(a, 'asia#_region', '#') = true")
+        .sqlQuery("select a from (select concat(r_name , '_region') a from cp.`tpch/region.parquet`) where `ilike`(a, 'asia#_region', '#') = true")
         .unOrdered()
         .baselineColumns("a")
         .baselineValues("ASIA_region")
@@ -1485,7 +1485,7 @@
   @Test
   public void testSubstr() throws Exception {
     testBuilder()
-        .sqlQuery("select substr(n_name, 'UN.TE.') a from cp.`tpch/nation.parquet` where ilike(n_name, 'united%') = true")
+        .sqlQuery("select substr(n_name, 'UN.TE.') a from cp.`tpch/nation.parquet` where `ilike`(n_name, 'united%') = true")
         .unOrdered()
         .baselineColumns("a")
         .baselineValues("UNITED")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/DrillOptiqTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/DrillOptiqTest.java
index 1f35bc6..dcaed58 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/DrillOptiqTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/DrillOptiqTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.planner.logical;
 
+import org.apache.calcite.rex.RexWindowBounds;
 import org.apache.drill.common.util.GuavaUtils;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.calcite.rel.RelNode;
@@ -32,13 +33,14 @@
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.planner.types.DrillRelDataTypeSystem;
 import org.apache.drill.test.BaseTest;
-import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.util.LinkedList;
 import java.util.List;
 
+import static org.junit.Assert.fail;
+
 @Category(PlannerTest.class)
 public class DrillOptiqTest extends BaseTest {
 
@@ -58,16 +60,17 @@
 
       // create a dummy RexOver object.
       RexNode window = rex.makeOver(anyType, SqlStdOperatorTable.AVG, emptyList, emptyList, GuavaUtils.convertToUnshadedImmutableList(e),
-          null, null, true, false, false, false);
+        RexWindowBounds.UNBOUNDED_PRECEDING, RexWindowBounds.UNBOUNDED_PRECEDING, true, false, false, false, false);
       DrillOptiq.toDrill(null, (RelNode) null, window);
+      fail();
     } catch (UserException e) {
       if (e.getMessage().contains(DrillOptiq.UNSUPPORTED_REX_NODE_ERROR)) {
         // got expected error return
         return;
       }
-      Assert.fail("Hit exception with unexpected error message");
+      fail("Hit exception with unexpected error message");
     }
 
-    Assert.fail("Failed to raise the expected exception");
+    fail("Failed to raise the expected exception");
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
index 2d7a4f6..1d4b2f2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
@@ -332,7 +332,7 @@
       query = " select emp.employee_id from dfs.tmp.employeeUseStat emp join dfs.tmp.departmentUseStat dept"
               + " on emp.department_id = dept.department_id "
               + " group by emp.employee_id";
-      String[] expectedPlan8 = {"HashAgg\\(group=\\[\\{0\\}\\]\\).*rowcount = 730.0992454469841,.*",
+      String[] expectedPlan8 = {"HashAgg\\(group=\\[\\{0\\}\\]\\).*rowcount = 730.2832515526484,.*",
               "HashJoin\\(condition.*\\).*rowcount = 1155.0,.*",
               "Scan.*columns=\\[`department_id`, `employee_id`\\].*rowcount = 1155.0.*",
               "Scan.*columns=\\[`department_id`\\].*rowcount = 12.0.*"};
@@ -347,7 +347,7 @@
               + " on emp.department_id = dept.department_id "
               + " group by emp.employee_id, emp.store_id, dept.department_description "
               + " having dept.department_description = 'FINANCE'";
-      String[] expectedPlan9 = {"HashAgg\\(group=\\[\\{0, 1, 2\\}\\]\\).*rowcount = 60.84160378724867.*",
+      String[] expectedPlan9 = {"HashAgg\\(group=\\[\\{0, 1, 2\\}\\]\\).*rowcount = 61.0263439444715.*",
               "HashJoin\\(condition.*\\).*rowcount = 96.25,.*",
               "Scan.*columns=\\[`department_id`, `employee_id`, `store_id`\\].*rowcount = 1155.0.*",
               "Filter\\(condition=\\[=\\(\\$1, 'FINANCE'\\)\\]\\).*rowcount = 1.0,.*",
@@ -362,7 +362,7 @@
               + " on emp.department_id = dept.department_id "
               + " group by emp.employee_id, emp.store_id "
               + " having emp.store_id = 7";
-      String[] expectedPlan10 = {"HashAgg\\(group=\\[\\{0, 1\\}\\]\\).*rowcount = 29.203969817879365.*",
+      String[] expectedPlan10 = {"HashAgg\\(group=\\[\\{0, 1\\}\\]\\).*rowcount = 29.389586621217404.*",
               "HashJoin\\(condition.*\\).*rowcount = 46.2,.*",
               "Filter\\(condition=\\[=\\(\\$2, 7\\)\\]\\).*rowcount = 46.2,.*",
               "Scan.*columns=\\[`department_id`, `employee_id`, `store_id`\\].*rowcount = 1155.0.*",
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockPlugin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockPlugin.java
index e2e65f4..90a6c48 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockPlugin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockPlugin.java
@@ -77,7 +77,7 @@
         return ImmutableSet.of(
             new EnumerableIntermediatePrelConverterRule(
                 new EnumMockRel.MockEnumerablePrelContext(convention), convention),
-            new VertexDrelConverterRule(convention));
+            VertexDrelConverterRule.create(convention));
       case LOGICAL_PRUNE_AND_JOIN:
       case LOGICAL_PRUNE:
       case PARTITION_PRUNING:
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushdownWithTransitivePredicates.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushdownWithTransitivePredicates.java
index 9b7a913..f027421 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushdownWithTransitivePredicates.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushdownWithTransitivePredicates.java
@@ -178,8 +178,7 @@
     testPlanMatchingPatterns(query, expectedPlan);
   }
 
-  @Test // TODO: CALCITE-1048
-  @Ignore // For now plan has "first.*numRowGroups=7". Replacing left join to inner should be made earlier.
+  @Test
   public void testForTwoExists() throws Exception {
     String query = String.format("SELECT * from %s t1 " +
         " WHERE EXISTS (SELECT * FROM %s t2 WHERE t1.`year` = t2.`year` AND t2.`year` = 1988) " +
@@ -227,8 +226,7 @@
     testPlanMatchingPatterns(query, expectedPlan);
   }
 
-  @Test // TODO: CALCITE-2275
-  @Ignore // For now plan has "first.*numRowGroups=14""
+  @Test
   public void testForInAndNotOperatorsInJoinCondition() throws Exception {
     String query = String.format("SELECT * FROM %s t1 JOIN %s t2 " +
             "ON t1.`year` = t2.`year` AND t2.`year` NOT IN (1987, 1988) JOIN %s t3 ON t1.`period` = t3.`period` " +
@@ -287,5 +285,39 @@
     final String[] expectedPlan = {"first.*numRowGroups=1", "second.*numRowGroups=1"};
     testPlanMatchingPatterns(query, expectedPlan);
   }
+
+  @Test // DRILL-6371
+  public void testForTransitiveFilterPushPastUnion() throws Exception {
+    String query = "WITH year_total_1\n" +
+      "     AS (SELECT c.r_regionkey    customer_id,\n" +
+      "                1 year_total\n" +
+      "         FROM   cp.`tpch/region.parquet` c\n" +
+      "         UNION ALL\n" +
+      "         SELECT c.n_nationkey    customer_id,\n" +
+      "                1 year_total\n" +
+      "         FROM   cp.`tpch/nation.parquet` c),\n" +
+      "     year_total_2\n" +
+      "     AS (SELECT c.r_regionkey    customer_id,\n" +
+      "                1 year_total\n" +
+      "         FROM   cp.`tpch/region.parquet` c\n" +
+      "         UNION ALL\n" +
+      "         SELECT c.n_nationkey    customer_id,\n" +
+      "                1 year_total\n" +
+      "         FROM   cp.`tpch/nation.parquet` c)\n" +
+      "SELECT count(t_w_firstyear.customer_id) as ct\n" +
+      "FROM   year_total_1 t_w_firstyear,\n" +
+      "       year_total_2 t_w_secyear\n" +
+      "WHERE  t_w_firstyear.year_total = t_w_secyear.year_total\n" +
+      " AND t_w_firstyear.year_total > 0 and t_w_secyear.year_total > 0";
+
+    // Validate the plan
+    int actualRowCount = testSql(query);
+    int expectedRowCount = 1;
+    assertEquals("Expected and actual row count should match",
+      expectedRowCount, actualRowCount);
+
+    String[] excludedPlan = {"Filter"};
+    testPlanMatchingPatterns(query, new String[0], excludedPlan);
+  }
 }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/TestLimit0VsRegularQueriesMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/TestLimit0VsRegularQueriesMetadata.java
index e140889..54fc2ea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/TestLimit0VsRegularQueriesMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/TestLimit0VsRegularQueriesMetadata.java
@@ -180,7 +180,7 @@
     List<ExpectedColumnResult> expectedMetadata = ImmutableList.of(
         new ExpectedColumnResult("col_first_cond", "CHARACTER VARYING", true, 10, 10, 0, false, String.class.getName()),
         new ExpectedColumnResult("col_second_cond", "CHARACTER VARYING", true, 20, 20, 0, false, String.class.getName()),
-        new ExpectedColumnResult("col_null", "CHARACTER VARYING", true, 10, 10, 0, false, String.class.getName()),
+        new ExpectedColumnResult("col_null", "CHARACTER VARYING", false, 10, 10, 0, false, String.class.getName()),
         new ExpectedColumnResult("col_unk", "CHARACTER VARYING", true, Types.MAX_VARCHAR_LENGTH, Types.MAX_VARCHAR_LENGTH, 0, false, String.class.getName())
     );
 
diff --git a/exec/java-exec/src/test/resources/rest/verboseExc.json b/exec/java-exec/src/test/resources/rest/verboseExc.json
index f195b9f..f4835f4 100644
--- a/exec/java-exec/src/test/resources/rest/verboseExc.json
+++ b/exec/java-exec/src/test/resources/rest/verboseExc.json
@@ -1,6 +1,6 @@
 !\{"queryId":"[^"]+"
 ,"exception":"org.apache.calcite.runtime.CalciteContextException"
-,"errorMessage":"From line 1, column 15 to line 1, column 16: Object 'employee123321123321.json' not found within 'cp': Object 'employee123321123321.json' not found within 'cp'"
+,"errorMessage":"From line 1, column 15 to line 1, column 44: Object 'employee123321123321.json' not found within 'cp': Object 'employee123321123321.json' not found within 'cp'"
 !,"stackTrace":\[.*\]
 ,"queryState":"FAILED"
 }
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index 78b1426..6831b69 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -34,7 +34,7 @@
        "package.namespace.prefix" equals to "oadd.". It can be overridden if necessary within any profile -->
   <properties>
     <package.namespace.prefix>oadd.</package.namespace.prefix>
-    <jdbc-all-jar.maxsize>46000000</jdbc-all-jar.maxsize>
+    <jdbc-all-jar.maxsize>47000000</jdbc-all-jar.maxsize>
   </properties>
 
   <dependencies>
@@ -1280,7 +1280,7 @@
     <profile>
       <id>hadoop-2</id>
       <properties>
-        <jdbc-all-jar.maxsize>49000000</jdbc-all-jar.maxsize>
+        <jdbc-all-jar.maxsize>50000000</jdbc-all-jar.maxsize>
       </properties>
     </profile>
   </profiles>
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
index e06445d..8e21a7f 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillMetaImpl.java
@@ -26,6 +26,7 @@
 import java.sql.Timestamp;
 import java.sql.Types;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -320,8 +321,14 @@
 
         Meta.Frame frame = Meta.Frame.create(0, true, tables);
         StructType fieldMetaData = drillFieldMetaData(clazz);
+        List<String> fieldNames = fieldMetaData.columns.stream()
+          .map(c -> c.columnName)
+          .collect(Collectors.toList());
+        List<Field> fields = Arrays.stream(clazz.getFields())
+          .filter(field -> Modifier.isPublic(field.getModifiers()) && !Modifier.isStatic(field.getModifiers()))
+          .collect(Collectors.toList());
         Meta.Signature signature = Meta.Signature.create(fieldMetaData.columns, "", Collections.emptyList(),
-            CursorFactory.record(clazz), Meta.StatementType.SELECT);
+            CursorFactory.record(clazz, fields, fieldNames), Meta.StatementType.SELECT);
 
         AvaticaStatement statement = connection.createStatement();
         return MetaResultSet.create(connection.id, statement.getId(), true,
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
index 464a90c..4ff30a2 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/DatabaseMetaDataGetColumnsTest.java
@@ -1042,7 +1042,7 @@
   @Test
   public void test_COLUMN_SIZE_hasRightValue_mdrReqTIME() throws SQLException {
     assertThat( getIntOrNull( mdrReqTIME, "COLUMN_SIZE" ),
-                equalTo( 8  /* HH:MM:SS */  ) );
+                equalTo( 12  /* HH:MM:SS.SSS */  ) );
   }
 
   @Ignore( "TODO(DRILL-3225): unignore when datetime precision is implemented" )
@@ -1056,13 +1056,13 @@
   public void test_COLUMN_SIZE_hasINTERIMValue_mdrOptTIME_7() throws SQLException {
     assertThat( "When datetime precision is implemented, un-ignore above method and purge this.",
                 getIntOrNull( mdrOptTIME_7, "COLUMN_SIZE" ),
-                equalTo( 8  /* HH:MM:SS */ ) );
+                equalTo( 12  /* HH:MM:SS.SSS */ ) );
   }
 
   @Test
   public void test_COLUMN_SIZE_hasRightValue_mdrOptTIMESTAMP() throws SQLException {
     assertThat( getIntOrNull( mdrOptTIMESTAMP, "COLUMN_SIZE" ),
-                equalTo( 19 /* YYYY-MM-DDTHH:MM:SS */  ) );
+                equalTo( 23 /* YYYY-MM-DDTHH:MM:SS.SSS */  ) );
   }
 
   @Test
@@ -1353,8 +1353,8 @@
 
   @Test
   public void test_DECIMAL_DIGITS_hasRightValue_mdrReqTIME() throws SQLException {
-    // Zero is default datetime precision for TIME in SQL DATETIME_PRECISION.
-    assertThat( getIntOrNull( mdrReqTIME, "DECIMAL_DIGITS" ), equalTo( 0 ) );
+    // 3 is default datetime precision for TIME in SQL DATETIME_PRECISION.
+    assertThat( getIntOrNull( mdrReqTIME, "DECIMAL_DIGITS" ), equalTo( 3 ) );
   }
 
   @Ignore( "TODO(DRILL-3225): unignore when datetime precision is implemented" )
@@ -1366,7 +1366,7 @@
   @Test
   public void test_DECIMAL_DIGITS_hasINTERIMValue_mdrOptTIME_7() throws SQLException {
     assertThat( "When datetime precision is implemented, un-ignore above method and purge this.",
-                getIntOrNull( mdrOptTIME_7, "DECIMAL_DIGITS" ), equalTo( 0 ) );
+                getIntOrNull( mdrOptTIME_7, "DECIMAL_DIGITS" ), equalTo( 3 ) );
   }
 
   @Ignore( "TODO(DRILL-3225): unignore when datetime precision is implemented" )
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestInformationSchemaColumns.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestInformationSchemaColumns.java
index 218c618..386a2eb 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestInformationSchemaColumns.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestInformationSchemaColumns.java
@@ -1985,8 +1985,8 @@
 
   @Test
   public void test_DATETIME_PRECISION_hasRightValue_mdrReqTIME() throws SQLException {
-    // Zero is default datetime precision for TIME.
-    assertThat( getIntOrNull( mdrReqTIME, "DATETIME_PRECISION" ), equalTo( 0 ) );
+    // 3 is default datetime precision for TIME.
+    assertThat( getIntOrNull( mdrReqTIME, "DATETIME_PRECISION" ), equalTo( 3 ) );
   }
 
   @Ignore( "TODO(DRILL-3225): unignore when datetime precision is implemented" )
diff --git a/pom.xml b/pom.xml
index 320e8cb..16193cb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -54,15 +54,9 @@
     <forkCount>1</forkCount>
     <parquet.version>1.12.2</parquet.version>
     <parquet.format.version>2.8.0</parquet.format.version>
-    <!--
-      For development purposes to be able to use custom Calcite versions (e.g. not present in jitpack
-      repository or from local repository) update this property to desired value (e.g. org.apache.calcite).
-      In case if new value is org.apache.calcite, one needs to remove `org.apache.calcite:*` from
-      avoid_bad_dependencies plugin found in the file.
-    -->
-    <calcite.groupId>com.github.vvysotskyi.drill-calcite</calcite.groupId>
-    <calcite.version>1.21.0-drill-r9</calcite.version>
-    <avatica.version>1.17.0</avatica.version>
+    <calcite.groupId>org.apache.calcite</calcite.groupId>
+    <calcite.version>1.31.0</calcite.version>
+    <avatica.version>1.22.0</avatica.version>
     <janino.version>3.1.7</janino.version>
     <sqlline.version>1.12.0</sqlline.version>
     <jackson.version>2.13.2.20220328</jackson.version>
@@ -128,7 +122,7 @@
     <snakeyaml.version>1.26</snakeyaml.version>
     <commons.lang3.version>3.10</commons.lang3.version>
     <testcontainers.version>1.16.3</testcontainers.version>
-    <typesafe.config.version>1.0.0</typesafe.config.version>
+    <typesafe.config.version>1.4.2</typesafe.config.version>
     <commons.codec.version>1.14</commons.codec.version>
     <xerces.version>2.12.2</xerces.version>
     <commons.configuration.version>1.10</commons.configuration.version>
@@ -669,7 +663,6 @@
                     <exclude>jdk.tools:jdk.tools</exclude>
                     <exclude>org.json:json</exclude>
                     <exclude>org.beanshell:bsh</exclude>
-                    <exclude>org.apache.calcite:*</exclude>
                     <exclude>commons-httpclient:*</exclude>
                     <exclude>tomcat:*</exclude>
                     <exclude>commons-beanutils:commons-beanutils-core</exclude>