DRILL-8182: File scan nodes not differentiated by format config (#2583)
diff --git a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
index 97c8fff..9fe9f8e 100644
--- a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
+++ b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
@@ -780,4 +780,34 @@
new RowSetComparison(expected).verifyAndClearAll(results);
}
+
+ // DRILL-8182
+ @Test
+ public void testTableFuncsThatDifferOnlyByFormatConfig() throws Exception {
+ String sql = "WITH prod AS (" +
+ " SELECT id, name FROM table(cp.`excel/test_cross_sheet_join.xlsx` (type=> 'excel', sheetName => 'products'))" +
+ "), cust AS (" +
+ " SELECT id, name FROM table(cp.`excel/test_cross_sheet_join.xlsx` (type=> 'excel', sheetName => 'customers'))" +
+ ")" +
+ "SELECT prod.*, cust.* from prod JOIN cust ON prod.id = cust.id";
+
+ RowSet results = client.queryBuilder().sql(sql).rowSet();
+
+ TupleMetadata expectedSchema = new SchemaBuilder()
+ .addNullable("id", MinorType.FLOAT8)
+ .addNullable("name", MinorType.VARCHAR)
+ .addNullable("id0", MinorType.FLOAT8)
+ .addNullable("name0", MinorType.VARCHAR)
+ .buildSchema();
+
+ RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+ .addRow(1.0, "Doughnut", 1.0, "Alice")
+ .addRow(2.0, "Coffee", 2.0, "Bob")
+ .addRow(3.0, "Coke", 3.0, "Carol")
+ .addRow(4.0, "Cheesecake", 4.0, "Dave")
+ .addRow(5.0, "Popsicle", 5.0, "Eve")
+ .build();
+
+ new RowSetComparison(expected).verifyAndClearAll(results);
+ }
}
diff --git a/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx b/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx
new file mode 100644
index 0000000..b057361
--- /dev/null
+++ b/contrib/format-excel/src/test/resources/excel/test_cross_sheet_join.xlsx
Binary files differ
diff --git a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
index 8b1d58d..7d5c402 100644
--- a/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
+++ b/contrib/storage-cassandra/src/main/java/org/apache/drill/exec/store/cassandra/schema/CassandraDynamicTable.java
@@ -34,6 +34,7 @@
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.Wrapper;
import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.StoragePlugin;
import java.lang.reflect.Type;
@@ -44,7 +45,7 @@
private final CassandraTable table;
- public CassandraDynamicTable(StoragePlugin plugin, String storageEngineName, Object selection, CassandraTable table) {
+ public CassandraDynamicTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection, CassandraTable table) {
super(storageEngineName, plugin, selection);
this.table = table;
}
diff --git a/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java b/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
index dcd7431..f4e6114 100755
--- a/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
+++ b/contrib/storage-druid/src/main/java/org/apache/drill/exec/store/druid/DruidScanSpec.java
@@ -21,9 +21,10 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.druid.common.DruidFilter;
-public class DruidScanSpec {
+public class DruidScanSpec implements DrillTableSelection {
private final String dataSourceName;
private final long dataSourceSize;
@@ -84,4 +85,9 @@
.field("filter", filter)
.toString();
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
index a1919f5..1e18fd3 100644
--- a/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
+++ b/contrib/storage-elasticsearch/src/main/java/org/apache/drill/exec/store/elasticsearch/schema/ElasticsearchDynamicTable.java
@@ -31,6 +31,7 @@
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.Wrapper;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.planner.logical.DynamicDrillTable;
import org.apache.drill.exec.store.StoragePlugin;
@@ -40,7 +41,7 @@
private final ElasticsearchTable table;
- public ElasticsearchDynamicTable(StoragePlugin plugin, String storageEngineName, Object selection, Table table) {
+ public ElasticsearchDynamicTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection, Table table) {
super(plugin, storageEngineName, selection);
this.table = (ElasticsearchTable) table;
}
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
index 93d8739..21df6b5 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/AbstractHBaseDrillTable.java
@@ -22,6 +22,7 @@
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.StoragePlugin;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -41,7 +42,7 @@
protected HTableDescriptor tableDesc;
- public AbstractHBaseDrillTable(String storageEngineName, StoragePlugin plugin, Object selection) {
+ public AbstractHBaseDrillTable(String storageEngineName, StoragePlugin plugin, DrillTableSelection selection) {
super(storageEngineName, plugin, selection);
}
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
index 793d924..cfcf740 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseScanSpec.java
@@ -18,6 +18,8 @@
package org.apache.drill.exec.store.hbase;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;
@@ -26,7 +28,7 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
-public class HBaseScanSpec {
+public class HBaseScanSpec implements DrillTableSelection {
protected String tableName;
protected byte[] startRow;
@@ -87,10 +89,16 @@
@Override
public String toString() {
- return "HBaseScanSpec [tableName=" + tableName
- + ", startRow=" + (startRow == null ? null : Bytes.toStringBinary(startRow))
- + ", stopRow=" + (stopRow == null ? null : Bytes.toStringBinary(stopRow))
- + ", filter=" + (filter == null ? null : filter.toString())
- + "]";
+ return new PlanStringBuilder(this)
+ .field("tableName", tableName)
+ .field("startRow", startRow == null ? null : Bytes.toStringBinary(startRow))
+ .field("stopRow", stopRow == null ? null : Bytes.toStringBinary(stopRow))
+ .field("filter", filter == null ? null : filter.toString())
+ .toString();
+ }
+
+ @Override
+ public String digest() {
+ return toString();
}
}
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
index cccaeb1..edba290 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
@@ -38,7 +38,7 @@
runHBaseSQLVerifyCount(sql, 1);
- final String[] expectedPlan = {".*startRow=b4, stopRow=b4\\\\x00, filter=null.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\", stopRow=\"b4\\\\x00\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -56,7 +56,7 @@
runHBaseSQLVerifyCount(sql, 7);
- final String[] expectedPlan = {".*startRow=, stopRow=, filter=RowFilter \\(NOT_EQUAL, b4\\).*"};
+ final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"RowFilter \\(NOT_EQUAL, b4\\)\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -74,7 +74,7 @@
runHBaseSQLVerifyCount(sql, 1);
- final String[] expectedPlan = {".*startRow=b4, stopRow=b4\\\\x00, filter=null.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\", stopRow=\"b4\\\\x00\".*"};
final String[] excludedPlan ={".*startRow=null, stopRow=null.*"};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -489,7 +489,7 @@
runHBaseSQLVerifyCount(sql, 21);
- final String[] expectedPlan = {".*filter=FilterList OR.*EQUAL.*EQUAL.*"};
+ final String[] expectedPlan = {".*filter=\"FilterList OR.*EQUAL.*EQUAL.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -507,7 +507,7 @@
runHBaseSQLVerifyCount(sql, 2);
- final String[] expectedPlan = {".*startRow=\\%_AS_PREFIX_, stopRow=\\%_AS_PREFIX`, filter=RowFilter.*EQUAL.*"};
+ final String[] expectedPlan = {".*startRow=\"\\%_AS_PREFIX_\", stopRow=\"\\%_AS_PREFIX`\", filter=\"RowFilter.*EQUAL.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -525,7 +525,7 @@
runHBaseSQLVerifyCount(sql, 22);
- final String[] expectedPlan = {".*startRow=07, stopRow=09, filter=FilterList AND.*RowFilter \\(GREATER_OR_EQUAL, 07\\), RowFilter \\(LESS, 09\\), SingleColumnValueFilter \\(f, c, EQUAL.*"};
+ final String[] expectedPlan = {".*startRow=\"07\", stopRow=\"09\", filter=\"FilterList AND.*RowFilter \\(GREATER_OR_EQUAL, 07\\), RowFilter \\(LESS, 09\\), SingleColumnValueFilter \\(f, c, EQUAL.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -543,7 +543,7 @@
runHBaseSQLVerifyCount(sql, 4);
- final String[] expectedPlan = {".*startRow=b4\\\\x00.*stopRow=,.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -561,7 +561,7 @@
runHBaseSQLVerifyCount(sql, 2);
- final String[] expectedPlan = {".*startRow=b4\\\\x00.*stopRow=, filter=null.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\\\\x00\".*stopRow=.*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -579,7 +579,7 @@
runHBaseSQLVerifyCount(sql, 3);
- final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*"};
+ final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -597,7 +597,7 @@
runHBaseSQLVerifyCount(sql, 3);
- final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*"};
+ final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList AND.*GREATER_OR_EQUAL, a2.*LESS_OR_EQUAL, b4.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -615,7 +615,7 @@
runHBaseSQLVerifyCount(sql, 5);
- final String[] expectedPlan = {".*startRow=, stopRow=, filter=FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*"};
+ final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -631,7 +631,7 @@
+ "WHERE\n"
+ " (row_key >= 'b5' OR row_key <= 'a2') AND (t.f.c1 >= '1' OR t.f.c1 is null)";
- final String[] expectedPlan = {".*startRow=, stopRow=, filter=FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*"};
+ final String[] expectedPlan = {".*startRow=\"\", stopRow=\"\", filter=\"FilterList OR.*GREATER_OR_EQUAL, b5.*LESS_OR_EQUAL, a2.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -649,7 +649,7 @@
runHBaseSQLVerifyCount(sql, 4);
- final String[] expectedPlan = {".*startRow=b4\\\\x00, stopRow=,.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -667,7 +667,7 @@
runHBaseSQLVerifyCount(sql, 2);
- final String[] expectedPlan = {".*startRow=b4\\\\x00, stopRow=,.*"};
+ final String[] expectedPlan = {".*startRow=\"b4\\\\x00\", stopRow=\"\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -698,7 +698,7 @@
runHBaseSQLVerifyCount(sql, 4);
- final String[] expectedPlan = {".*startRow=, stopRow=b4\\\\x00, filter=null.*"};
+ final String[] expectedPlan = {".*startRow=\"\", stopRow=\"b4\\\\x00\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -716,7 +716,7 @@
runHBaseSQLVerifyCount(sql, 4);
- final String[] expectedPlan = {".*startRow=, stopRow=b4\\\\x00, filter=null.*"};
+ final String[] expectedPlan = {".*startRow=\"\", stopRow=\"b4\\\\x00\".*"};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -734,7 +734,7 @@
runHBaseSQLVerifyCount(sql, 2);
- final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*"};
+ final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -753,7 +753,7 @@
runHBaseSQLVerifyCount(sql, 2);
- final String[] expectedPlan = {".*startRow=a2, stopRow=b4\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*"};
+ final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b4\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, b4\\), RowFilter \\(EQUAL, a2\\).*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -772,7 +772,7 @@
runHBaseSQLVerifyCount(sql, 3);
- final String[] expectedPlan = {".*startRow=a2, stopRow=b6\\\\x00, filter=FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), FilterList AND \\(2/2\\): \\[RowFilter \\(GREATER_OR_EQUAL, b5\\), RowFilter \\(LESS_OR_EQUAL, b6.*"};
+ final String[] expectedPlan = {".*startRow=\"a2\", stopRow=\"b6\\\\x00\", filter=\"FilterList OR \\(2/2\\): \\[RowFilter \\(EQUAL, a2\\), FilterList AND \\(2/2\\): \\[RowFilter \\(GREATER_OR_EQUAL, b5\\), RowFilter \\(LESS_OR_EQUAL, b6.*\""};
final String[] excludedPlan ={};
final String sqlHBase = canonizeHBaseSQL(sql);
PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
@@ -808,9 +808,9 @@
String query = "select d from dfs.tmp.pd_view where d > date '2015-06-13' and d < DATE '2015-06-18'";
String[] expectedPlan = {
- "startRow=\\\\x00\\\\x00\\\\x01M\\\\xEF\\]\\\\xA0\\\\x00, " +
- "stopRow=\\\\x00\\\\x00\\\\x01N\\\\x03\\\\xF7\\\\x10\\\\x00, " +
- "filter=null"};
+ "startRow=\"\\\\x00\\\\x00\\\\x01M\\\\xEF\\]\\\\xA0\\\\x00\", " +
+ "stopRow=\"\\\\x00\\\\x00\\\\x01N\\\\x03\\\\xF7\\\\x10\\\\x00\""
+ };
String[] excludedPlan ={"Filter\\("};
PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
index d8bf750..7710a2b 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveReadEntry.java
@@ -21,6 +21,8 @@
import org.apache.calcite.schema.Schema.TableType;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.hive.HiveTableWrapper.HivePartitionWrapper;
import com.fasterxml.jackson.annotation.JsonCreator;
@@ -28,7 +30,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
-public class HiveReadEntry {
+public class HiveReadEntry implements DrillTableSelection {
@JsonProperty("table")
public HiveTableWrapper table;
@@ -93,5 +95,18 @@
return partitionPath;
}
+
+ @Override
+ public String toString() {
+ return new PlanStringBuilder(this)
+ .field("tableName", table)
+ .field("partitions", partitions)
+ .toString();
+ }
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java b/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
index 151532c..fe02304 100644
--- a/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
+++ b/contrib/storage-http/src/main/java/org/apache/drill/exec/store/http/HttpScanSpec.java
@@ -24,10 +24,11 @@
import com.fasterxml.jackson.annotation.JsonTypeName;
import org.apache.drill.common.PlanStringBuilder;
import org.apache.drill.exec.oauth.PersistentTokenTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.StoragePluginRegistry;
@JsonTypeName("http-scan-spec")
-public class HttpScanSpec {
+public class HttpScanSpec implements DrillTableSelection {
private final String pluginName;
private final String connectionName;
@@ -109,4 +110,9 @@
.field("queryUserName", queryUserName)
.toString();
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
index d059099..6bd88f8 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaScanSpec.java
@@ -19,8 +19,10 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
-public class KafkaScanSpec {
+public class KafkaScanSpec implements DrillTableSelection {
private final String topicName;
@JsonCreator
@@ -34,6 +36,13 @@
@Override
public String toString() {
- return "KafkaScanSpec [topicName=" + topicName + "]";
+ return new PlanStringBuilder(this)
+ .field("topicName", topicName)
+ .toString();
+ }
+
+ @Override
+ public String digest() {
+ return toString();
}
}
diff --git a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
index 371cf2b..78abdbe 100644
--- a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
+++ b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduScanSpec.java
@@ -20,8 +20,10 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
-public class KuduScanSpec {
+public class KuduScanSpec implements DrillTableSelection {
private final String tableName;
@@ -33,4 +35,16 @@
public String getTableName() {
return tableName;
}
+
+ @Override
+ public String toString() {
+ return new PlanStringBuilder(this)
+ .field("tableName", tableName)
+ .toString();
+ }
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
index 2c97785..4142633 100644
--- a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
+++ b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoScanSpec.java
@@ -20,11 +20,12 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import java.util.ArrayList;
import java.util.List;
-public class MongoScanSpec {
+public class MongoScanSpec implements DrillTableSelection {
private final String dbName;
private final String collectionName;
@@ -71,4 +72,9 @@
.field("operations", operations)
.toString();
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
index f93758d..50931d1 100644
--- a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
@@ -19,8 +19,10 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
-public class OpenTSDBScanSpec {
+public class OpenTSDBScanSpec implements DrillTableSelection {
private final String tableName;
@@ -35,8 +37,13 @@
@Override
public String toString() {
- return "OpenTSDBScanSpec{" +
- "tableName='" + tableName + '\'' +
- '}';
+ return new PlanStringBuilder(this)
+ .field("tableName", tableName)
+ .toString();
+ }
+
+ @Override
+ public String digest() {
+ return toString();
}
}
diff --git a/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java b/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
index 513db63..2d736bb 100644
--- a/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
+++ b/contrib/storage-splunk/src/main/java/org/apache/drill/exec/store/splunk/SplunkScanSpec.java
@@ -22,9 +22,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
@JsonTypeName("splunk-scan-spec")
-public class SplunkScanSpec {
+public class SplunkScanSpec implements DrillTableSelection {
private final String pluginName;
private final String indexName;
private final SplunkPluginConfig config;
@@ -55,4 +56,9 @@
.field("indexName", indexName)
.toString();
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
index 776a708..5e30683 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/FileSystemPartitionDescriptor.java
@@ -27,7 +27,6 @@
import java.util.Map;
import org.apache.calcite.plan.RelOptTable;
-import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.drill.common.util.GuavaUtils;
import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
@@ -41,7 +40,7 @@
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.physical.base.FileGroupScan;
-import org.apache.drill.exec.planner.logical.DirPrunedTableScan;
+import org.apache.drill.exec.planner.logical.SelectionBasedTableScan;
import org.apache.drill.exec.planner.logical.DrillRel;
import org.apache.drill.exec.planner.logical.DrillScanRel;
import org.apache.drill.exec.planner.logical.DrillTable;
@@ -253,8 +252,8 @@
RelOptTableImpl newOptTableImpl = RelOptTableImpl.create(relOptTable.getRelOptSchema(), relOptTable.getRowType(),
newTable, GuavaUtils.convertToUnshadedImmutableList(relOptTable.getQualifiedName()));
- // return an DirPrunedTableScan with fileSelection being part of digest of TableScan node.
- return DirPrunedTableScan.create(scanRel.getCluster(), newOptTableImpl, newFileSelection.toString());
+ // return a SelectionBasedTableScan with fileSelection being part of digest of TableScan node.
+ return SelectionBasedTableScan.create(scanRel.getCluster(), newOptTableImpl, newFileSelection.toString());
} else {
throw new UnsupportedOperationException("Only DrillScanRel and DirPrunedTableScan is allowed!");
}
@@ -274,7 +273,6 @@
}
private static boolean supportsScan(TableScan scanRel) {
- return scanRel instanceof DirPrunedTableScan
- || scanRel instanceof LogicalTableScan;
+ return scanRel instanceof SelectionBasedTableScan;
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
index 01afb1a..4a4f385 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
@@ -348,7 +348,6 @@
// Due to infinite loop in planning (DRILL-3257/CALCITE-1271), temporarily use this rule in Hep planner
// RuleInstance.PROJECT_SET_OP_TRANSPOSE_RULE,
RuleInstance.PROJECT_WINDOW_TRANSPOSE_RULE,
- DrillPushProjectIntoScanRule.LOGICAL_INSTANCE,
DrillPushProjectIntoScanRule.INSTANCE,
DrillPushProjectIntoScanRule.DRILL_LOGICAL_INSTANCE,
@@ -356,8 +355,7 @@
Convert from Calcite Logical to Drill Logical Rules.
*/
RuleInstance.EXPAND_CONVERSION_RULE,
- DrillScanRule.LOGICAL_TABLE_SCAN_TO_DRILL,
- DrillScanRule.DIR_PRUNED_TABLE_SCAN_TO_DRILL,
+ DrillScanRule.INSTANCE,
DrillFilterRule.INSTANCE,
DrillProjectRule.INSTANCE,
DrillWindowRule.INSTANCE,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectIntoScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectIntoScanRule.java
index 46dd07c..d869c4b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectIntoScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillPushProjectIntoScanRule.java
@@ -23,7 +23,6 @@
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.logical.LogicalProject;
-import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.rules.ProjectRemoveRule;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexNode;
@@ -46,8 +45,8 @@
public class DrillPushProjectIntoScanRule extends RelOptRule {
public static final RelOptRule INSTANCE =
new DrillPushProjectIntoScanRule(LogicalProject.class,
- DirPrunedTableScan.class,
- "DrillPushProjectIntoScanRule:enumerable") {
+ SelectionBasedTableScan.class,
+ "DrillPushProjectIntoScanRule:none") {
@Override
protected boolean skipScanConversion(RelDataType projectRelDataType, TableScan scan) {
@@ -56,18 +55,6 @@
}
};
- public static final RelOptRule LOGICAL_INSTANCE =
- new DrillPushProjectIntoScanRule(LogicalProject.class,
- LogicalTableScan.class,
- "DrillPushProjectIntoScanRule:none") {
-
- @Override
- protected boolean skipScanConversion(RelDataType projectRelDataType, TableScan scan) {
- // do not allow skipping conversion of LogicalTableScan to DrillScanRel if rule is applicable
- return false;
- }
- };
-
public static final RelOptRule DRILL_LOGICAL_INSTANCE =
new DrillPushProjectIntoScanRule(LogicalProject.class,
DrillScanRel.class,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillScanRule.java
index d94a87b..53820a9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillScanRule.java
@@ -21,11 +21,9 @@
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.core.TableScan;
-import org.apache.calcite.rel.logical.LogicalTableScan;
-public class DrillScanRule extends RelOptRule {
- public static final RelOptRule LOGICAL_TABLE_SCAN_TO_DRILL = new DrillScanRule(LogicalTableScan.class);
- public static final RelOptRule DIR_PRUNED_TABLE_SCAN_TO_DRILL = new DrillScanRule(DirPrunedTableScan.class);
+public class DrillScanRule extends RelOptRule {
+ public static final RelOptRule INSTANCE = new DrillScanRule(SelectionBasedTableScan.class);
private DrillScanRule(Class<? extends TableScan> scan) {
super(RelOptHelper.any(scan),
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
index 1df5eec..5010e88 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTable.java
@@ -23,7 +23,6 @@
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.schema.Schema.TableType;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.Statistics;
@@ -48,7 +47,7 @@
private final String storageEngineName;
private final StoragePluginConfig storageEngineConfig;
private final TableType tableType;
- private final Object selection;
+ private final DrillTableSelection selection;
private final StoragePlugin plugin;
private final String userName;
private GroupScan scan;
@@ -62,7 +61,7 @@
* @param userName Whom to impersonate while reading the contents of the table.
* @param selection Table contents (type and contents depend on type of StoragePlugin).
*/
- public DrillTable(String storageEngineName, StoragePlugin plugin, String userName, Object selection) {
+ public DrillTable(String storageEngineName, StoragePlugin plugin, String userName, DrillTableSelection selection) {
this(storageEngineName, plugin, TableType.TABLE, userName, selection);
}
@@ -74,12 +73,12 @@
* @param userName Whom to impersonate while reading the contents of the table.
* @param selection Table contents (type and contents depend on type of StoragePlugin).
*/
- public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, String userName, Object selection) {
+ public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, String userName, DrillTableSelection selection) {
this(storageEngineName, plugin, tableType, userName, selection, null);
}
public DrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType,
- String userName, Object selection, MetadataProviderManager metadataProviderManager) {
+ String userName, DrillTableSelection selection, MetadataProviderManager metadataProviderManager) {
this.selection = selection;
this.plugin = plugin;
@@ -96,7 +95,7 @@
* process. Once we add impersonation to non-FileSystem storage plugins such as Hive, HBase etc,
* we can remove this constructor.
*/
- public DrillTable(String storageEngineName, StoragePlugin plugin, Object selection) {
+ public DrillTable(String storageEngineName, StoragePlugin plugin, DrillTableSelection selection) {
this(storageEngineName, plugin, ImpersonationUtil.getProcessUserName(), selection);
}
@@ -168,9 +167,9 @@
@Override
public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable table) {
- // returns non-drill table scan to allow directory-based partition pruning
- // before table group scan is created
- return LogicalTableScan.create(context.getCluster(), table);
+ // Returns non-drill table scan to allow directory-based partition pruning
+ // before table group scan is created.
+ return SelectionBasedTableScan.create(context.getCluster(), table, selection.digest());
}
@Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java
new file mode 100644
index 0000000..b514a49
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillTableSelection.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+public interface DrillTableSelection {
+
+ /**
+ * The digest of the selection represented by the implementation. The
+ * selections that accompany Tables can modify the contained dataset, e.g.
+ * a file selection can restrict to a subset of the available data and a
+ * format selection can include options that affect the behaviour of the
+ * underlying reader. Two scans will end up being considered identical during
+ * logical planning if their digests are the same, so selection
+ * implementations should override this method so that exactly those scans
+ * that really are identical (in terms of the data they produce) have matching
+ * digests.
+ *
+ * @return this selection's digest, normally a string built from its properties.
+ */
+ public String digest();
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
index c406d9a..8164287 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DynamicDrillTable.java
@@ -30,12 +30,12 @@
private final RelDataTypeHolder holder;
- public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName, Object selection) {
+ public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName, DrillTableSelection selection) {
this(plugin, storageEngineName, userName, selection, null);
}
public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, String userName,
- Object selection, MetadataProviderManager metadataProviderManager) {
+ DrillTableSelection selection, MetadataProviderManager metadataProviderManager) {
super(storageEngineName, plugin, Schema.TableType.TABLE, userName, selection, metadataProviderManager);
this.holder = new RelDataTypeHolder();
}
@@ -46,7 +46,7 @@
* non-FileSystem storage plugins such as Hive, HBase etc, we can remove this
* constructor.
*/
- public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, Object selection) {
+ public DynamicDrillTable(StoragePlugin plugin, String storageEngineName, DrillTableSelection selection) {
this(plugin, storageEngineName, ImpersonationUtil.getProcessUserName(), selection, null);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedTableScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
similarity index 85%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedTableScan.java
rename to exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
index 91973d7..10752ad 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DirPrunedTableScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/SelectionBasedTableScan.java
@@ -35,18 +35,18 @@
* When directory-based partition pruning applied, file selection could be different for the same
* table.
*/
-public class DirPrunedTableScan extends TableScan {
+public class SelectionBasedTableScan extends TableScan {
private final String digestFromSelection;
- public DirPrunedTableScan(RelOptCluster cluster, RelTraitSet traitSet,
- RelOptTable table, String digestFromSelection) {
+ public SelectionBasedTableScan(RelOptCluster cluster, RelTraitSet traitSet,
+ RelOptTable table, String digestFromSelection) {
super(cluster, traitSet, table);
this.digestFromSelection = digestFromSelection;
}
@Override
public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
- return new DirPrunedTableScan(getCluster(), traitSet, table, digestFromSelection);
+ return new SelectionBasedTableScan(getCluster(), traitSet, table, digestFromSelection);
}
/** Creates an DirPrunedTableScan. */
@@ -59,7 +59,7 @@
() -> table != null
? table.getStatistic().getCollations()
: Collections.emptyList());
- return new DirPrunedTableScan(cluster, traitSet, relOptTable, digestFromSelection);
+ return new SelectionBasedTableScan(cluster, traitSet, relOptTable, digestFromSelection);
}
@Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
index 11e85fc..3596213 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/PruneScanRule.java
@@ -26,9 +26,8 @@
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.drill.exec.planner.common.DrillRelOptUtil;
-import org.apache.drill.exec.planner.logical.DirPrunedTableScan;
+import org.apache.drill.exec.planner.logical.SelectionBasedTableScan;
import org.apache.drill.exec.util.DrillFileSystemUtil;
import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
import org.apache.calcite.plan.RelTraitSet;
@@ -781,7 +780,6 @@
}
private static boolean supportsScan(TableScan scan) {
- return scan instanceof DirPrunedTableScan
- || scan instanceof LogicalTableScan;
+ return scan instanceof SelectionBasedTableScan;
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
index ebe1a43..6563c78 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSelection.java
@@ -19,6 +19,7 @@
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.util.DrillStringUtils;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.util.DrillFileSystemUtil;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
@@ -40,7 +41,7 @@
/**
* Jackson serializable description of a file selection.
*/
-public class FileSelection {
+public class FileSelection implements DrillTableSelection {
private static final Logger logger = LoggerFactory.getLogger(FileSelection.class);
private static final String WILD_CARD = "*";
@@ -438,6 +439,11 @@
}
@Override
+ public String digest() {
+ return toString();
+ }
+
+ @Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("root=").append(selectionRoot);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
index d2a5545..4210b82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatSelection.java
@@ -21,11 +21,12 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.hadoop.fs.Path;
import java.util.List;
-public class FormatSelection {
+public class FormatSelection implements DrillTableSelection {
private FormatPluginConfig format;
private FileSelection selection;
@@ -62,4 +63,14 @@
public boolean supportsDirPruning() {
return selection.supportsDirPruning();
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
+
+ @Override
+ public String toString() {
+ return String.format("fileSelection=%s,formatConfig=%s", selection, format);
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 3c2708a..512289e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -319,6 +319,13 @@
@Override
public String toString() {
+ // Note that the output of this method is incorporated in the digest of
+ // the corresponding scan node in the query plan. This means that the
+ // fields included here constitute what the planner will use to decide
+ // whether two scans are identical or not. E.g. the format config must be
+ // present here because format config can be overridden using table functions.
+ // Two scans that differ by format config alone may produce different data
+ // and therefore should not be considered identical.
return new PlanStringBuilder(this)
.field("selectionRoot", selectionRoot)
.field("numFiles", getFiles().size())
@@ -327,6 +334,7 @@
.field("schema", getSchema())
.field("usedMetastore", usedMetastore())
.field("limit", limit)
+ .field("formatConfig", getFormatConfig())
.toString();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
index 3e38fb2..f08e380 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTableType.java
@@ -21,6 +21,7 @@
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.store.ischema.InfoSchemaTable.Catalogs;
import org.apache.drill.exec.store.ischema.InfoSchemaTable.Columns;
@@ -40,7 +41,7 @@
/**
* The set of tables / views in INFORMATION_SCHEMA.
*/
-public enum InfoSchemaTableType {
+public enum InfoSchemaTableType implements DrillTableSelection {
CATALOGS(new Catalogs()),
SCHEMATA(new Schemata()),
@@ -91,4 +92,9 @@
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
return tableDef.getRowType(typeFactory);
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
index fb00f83..983e150 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
@@ -19,7 +19,6 @@
import java.io.IOException;
import java.net.URL;
-import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -39,10 +38,11 @@
import org.apache.drill.exec.store.AbstractSchema;
import org.apache.drill.exec.store.AbstractStoragePlugin;
import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry;
+import org.apache.drill.exec.store.mock.MockTableDef.MockTableSelection;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
@@ -64,10 +64,11 @@
public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns)
throws IOException {
- List<MockTableDef.MockScanEntry> readEntries = selection.getListWith(new ObjectMapper(),
- new TypeReference<ArrayList<MockTableDef.MockScanEntry>>() {
- });
-
+ MockTableSelection tableSelection = selection.getWith(
+ new ObjectMapper(),
+ MockTableSelection.class
+ );
+ List<MockScanEntry> readEntries = tableSelection.getEntries();
assert ! readEntries.isEmpty();
return new MockGroupScanPOP(null, readEntries);
}
@@ -161,6 +162,7 @@
} catch (IOException e) {
throw new IllegalArgumentException("Unable to read mock table definition file: " + name, e);
}
+
return new DynamicDrillTable(engine, this.name, mockTableDefn.getEntries());
}
@@ -177,10 +179,9 @@
if (unit == null) { }
else if (unit.equalsIgnoreCase("K")) { n *= 1000; }
else if (unit.equalsIgnoreCase("M")) { n *= 1_000_000; }
- MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, 1, null);
- List<MockTableDef.MockScanEntry> list = new ArrayList<>();
- list.add(entry);
- return new DynamicDrillTable(engine, this.name, list);
+ MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, 1, null);
+ MockTableSelection entries = new MockTableSelection(ImmutableList.<MockScanEntry>of(entry));
+ return new DynamicDrillTable(engine, this.name, entries);
}
@Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
index 1b4af74..5a9a6f5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java
@@ -31,6 +31,7 @@
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
/**
* Structure of a mock table definition file. Yes, using Jackson deserialization to parse
@@ -85,6 +86,29 @@
}
/**
+ * A tiny wrapper class to add required DrillTableSelection behaviour to
+ * the entries list.
+ */
+ public static class MockTableSelection implements DrillTableSelection {
+ private final List<MockScanEntry> entries;
+
+ @JsonCreator
+ public MockTableSelection(@JsonProperty("entries") List<MockScanEntry> entries) {
+ this.entries = entries;
+ }
+
+ @JsonIgnore
+ @Override
+ public String digest() {
+ return entries.toString();
+ }
+
+ public List<MockScanEntry> getEntries() {
+ return entries;
+ }
+ }
+
+ /**
* Meta-data description of the columns we wish to create during a simulated
* scan.
*/
@@ -189,10 +213,10 @@
}
private String descrip;
- List<MockTableDef.MockScanEntry> entries;
+ MockTableSelection entries;
public MockTableDef(@JsonProperty("descrip") final String descrip,
- @JsonProperty("entries") final List<MockTableDef.MockScanEntry> entries) {
+ @JsonProperty("entries") final MockTableSelection entries) {
this.descrip = descrip;
this.entries = entries;
}
@@ -211,5 +235,5 @@
* @return
*/
- public List<MockTableDef.MockScanEntry> getEntries() { return entries; }
+ public MockTableSelection getEntries() { return entries; }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
index 7528d99..da1a34e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/plan/rel/PluginDrillTable.java
@@ -23,6 +23,7 @@
import org.apache.calcite.schema.TranslatableTable;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.planner.logical.DynamicDrillTable;
import org.apache.drill.exec.store.StoragePlugin;
import org.apache.drill.exec.util.Utilities;
@@ -36,7 +37,7 @@
private final Convention convention;
public PluginDrillTable(StoragePlugin plugin, String storageEngineName,
- String userName, Object selection, Convention convention) {
+ String userName, DrillTableSelection selection, Convention convention) {
super(plugin, storageEngineName, userName, selection);
this.convention = convention;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
index dbe5891..07df14b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/StaticDrillTable.java
@@ -21,6 +21,7 @@
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.Schema.TableType;
import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.RecordDataType;
import org.apache.drill.exec.store.StoragePlugin;
import org.apache.drill.exec.util.ImpersonationUtil;
@@ -34,7 +35,7 @@
private final RecordDataType dataType;
- public StaticDrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, Object selection, RecordDataType dataType) {
+ public StaticDrillTable(String storageEngineName, StoragePlugin plugin, TableType tableType, DrillTableSelection selection, RecordDataType dataType) {
super(storageEngineName, plugin, tableType, ImpersonationUtil.getProcessUserName(), selection);
this.dataType = dataType;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
index 5bddc8d..35a4b64 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java
@@ -21,6 +21,7 @@
import org.apache.drill.exec.alias.AliasTarget;
import org.apache.drill.exec.ops.ExecutorFragmentContext;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.store.sys.OptionIterator.OptionValueWrapper;
/**
@@ -31,7 +32,7 @@
* PROFILES and PROFILES_JSON are stored in local / distributed storage.
* </p>
*/
-public enum SystemTable {
+public enum SystemTable implements DrillTableSelection {
OPTIONS_OLD("options_old", false, OptionValueWrapper.class) {
@Deprecated
@Override
@@ -164,4 +165,9 @@
public Class<?> getPojoClass() {
return pojoClass;
}
+
+ @Override
+ public String digest() {
+ return toString();
+ }
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
index 643af83..22b5748 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
@@ -67,7 +67,6 @@
/**
* Test for validating {@link DrillSpnegoAuthenticator}
*/
-@Ignore("See DRILL-5387")
@Category(SecurityTest.class)
public class TestDrillSpnegoAuthenticator extends BaseTest {
@@ -81,7 +80,7 @@
@BeforeClass
public static void setupTest() throws Exception {
- spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
+ spnegoHelper = new KerberosHelper(TestDrillSpnegoAuthenticator.class.getSimpleName(), primaryName);
spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());
// (1) Refresh Kerberos config.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
index a91f802..3d6a8cc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestSpnegoConfig.java
@@ -55,7 +55,7 @@
@BeforeClass
public static void setupTest() throws Exception {
- spnegoHelper = new KerberosHelper(TestSpnegoAuthentication.class.getSimpleName(), primaryName);
+ spnegoHelper = new KerberosHelper(TestSpnegoConfig.class.getSimpleName(), primaryName);
spnegoHelper.setupKdc(dirTestWatcher.getTmpDir());
// (1) Refresh Kerberos config.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
index 6628069..f5a9273 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/enumerable/plan/EnumMockTable.java
@@ -20,12 +20,13 @@
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.schema.TranslatableTable;
+import org.apache.drill.exec.planner.logical.DrillTableSelection;
import org.apache.drill.exec.planner.logical.DynamicDrillTable;
import org.apache.drill.exec.store.StoragePlugin;
public class EnumMockTable extends DynamicDrillTable implements TranslatableTable {
- public EnumMockTable(StoragePlugin plugin, String storageEngineName, String userName, Object selection) {
+ public EnumMockTable(StoragePlugin plugin, String storageEngineName, String userName, DrillTableSelection selection) {
super(plugin, storageEngineName, userName, selection);
}
diff --git a/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json b/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
index acae9e7..2db5634 100644
--- a/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
+++ b/exec/java-exec/src/test/resources/functions/conv/conversionTestWithLogicalPlan.json
@@ -18,30 +18,32 @@
"op" : "scan",
"@id" : 1,
"storageengine" : "mock",
- "selection" : [ {
- "records" : 10,
- "types" : [ {
- "name" : "tinyint_val",
- "type" : "TINYINT",
- "mode" : "REQUIRED"
- }, {
- "name" : "smallint_val",
- "type" : "SMALLINT",
- "mode" : "REQUIRED"
- }, {
- "name" : "int_val",
- "type" : "INT",
- "mode" : "REQUIRED"
- }, {
- "name" : "bigint_val",
- "type" : "BIGINT",
- "mode" : "REQUIRED"
- }, {
- "name" : "uint8_val",
- "type" : "UINT8",
- "mode" : "REQUIRED"
+ "selection" : {
+ "entries": [ {
+ "records" : 10,
+ "types" : [ {
+ "name" : "tinyint_val",
+ "type" : "TINYINT",
+ "mode" : "REQUIRED"
+ }, {
+ "name" : "smallint_val",
+ "type" : "SMALLINT",
+ "mode" : "REQUIRED"
+ }, {
+ "name" : "int_val",
+ "type" : "INT",
+ "mode" : "REQUIRED"
+ }, {
+ "name" : "bigint_val",
+ "type" : "BIGINT",
+ "mode" : "REQUIRED"
+ }, {
+ "name" : "uint8_val",
+ "type" : "UINT8",
+ "mode" : "REQUIRED"
+ } ]
} ]
- } ]
+ }
}, {
"op" : "project",
"@id" : 2,
@@ -84,4 +86,4 @@
"target" : null,
"storageEngine" : "--SCREEN--"
} ]
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/test/resources/scan_screen_logical.json b/exec/java-exec/src/test/resources/scan_screen_logical.json
index 1cf3800..97f1ab9 100644
--- a/exec/java-exec/src/test/resources/scan_screen_logical.json
+++ b/exec/java-exec/src/test/resources/scan_screen_logical.json
@@ -18,14 +18,16 @@
"op" : "scan",
"memo" : "initial_scan",
"storageengine" : "mock",
- "selection" : [ {
- "records" : 100,
- "types" : [ {
- "name" : "superhero_name",
- "type" : "VARCHAR",
- "mode" : "REQUIRED"
+ "selection" : {
+ "entries": [ {
+ "records" : 100,
+ "types" : [ {
+ "name" : "superhero_name",
+ "type" : "VARCHAR",
+ "mode" : "REQUIRED"
+ } ]
} ]
- } ]
+ }
}, {
"@id" : "2",
"input" : 1,
@@ -35,4 +37,4 @@
"file" : "console:///stdout"
}
} ]
-}
\ No newline at end of file
+}
diff --git a/pom.xml b/pom.xml
index 5434390..78d59dc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -51,7 +51,7 @@
<slf4j.version>1.7.26</slf4j.version>
<shaded.guava.version>28.2-jre</shaded.guava.version>
<guava.version>30.1.1-jre</guava.version>
- <forkCount>2</forkCount>
+ <forkCount>1</forkCount>
<parquet.version>1.12.2</parquet.version>
<parquet.format.version>2.8.0</parquet.format.version>
<!--