Make QueryComponentSuppliers independent from test classes (#16275)
diff --git a/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java b/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java
index baa9c0f..b709d20 100644
--- a/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java
+++ b/extensions-contrib/compressed-bigdecimal/src/test/java/org/apache/druid/compressedbigdecimal/CompressedBigDecimalSqlAggregatorTestBase.java
@@ -24,6 +24,7 @@
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.collect.ImmutableList;
import com.google.inject.Injector;
+import org.apache.druid.compressedbigdecimal.CompressedBigDecimalSqlAggregatorTestBase.CompressedBigDecimalComponentSupplier;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.InputRowSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
@@ -43,8 +44,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -54,66 +58,75 @@
import java.util.List;
import java.util.stream.Collectors;
+@SqlTestFrameWorkModule(CompressedBigDecimalComponentSupplier.class)
public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalciteQueryTest
{
- private static final InputRowSchema SCHEMA = new InputRowSchema(
- new TimestampSpec(TestDataBuilder.TIMESTAMP_COLUMN, "iso", null),
- new DimensionsSpec(
- DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2", "dim3", "m2"))
- ),
- null
- );
-
- private static final List<InputRow> ROWS1 =
- TestDataBuilder.RAW_ROWS1.stream().map(m -> TestDataBuilder.createRow(m, SCHEMA)).collect(Collectors.toList());
-
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class CompressedBigDecimalComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new CompressedBigDecimalModule());
- }
+ public CompressedBigDecimalComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- QueryableIndex index =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1")
- )
- .withRollup(false)
- .build()
- )
- .rows(ROWS1)
- .buildMMappedIndex();
-
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
+ private static final InputRowSchema SCHEMA = new InputRowSchema(
+ new TimestampSpec(TestDataBuilder.TIMESTAMP_COLUMN, "iso", null),
+ new DimensionsSpec(
+ DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2", "dim3", "m2"))
+ ),
+ null
);
- }
- @Override
- public void configureJsonMapper(ObjectMapper objectMapper)
- {
- objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);
- objectMapper.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
+ private static final List<InputRow> ROWS1 =
+ TestDataBuilder.RAW_ROWS1.stream().map(m -> TestDataBuilder.createRow(m, SCHEMA)).collect(Collectors.toList());
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new CompressedBigDecimalModule());
+ }
+
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ QueryableIndex index =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1")
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
+
+ @Override
+ public void configureJsonMapper(ObjectMapper objectMapper)
+ {
+ objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);
+ objectMapper.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
+ }
}
@Test
diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java
index 2a3db93..bd4b81f 100644
--- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java
+++ b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java
@@ -48,8 +48,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -57,55 +60,64 @@
import java.util.List;
+@SqlTestFrameWorkModule(TDigestSketchSqlAggregatorTest.TDigestComponentSupplier.class)
public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class TDigestComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new TDigestSketchModule());
- }
+ public TDigestComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- TDigestSketchModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new TDigestSketchModule());
+ }
- final QueryableIndex index =
- IndexBuilder.create(CalciteTests.getJsonMapper())
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new TDigestSketchAggregatorFactory(
- "qsketch_m1",
- "m1",
- 128
- )
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ TDigestSketchModule.registerSerde();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index =
+ IndexBuilder.create(CalciteTests.getJsonMapper())
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new TDigestSketchAggregatorFactory(
+ "qsketch_m1",
+ "m1",
+ 128
+ )
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java
index d6e5825..126f397 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java
@@ -48,6 +48,7 @@
import org.apache.druid.query.aggregation.datasketches.hll.HllSketchToEstimateWithBoundsPostAggregator;
import org.apache.druid.query.aggregation.datasketches.hll.HllSketchToStringPostAggregator;
import org.apache.druid.query.aggregation.datasketches.hll.HllSketchUnionPostAggregator;
+import org.apache.druid.query.aggregation.datasketches.hll.sql.HllSketchSqlAggregatorTest.HllSketchComponentSupplier;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;
@@ -75,10 +76,13 @@
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.SqlTestFrameworkConfig;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.util.CacheTestHelperModule.ResultCacheMode;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.sql.guice.SqlModule;
import org.apache.druid.timeline.DataSegment;
@@ -94,6 +98,7 @@
import java.util.Properties;
import java.util.stream.Collectors;
+@SqlTestFramework.SqlTestFrameWorkModule(HllSketchComponentSupplier.class)
public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final boolean ROUND = true;
@@ -233,64 +238,71 @@
)
);
-
- @Override
- public void gatherProperties(Properties properties)
+ public static class HllSketchComponentSupplier extends StandardComponentSupplier
{
- super.gatherProperties(properties);
+ public HllSketchComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- // Use APPROX_COUNT_DISTINCT_DS_HLL as APPROX_COUNT_DISTINCT impl for these tests.
- properties.put(SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE, HllSketchApproxCountDistinctSqlAggregator.NAME);
- }
+ @Override
+ public void gatherProperties(Properties properties)
+ {
+ super.gatherProperties(properties);
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
- {
- super.configureGuice(builder);
- builder.addModule(new HllSketchModule());
- }
+ // Use APPROX_COUNT_DISTINCT_DS_HLL as APPROX_COUNT_DISTINCT impl for these tests.
+ properties.put(SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE, HllSketchApproxCountDistinctSqlAggregator.NAME);
+ }
- @SuppressWarnings("resource")
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- HllSketchModule.registerSerde();
- final QueryableIndex index = IndexBuilder
- .create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new HllSketchBuildAggregatorFactory("hllsketch_dim1", "dim1", null, null, null, false, ROUND),
- new HllSketchBuildAggregatorFactory("hllsketch_dim3", "dim3", null, null, null, false, false),
- new HllSketchBuildAggregatorFactory("hllsketch_m1", "m1", null, null, null, false, ROUND),
- new HllSketchBuildAggregatorFactory("hllsketch_f1", "f1", null, null, null, false, ROUND),
- new HllSketchBuildAggregatorFactory("hllsketch_l1", "l1", null, null, null, false, ROUND),
- new HllSketchBuildAggregatorFactory("hllsketch_d1", "d1", null, null, null, false, ROUND)
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
- .buildMMappedIndex();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new HllSketchModule());
+ }
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ @SuppressWarnings("resource")
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ HllSketchModule.registerSerde();
+ final QueryableIndex index = IndexBuilder
+ .create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new HllSketchBuildAggregatorFactory("hllsketch_dim1", "dim1", null, null, null, false, ROUND),
+ new HllSketchBuildAggregatorFactory("hllsketch_dim3", "dim3", null, null, null, false, false),
+ new HllSketchBuildAggregatorFactory("hllsketch_m1", "m1", null, null, null, false, ROUND),
+ new HllSketchBuildAggregatorFactory("hllsketch_f1", "f1", null, null, null, false, ROUND),
+ new HllSketchBuildAggregatorFactory("hllsketch_l1", "l1", null, null, null, false, ROUND),
+ new HllSketchBuildAggregatorFactory("hllsketch_d1", "d1", null, null, null, false, ROUND)
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java
index fe8680d..bbd0e9c 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/sql/DoublesSketchSqlAggregatorTest.java
@@ -42,6 +42,7 @@
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToQuantilesPostAggregator;
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToRankPostAggregator;
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToStringPostAggregator;
+import org.apache.druid.query.aggregation.datasketches.quantiles.sql.DoublesSketchSqlAggregatorTest.DoublesSketchComponentSupplier;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
@@ -57,8 +58,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -69,55 +73,64 @@
import java.util.List;
import java.util.Map;
+@SqlTestFramework.SqlTestFrameWorkModule(DoublesSketchComponentSupplier.class)
public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class DoublesSketchComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new DoublesSketchModule());
- }
+ public DoublesSketchComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- DoublesSketchModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new DoublesSketchModule());
+ }
- final QueryableIndex index =
- IndexBuilder.create(CalciteTests.getJsonMapper())
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new DoublesSketchAggregatorFactory(
- "qsketch_m1",
- "m1",
- 128
- )
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ DoublesSketchModule.registerSerde();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index =
+ IndexBuilder.create(CalciteTests.getJsonMapper())
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new DoublesSketchAggregatorFactory(
+ "qsketch_m1",
+ "m1",
+ 128
+ )
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java
index aedd641..f615319 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java
@@ -40,6 +40,7 @@
import org.apache.druid.query.aggregation.datasketches.theta.SketchMergeAggregatorFactory;
import org.apache.druid.query.aggregation.datasketches.theta.SketchModule;
import org.apache.druid.query.aggregation.datasketches.theta.SketchSetPostAggregator;
+import org.apache.druid.query.aggregation.datasketches.theta.sql.ThetaSketchSqlAggregatorTest.ThetaSketchComponentSupplier;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;
@@ -61,8 +62,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.sql.guice.SqlModule;
import org.apache.druid.timeline.DataSegment;
@@ -77,6 +81,7 @@
import java.util.List;
import java.util.Properties;
+@SqlTestFramework.SqlTestFrameWorkModule(ThetaSketchComponentSupplier.class)
public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final String DATA_SOURCE = "foo";
@@ -86,67 +91,75 @@
)
);
- @Override
- public void gatherProperties(Properties properties)
+ public static class ThetaSketchComponentSupplier extends StandardComponentSupplier
{
- super.gatherProperties(properties);
+ public ThetaSketchComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- // Use APPROX_COUNT_DISTINCT_DS_THETA as APPROX_COUNT_DISTINCT impl for these tests.
- properties.put(
- SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE,
- ThetaSketchApproxCountDistinctSqlAggregator.NAME
- );
- }
+ @Override
+ public void gatherProperties(Properties properties)
+ {
+ super.gatherProperties(properties);
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
- {
- super.configureGuice(builder);
- builder.addModule(new SketchModule());
- }
+ // Use APPROX_COUNT_DISTINCT_DS_THETA as APPROX_COUNT_DISTINCT impl for these tests.
+ properties.put(
+ SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE,
+ ThetaSketchApproxCountDistinctSqlAggregator.NAME
+ );
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- SketchModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new SketchModule());
+ }
- final QueryableIndex index = IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new SketchMergeAggregatorFactory(
- "thetasketch_dim1",
- "dim1",
- null,
- false,
- false,
- null
- )
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ SketchModule.registerSerde();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index = IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new SketchMergeAggregatorFactory(
+ "thetasketch_dim1",
+ "dim1",
+ null,
+ false,
+ false,
+ null
+ )
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java
index 7d6e322..c6f21f5 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/sql/ArrayOfDoublesSketchSqlAggregatorTest.java
@@ -35,6 +35,7 @@
import org.apache.druid.query.aggregation.datasketches.tuple.ArrayOfDoublesSketchOperations;
import org.apache.druid.query.aggregation.datasketches.tuple.ArrayOfDoublesSketchSetOpPostAggregator;
import org.apache.druid.query.aggregation.datasketches.tuple.ArrayOfDoublesSketchToMetricsSumEstimatePostAggregator;
+import org.apache.druid.query.aggregation.datasketches.tuple.sql.ArrayOfDoublesSketchSqlAggregatorTest.ArrayOfDoublesComponentSupplier;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
@@ -47,8 +48,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -57,6 +61,7 @@
import java.util.List;
import java.util.stream.Collectors;
+@SqlTestFramework.SqlTestFrameWorkModule(ArrayOfDoublesComponentSupplier.class)
public class ArrayOfDoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
@@ -98,56 +103,64 @@
.build()
).stream().map(TestDataBuilder::createRow).collect(Collectors.toList());
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class ArrayOfDoublesComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new ArrayOfDoublesSketchModule());
- }
+ public ArrayOfDoublesComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- ArrayOfDoublesSketchModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new ArrayOfDoublesSketchModule());
+ }
- final QueryableIndex index = IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(
- OffHeapMemorySegmentWriteOutMediumFactory.instance()
- )
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new ArrayOfDoublesSketchAggregatorFactory(
- "tuplesketch_dim2",
- "dim2",
- null,
- ImmutableList.of("m1"),
- 1
- ),
- new LongSumAggregatorFactory("m1", "m1")
- )
- .withRollup(false)
- .build()
- )
- .rows(ROWS)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ ArrayOfDoublesSketchModule.registerSerde();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index = IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(
+ OffHeapMemorySegmentWriteOutMediumFactory.instance()
+ )
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new ArrayOfDoublesSketchAggregatorFactory(
+ "tuplesketch_dim2",
+ "dim2",
+ null,
+ ImmutableList.of("m1"),
+ 1
+ ),
+ new LongSumAggregatorFactory("m1", "m1")
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java
index 5888b7d..d680abc 100644
--- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java
+++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java
@@ -49,6 +49,7 @@
import org.junit.Test;
import javax.annotation.Nullable;
+
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java
index 8d00b5b..ac79ef4 100644
--- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java
+++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java
@@ -33,6 +33,7 @@
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.bloom.BloomFilterAggregatorFactory;
+import org.apache.druid.query.aggregation.bloom.sql.BloomFilterSqlAggregatorTest.BloomFilterSqlAggComponentSupplier;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.ExtractionDimensionSpec;
import org.apache.druid.query.expression.TestExprMacroTable;
@@ -49,8 +50,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -58,53 +62,62 @@
import java.util.List;
+@SqlTestFramework.SqlTestFrameWorkModule(BloomFilterSqlAggComponentSupplier.class)
public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final int TEST_NUM_ENTRIES = 1000;
private static final String DATA_SOURCE = "numfoo";
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class BloomFilterSqlAggComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new BloomFilterExtensionModule());
- }
+ public BloomFilterSqlAggComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- final QueryableIndex index =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1")
- )
- .withDimensionsSpec(TestDataBuilder.INDEX_SCHEMA_NUMERIC_DIMS.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
- .buildMMappedIndex();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new BloomFilterExtensionModule());
+ }
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ final QueryableIndex index =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1")
+ )
+ .withDimensionsSpec(TestDataBuilder.INDEX_SCHEMA_NUMERIC_DIMS.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java
index 49632c7..99212c8 100644
--- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java
+++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java
@@ -34,23 +34,36 @@
import org.apache.druid.query.filter.BloomKFilterHolder;
import org.apache.druid.query.filter.ExpressionDimFilter;
import org.apache.druid.query.filter.OrDimFilter;
+import org.apache.druid.query.filter.sql.BloomDimFilterSqlTest.BloomDimFilterComponentSupplier;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.io.IOException;
+@SqlTestFrameWorkModule(BloomDimFilterComponentSupplier.class)
public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class BloomDimFilterComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new BloomFilterExtensionModule());
+ public BloomDimFilterComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new BloomFilterExtensionModule());
+ }
}
@Test
@@ -81,6 +94,7 @@
);
}
+
@Test
public void testBloomFilterExprFilter() throws IOException
{
diff --git a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogInsertTest.java b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogInsertTest.java
index 49ebf3f..ca73a3f 100644
--- a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogInsertTest.java
+++ b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogInsertTest.java
@@ -24,14 +24,17 @@
import org.apache.druid.catalog.model.facade.DatasourceFacade;
import org.apache.druid.catalog.model.table.ClusterKeySpec;
import org.apache.druid.catalog.model.table.TableBuilder;
+import org.apache.druid.catalog.sql.CatalogInsertTest.CatalogInsertComponentSupplier;
import org.apache.druid.catalog.storage.CatalogStorage;
import org.apache.druid.catalog.storage.CatalogTests;
import org.apache.druid.catalog.sync.CachedMetadataCatalog;
import org.apache.druid.catalog.sync.MetadataCatalog;
import org.apache.druid.metadata.TestDerbyConnector.DerbyConnectorRule5;
import org.apache.druid.sql.calcite.CalciteCatalogInsertTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.planner.CatalogResolver;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.Assert.fail;
@@ -39,6 +42,7 @@
/**
* Test the use of catalog specs to drive MSQ ingestion.
*/
+@SqlTestFrameWorkModule(CatalogInsertComponentSupplier.class)
public class CatalogInsertTest extends CalciteCatalogInsertTest
{
@RegisterExtension
@@ -46,60 +50,68 @@
private static CatalogStorage storage;
- @Override
- public CatalogResolver createCatalogResolver()
+ protected static class CatalogInsertComponentSupplier extends CatalogIngestionDmlComponentSupplier
{
- CatalogTests.DbFixture dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
- storage = dbFixture.storage;
- MetadataCatalog catalog = new CachedMetadataCatalog(
- storage,
- storage.schemaRegistry(),
- storage.jsonMapper()
- );
- return new LiveCatalogResolver(catalog);
- }
-
- @Override
- public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
- {
- super.finalizeTestFramework(sqlTestFramework);
- buildDatasources();
- }
-
- public void buildDatasources()
- {
- RESOLVED_TABLES.forEach((datasourceName, datasourceTable) -> {
- DatasourceFacade catalogMetadata = datasourceTable.effectiveMetadata().catalogMetadata();
- TableBuilder tableBuilder = TableBuilder.datasource(datasourceName, catalogMetadata.segmentGranularityString());
- catalogMetadata.columnFacades().forEach(
- columnFacade -> {
- tableBuilder.column(columnFacade.spec().name(), columnFacade.spec().dataType());
- }
- );
-
- if (catalogMetadata.hiddenColumns() != null && !catalogMetadata.hiddenColumns().isEmpty()) {
- tableBuilder.hiddenColumns(catalogMetadata.hiddenColumns());
- }
-
- if (catalogMetadata.isSealed()) {
- tableBuilder.sealed(true);
- }
-
- if (catalogMetadata.clusterKeys() != null && !catalogMetadata.clusterKeys().isEmpty()) {
- tableBuilder.clusterColumns(catalogMetadata.clusterKeys().toArray(new ClusterKeySpec[0]));
- }
-
- createTableMetadata(tableBuilder.build());
- });
- }
-
- private void createTableMetadata(TableMetadata table)
- {
- try {
- storage.tables().create(table);
+ public CatalogInsertComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
}
- catch (CatalogException e) {
- fail(e.getMessage());
+
+ @Override
+ public CatalogResolver createCatalogResolver()
+ {
+ CatalogTests.DbFixture dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
+ storage = dbFixture.storage;
+ MetadataCatalog catalog = new CachedMetadataCatalog(
+ storage,
+ storage.schemaRegistry(),
+ storage.jsonMapper()
+ );
+ return new LiveCatalogResolver(catalog);
+ }
+
+ @Override
+ public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
+ {
+ super.finalizeTestFramework(sqlTestFramework);
+ buildDatasources();
+ }
+
+ public void buildDatasources()
+ {
+ RESOLVED_TABLES.forEach((datasourceName, datasourceTable) -> {
+ DatasourceFacade catalogMetadata = datasourceTable.effectiveMetadata().catalogMetadata();
+ TableBuilder tableBuilder = TableBuilder.datasource(datasourceName, catalogMetadata.segmentGranularityString());
+ catalogMetadata.columnFacades().forEach(
+ columnFacade -> {
+ tableBuilder.column(columnFacade.spec().name(), columnFacade.spec().dataType());
+ }
+ );
+
+ if (catalogMetadata.hiddenColumns() != null && !catalogMetadata.hiddenColumns().isEmpty()) {
+ tableBuilder.hiddenColumns(catalogMetadata.hiddenColumns());
+ }
+
+ if (catalogMetadata.isSealed()) {
+ tableBuilder.sealed(true);
+ }
+
+ if (catalogMetadata.clusterKeys() != null && !catalogMetadata.clusterKeys().isEmpty()) {
+ tableBuilder.clusterColumns(catalogMetadata.clusterKeys().toArray(new ClusterKeySpec[0]));
+ }
+
+ createTableMetadata(tableBuilder.build());
+ });
+ }
+
+ private void createTableMetadata(TableMetadata table)
+ {
+ try {
+ storage.tables().create(table);
+ }
+ catch (CatalogException e) {
+ fail(e.getMessage());
+ }
}
}
}
diff --git a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogQueryTest.java b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogQueryTest.java
index b947c2e..ceada96 100644
--- a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogQueryTest.java
+++ b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogQueryTest.java
@@ -23,6 +23,7 @@
import org.apache.druid.catalog.model.Columns;
import org.apache.druid.catalog.model.TableMetadata;
import org.apache.druid.catalog.model.table.TableBuilder;
+import org.apache.druid.catalog.sql.CatalogQueryTest.CatalogQueryComponentSupplier;
import org.apache.druid.catalog.storage.CatalogStorage;
import org.apache.druid.catalog.storage.CatalogTests;
import org.apache.druid.catalog.sync.CachedMetadataCatalog;
@@ -30,8 +31,10 @@
import org.apache.druid.metadata.TestDerbyConnector.DerbyConnectorRule5;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.SqlSchema;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.planner.CatalogResolver;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -41,13 +44,14 @@
import static org.junit.Assert.fail;
+@SqlTestFramework.SqlTestFrameWorkModule(CatalogQueryComponentSupplier.class)
public class CatalogQueryTest extends BaseCalciteQueryTest
{
@RegisterExtension
public static final DerbyConnectorRule5 DERBY_CONNECTION_RULE = new DerbyConnectorRule5();
- private CatalogTests.DbFixture dbFixture;
- private CatalogStorage storage;
+ private static CatalogTests.DbFixture dbFixture;
+ private static CatalogStorage storage;
@Test
public void testCatalogSchema()
@@ -76,49 +80,57 @@
CatalogTests.tearDown(dbFixture);
}
- @Override
- public CatalogResolver createCatalogResolver()
+ protected static class CatalogQueryComponentSupplier extends StandardComponentSupplier
{
- dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
- storage = dbFixture.storage;
- MetadataCatalog catalog = new CachedMetadataCatalog(
- storage,
- storage.schemaRegistry(),
- storage.jsonMapper()
- );
- return new LiveCatalogResolver(catalog);
- }
-
- @Override
- public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
- {
- super.finalizeTestFramework(sqlTestFramework);
- buildFooDatasource();
- }
-
- private void createTableMetadata(TableMetadata table)
- {
- try {
- storage.tables().create(table);
+ public CatalogQueryComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
}
- catch (CatalogException e) {
- fail(e.getMessage());
- }
- }
- public void buildFooDatasource()
- {
- TableMetadata spec = TableBuilder.datasource("foo", "ALL")
- .timeColumn()
- .column("extra1", null)
- .column("dim2", null)
- .column("dim1", null)
- .column("cnt", null)
- .column("m1", Columns.DOUBLE)
- .column("extra2", Columns.LONG)
- .column("extra3", Columns.STRING)
- .hiddenColumns(Arrays.asList("dim3", "unique_dim1"))
- .build();
- createTableMetadata(spec);
+ @Override
+ public CatalogResolver createCatalogResolver()
+ {
+ dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
+ storage = dbFixture.storage;
+ MetadataCatalog catalog = new CachedMetadataCatalog(
+ storage,
+ storage.schemaRegistry(),
+ storage.jsonMapper()
+ );
+ return new LiveCatalogResolver(catalog);
+ }
+
+ @Override
+ public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
+ {
+ super.finalizeTestFramework(sqlTestFramework);
+ buildFooDatasource();
+ }
+
+ private void createTableMetadata(TableMetadata table)
+ {
+ try {
+ storage.tables().create(table);
+ }
+ catch (CatalogException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ public void buildFooDatasource()
+ {
+ TableMetadata spec = TableBuilder.datasource("foo", "ALL")
+ .timeColumn()
+ .column("extra1", null)
+ .column("dim2", null)
+ .column("dim1", null)
+ .column("cnt", null)
+ .column("m1", Columns.DOUBLE)
+ .column("extra2", Columns.LONG)
+ .column("extra3", Columns.STRING)
+ .hiddenColumns(Arrays.asList("dim3", "unique_dim1"))
+ .build();
+ createTableMetadata(spec);
+ }
}
}
diff --git a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogReplaceTest.java b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogReplaceTest.java
index 31e0a34..3c3cefb 100644
--- a/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogReplaceTest.java
+++ b/extensions-core/druid-catalog/src/test/java/org/apache/druid/catalog/sql/CatalogReplaceTest.java
@@ -24,14 +24,17 @@
import org.apache.druid.catalog.model.facade.DatasourceFacade;
import org.apache.druid.catalog.model.table.ClusterKeySpec;
import org.apache.druid.catalog.model.table.TableBuilder;
+import org.apache.druid.catalog.sql.CatalogReplaceTest.CatalogReplaceComponentSupplier;
import org.apache.druid.catalog.storage.CatalogStorage;
import org.apache.druid.catalog.storage.CatalogTests;
import org.apache.druid.catalog.sync.CachedMetadataCatalog;
import org.apache.druid.catalog.sync.MetadataCatalog;
import org.apache.druid.metadata.TestDerbyConnector.DerbyConnectorRule5;
import org.apache.druid.sql.calcite.CalciteCatalogReplaceTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.planner.CatalogResolver;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.Assert.fail;
@@ -39,74 +42,83 @@
/**
* Test the use of catalog specs to drive MSQ ingestion.
*/
+@SqlTestFrameWorkModule(CatalogReplaceComponentSupplier.class)
public class CatalogReplaceTest extends CalciteCatalogReplaceTest
{
@RegisterExtension
public static final DerbyConnectorRule5 DERBY_CONNECTION_RULE = new DerbyConnectorRule5();
private static CatalogStorage storage;
- @Override
- public CatalogResolver createCatalogResolver()
+ protected static class CatalogReplaceComponentSupplier extends CatalogIngestionDmlComponentSupplier
{
- CatalogTests.DbFixture dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
- storage = dbFixture.storage;
- MetadataCatalog catalog = new CachedMetadataCatalog(
- storage,
- storage.schemaRegistry(),
- storage.jsonMapper()
- );
- return new LiveCatalogResolver(catalog);
- }
+ public CatalogReplaceComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
- {
- super.finalizeTestFramework(sqlTestFramework);
- buildDatasources();
- }
+ @Override
+ public CatalogResolver createCatalogResolver()
+ {
+ CatalogTests.DbFixture dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
+ storage = dbFixture.storage;
+ MetadataCatalog catalog = new CachedMetadataCatalog(
+ storage,
+ storage.schemaRegistry(),
+ storage.jsonMapper()
+ );
+ return new LiveCatalogResolver(catalog);
+ }
- public void buildDatasources()
- {
- RESOLVED_TABLES.forEach((datasourceName, datasourceTable) -> {
- DatasourceFacade catalogMetadata = datasourceTable.effectiveMetadata().catalogMetadata();
- TableBuilder tableBuilder = TableBuilder.datasource(datasourceName, catalogMetadata.segmentGranularityString());
+ @Override
+ public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
+ {
+ super.finalizeTestFramework(sqlTestFramework);
+ buildDatasources();
+ }
+
+ public void buildDatasources()
+ {
+ RESOLVED_TABLES.forEach((datasourceName, datasourceTable) -> {
+ DatasourceFacade catalogMetadata = datasourceTable.effectiveMetadata().catalogMetadata();
+ TableBuilder tableBuilder = TableBuilder.datasource(datasourceName, catalogMetadata.segmentGranularityString());
+ catalogMetadata.columnFacades().forEach(
+ columnFacade -> {
+ tableBuilder.column(columnFacade.spec().name(), columnFacade.spec().dataType());
+ }
+ );
+
+ if (catalogMetadata.hiddenColumns() != null && !catalogMetadata.hiddenColumns().isEmpty()) {
+ tableBuilder.hiddenColumns(catalogMetadata.hiddenColumns());
+ }
+
+ if (catalogMetadata.isSealed()) {
+ tableBuilder.sealed(true);
+ }
+
+ if (catalogMetadata.clusterKeys() != null && !catalogMetadata.clusterKeys().isEmpty()) {
+ tableBuilder.clusterColumns(catalogMetadata.clusterKeys().toArray(new ClusterKeySpec[0]));
+ }
+
+ createTableMetadata(tableBuilder.build());
+ });
+ DatasourceFacade catalogMetadata =
+ RESOLVED_TABLES.get("foo").effectiveMetadata().catalogMetadata();
+ TableBuilder tableBuilder = TableBuilder.datasource("foo", catalogMetadata.segmentGranularityString());
catalogMetadata.columnFacades().forEach(
columnFacade -> {
tableBuilder.column(columnFacade.spec().name(), columnFacade.spec().dataType());
}
);
-
- if (catalogMetadata.hiddenColumns() != null && !catalogMetadata.hiddenColumns().isEmpty()) {
- tableBuilder.hiddenColumns(catalogMetadata.hiddenColumns());
- }
-
- if (catalogMetadata.isSealed()) {
- tableBuilder.sealed(true);
- }
-
- if (catalogMetadata.clusterKeys() != null && !catalogMetadata.clusterKeys().isEmpty()) {
- tableBuilder.clusterColumns(catalogMetadata.clusterKeys().toArray(new ClusterKeySpec[0]));
- }
-
- createTableMetadata(tableBuilder.build());
- });
- DatasourceFacade catalogMetadata =
- RESOLVED_TABLES.get("foo").effectiveMetadata().catalogMetadata();
- TableBuilder tableBuilder = TableBuilder.datasource("foo", catalogMetadata.segmentGranularityString());
- catalogMetadata.columnFacades().forEach(
- columnFacade -> {
- tableBuilder.column(columnFacade.spec().name(), columnFacade.spec().dataType());
- }
- );
- }
-
- private void createTableMetadata(TableMetadata table)
- {
- try {
- storage.tables().create(table);
}
- catch (CatalogException e) {
- fail(e.getMessage());
+
+ private void createTableMetadata(TableMetadata table)
+ {
+ try {
+ storage.tables().create(table);
+ }
+ catch (CatalogException e) {
+ fail(e.getMessage());
+ }
}
}
}
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java
index bb6a7e8..ee5bdab 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/FixedBucketsHistogramQuantileSqlAggregatorTest.java
@@ -36,6 +36,7 @@
import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogram;
import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogramAggregatorFactory;
import org.apache.druid.query.aggregation.histogram.QuantilePostAggregator;
+import org.apache.druid.query.aggregation.histogram.sql.FixedBucketsHistogramQuantileSqlAggregatorTest.FixedBucketsHistogramComponentSupplier;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
@@ -51,8 +52,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -60,58 +64,67 @@
import java.util.List;
+@SqlTestFramework.SqlTestFrameWorkModule(FixedBucketsHistogramComponentSupplier.class)
public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class FixedBucketsHistogramComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new ApproximateHistogramDruidModule());
- }
+ public FixedBucketsHistogramComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- ApproximateHistogramDruidModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new ApproximateHistogramDruidModule());
+ }
- final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new FixedBucketsHistogramAggregatorFactory(
- "fbhist_m1",
- "m1",
- 20,
- 0,
- 10,
- FixedBucketsHistogram.OutlierHandlingMode.IGNORE,
- false
- )
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ ApproximateHistogramDruidModule.registerSerde();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new FixedBucketsHistogramAggregatorFactory(
+ "fbhist_m1",
+ "m1",
+ 20,
+ 0,
+ 10,
+ FixedBucketsHistogram.OutlierHandlingMode.IGNORE,
+ false
+ )
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
index a14be31..93ac7bb 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
@@ -35,6 +35,7 @@
import org.apache.druid.query.aggregation.histogram.ApproximateHistogramDruidModule;
import org.apache.druid.query.aggregation.histogram.ApproximateHistogramFoldingAggregatorFactory;
import org.apache.druid.query.aggregation.histogram.QuantilePostAggregator;
+import org.apache.druid.query.aggregation.histogram.sql.QuantileSqlAggregatorTest.QuantileComponentSupplier;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
@@ -50,8 +51,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -59,58 +63,68 @@
import java.util.List;
+@SqlTestFramework.SqlTestFrameWorkModule(QuantileComponentSupplier.class)
public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class QuantileComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new ApproximateHistogramDruidModule());
- }
+ public QuantileComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- ApproximateHistogramDruidModule.registerSerde();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new ApproximateHistogramDruidModule());
+ }
- final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new ApproximateHistogramAggregatorFactory(
- "hist_m1",
- "m1",
- null,
- null,
- null,
- null,
- false
- )
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1)
- .buildMMappedIndex();
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE1)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ ApproximateHistogramDruidModule.registerSerde();
+
+ final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new ApproximateHistogramAggregatorFactory(
+ "hist_m1",
+ "m1",
+ null,
+ null,
+ null,
+ null,
+ false
+ )
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE1)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
@Test
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteArraysQueryMSQTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteArraysQueryMSQTest.java
index 4e036f5..07b1393 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteArraysQueryMSQTest.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteArraysQueryMSQTest.java
@@ -26,50 +26,62 @@
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.msq.exec.WorkerMemoryParameters;
import org.apache.druid.msq.sql.MSQTaskSqlEngine;
+import org.apache.druid.msq.test.CalciteArraysQueryMSQTest.ArraysQueryMSQComponentSupplier;
import org.apache.druid.query.groupby.TestGroupByBuffers;
import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.sql.calcite.CalciteArraysQueryTest;
import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.run.SqlEngine;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
/**
* Runs {@link CalciteArraysQueryTest} but with MSQ engine
*/
+@SqlTestFramework.SqlTestFrameWorkModule(ArraysQueryMSQComponentSupplier.class)
public class CalciteArraysQueryMSQTest extends CalciteArraysQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class ArraysQueryMSQComponentSupplier extends ArraysComponentSupplier
{
- super.configureGuice(builder);
- builder.addModules(
- CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
- );
- }
+ public ArraysQueryMSQComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SqlEngine createEngine(
- QueryLifecycleFactory qlf,
- ObjectMapper queryJsonMapper,
- Injector injector
- )
- {
- final WorkerMemoryParameters workerMemoryParameters =
- WorkerMemoryParameters.createInstance(
- WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
- 2,
- 10,
- 2,
- 0,
- 0
- );
- final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
- queryJsonMapper,
- injector,
- new MSQTestTaskActionClient(queryJsonMapper, injector),
- workerMemoryParameters,
- ImmutableList.of()
- );
- return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModules(
+ CalciteMSQTestsHelper.fetchModules(tempDirProducer::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
+ );
+ }
+
+ @Override
+ public SqlEngine createEngine(
+ QueryLifecycleFactory qlf,
+ ObjectMapper queryJsonMapper,
+ Injector injector
+ )
+ {
+ final WorkerMemoryParameters workerMemoryParameters =
+ WorkerMemoryParameters.createInstance(
+ WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
+ 2,
+ 10,
+ 2,
+ 0,
+ 0
+ );
+ final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
+ queryJsonMapper,
+ injector,
+ new MSQTestTaskActionClient(queryJsonMapper, injector),
+ workerMemoryParameters,
+ ImmutableList.of()
+ );
+ return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ }
}
@Override
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectJoinQueryMSQTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectJoinQueryMSQTest.java
index fa72ca1..d59bf6f 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectJoinQueryMSQTest.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectJoinQueryMSQTest.java
@@ -31,11 +31,14 @@
import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.sql.calcite.CalciteJoinQueryTest;
import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.planner.JoinAlgorithm;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.run.EngineFeature;
import org.apache.druid.sql.calcite.run.QueryMaker;
import org.apache.druid.sql.calcite.run.SqlEngine;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
/**
* Runs {@link CalciteJoinQueryTest} but with MSQ engine.
@@ -45,13 +48,9 @@
/**
* Run all tests with {@link JoinAlgorithm#BROADCAST}.
*/
+ @SqlTestFramework.SqlTestFrameWorkModule(BroadcastJoinComponentSupplier.class)
public static class BroadcastTest extends Base
{
- public BroadcastTest()
- {
- super(JoinAlgorithm.BROADCAST);
- }
-
@Override
protected QueryTestBuilder testBuilder()
{
@@ -63,11 +62,13 @@
/**
* Run all tests with {@link JoinAlgorithm#SORT_MERGE}.
*/
+ @SqlTestFramework.SqlTestFrameWorkModule(SortMergeJoinComponentSupplier.class)
public static class SortMergeTest extends Base
{
- public SortMergeTest()
+ @Override
+ public boolean isSortBasedJoin()
{
- super(JoinAlgorithm.SORT_MERGE);
+ return true;
}
@Override
@@ -82,17 +83,41 @@
public abstract static class Base extends CalciteJoinQueryTest
{
- private final JoinAlgorithm joinAlgorithm;
-
- protected Base(final JoinAlgorithm joinAlgorithm)
- {
- this.joinAlgorithm = joinAlgorithm;
- }
-
@Override
- public boolean isSortBasedJoin()
+ protected QueryTestBuilder testBuilder()
{
- return joinAlgorithm == JoinAlgorithm.SORT_MERGE;
+ return new QueryTestBuilder(new CalciteTestConfig(true))
+ .addCustomRunner(
+ new ExtractResultsFactory(
+ () -> (MSQTestOverlordServiceClient) ((MSQTaskSqlEngine) queryFramework().engine()).overlordClient()))
+ .skipVectorize(true);
+ }
+ }
+
+ protected static class SortMergeJoinComponentSupplier extends AbstractJoinComponentSupplier
+ {
+ public SortMergeJoinComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer, JoinAlgorithm.SORT_MERGE);
+ }
+ }
+
+ protected static class BroadcastJoinComponentSupplier extends AbstractJoinComponentSupplier
+ {
+ public BroadcastJoinComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer, JoinAlgorithm.BROADCAST);
+ }
+ }
+
+ protected abstract static class AbstractJoinComponentSupplier extends StandardComponentSupplier
+ {
+    private final JoinAlgorithm joinAlgorithm;
+
+ public AbstractJoinComponentSupplier(TempDirProducer tempFolderProducer, JoinAlgorithm joinAlgorithm)
+ {
+ super(tempFolderProducer);
+ this.joinAlgorithm = joinAlgorithm;
}
@Override
@@ -100,7 +125,7 @@
{
super.configureGuice(builder);
builder.addModules(
- CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
+ CalciteMSQTestsHelper.fetchModules(tempDirProducer::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
);
}
@@ -136,15 +161,5 @@
}
};
}
-
- @Override
- protected QueryTestBuilder testBuilder()
- {
- return new QueryTestBuilder(new CalciteTestConfig(true))
- .addCustomRunner(
- new ExtractResultsFactory(
- () -> (MSQTestOverlordServiceClient) ((MSQTaskSqlEngine) queryFramework().engine()).overlordClient()))
- .skipVectorize(true);
- }
}
}
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectQueryMSQTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectQueryMSQTest.java
index b2d9ab1..7555df8 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectQueryMSQTest.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteSelectQueryMSQTest.java
@@ -29,11 +29,15 @@
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.msq.exec.WorkerMemoryParameters;
import org.apache.druid.msq.sql.MSQTaskSqlEngine;
+import org.apache.druid.msq.test.CalciteSelectQueryMSQTest.SelectMSQComponentSupplier;
import org.apache.druid.query.groupby.TestGroupByBuffers;
import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.sql.calcite.CalciteQueryTest;
import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.run.SqlEngine;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.junit.Assert;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -44,40 +48,49 @@
/**
* Runs {@link CalciteQueryTest} but with MSQ engine
*/
+@SqlTestFramework.SqlTestFrameWorkModule(SelectMSQComponentSupplier.class)
public class CalciteSelectQueryMSQTest extends CalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class SelectMSQComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModules(CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0]));
- }
+ public SelectMSQComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModules(CalciteMSQTestsHelper.fetchModules(tempDirProducer::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0]));
+ }
- @Override
- public SqlEngine createEngine(
- QueryLifecycleFactory qlf,
- ObjectMapper queryJsonMapper,
- Injector injector
- )
- {
- final WorkerMemoryParameters workerMemoryParameters =
- WorkerMemoryParameters.createInstance(
- WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
- 2,
- 10,
- 2,
- 0,
- 0
- );
- final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
- queryJsonMapper,
- injector,
- new MSQTestTaskActionClient(queryJsonMapper, injector),
- workerMemoryParameters,
- ImmutableList.of()
- );
- return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ @Override
+ public SqlEngine createEngine(
+ QueryLifecycleFactory qlf,
+ ObjectMapper queryJsonMapper,
+ Injector injector
+ )
+ {
+ final WorkerMemoryParameters workerMemoryParameters =
+ WorkerMemoryParameters.createInstance(
+ WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
+ 2,
+ 10,
+ 2,
+ 0,
+ 0
+ );
+ final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
+ queryJsonMapper,
+ injector,
+ new MSQTestTaskActionClient(queryJsonMapper, injector),
+ workerMemoryParameters,
+ ImmutableList.of()
+ );
+ return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ }
}
@Override
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteUnionQueryMSQTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteUnionQueryMSQTest.java
index babd325..acbb4a9 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteUnionQueryMSQTest.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/CalciteUnionQueryMSQTest.java
@@ -40,51 +40,63 @@
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.CalciteUnionQueryTest;
import org.apache.druid.sql.calcite.QueryTestBuilder;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
* Runs {@link CalciteUnionQueryTest} but with MSQ engine
*/
+@SqlTestFramework.SqlTestFrameWorkModule(CalciteUnionQueryMSQTest.UnionQueryMSQComponentSupplier.class)
public class CalciteUnionQueryMSQTest extends CalciteUnionQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
- {
- super.configureGuice(builder);
- builder.addModules(
- CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
- );
- }
-
- @Override
- public SqlEngine createEngine(
- QueryLifecycleFactory qlf,
- ObjectMapper queryJsonMapper,
- Injector injector
- )
+ public static class UnionQueryMSQComponentSupplier extends StandardComponentSupplier
{
- final WorkerMemoryParameters workerMemoryParameters =
- WorkerMemoryParameters.createInstance(
- WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
- 2,
- 10,
- 2,
- 0,
- 0
- );
- final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
- queryJsonMapper,
- injector,
- new MSQTestTaskActionClient(queryJsonMapper, injector),
- workerMemoryParameters,
- ImmutableList.of()
- );
- return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ public UnionQueryMSQComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModules(
+ CalciteMSQTestsHelper.fetchModules(tempDirProducer::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
+ );
+ }
+
+ @Override
+ public SqlEngine createEngine(
+ QueryLifecycleFactory qlf,
+ ObjectMapper queryJsonMapper,
+ Injector injector
+ )
+ {
+ final WorkerMemoryParameters workerMemoryParameters =
+ WorkerMemoryParameters.createInstance(
+ WorkerMemoryParameters.PROCESSING_MINIMUM_BYTES * 50,
+ 2,
+ 10,
+ 2,
+ 0,
+ 0
+ );
+ final MSQTestOverlordServiceClient indexingServiceClient = new MSQTestOverlordServiceClient(
+ queryJsonMapper,
+ injector,
+ new MSQTestTaskActionClient(queryJsonMapper, injector),
+ workerMemoryParameters,
+ ImmutableList.of()
+ );
+ return new MSQTaskSqlEngine(indexingServiceClient, queryJsonMapper);
+ }
}
@Override
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java
index a3b6fa3..3b5e14c 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java
@@ -114,6 +114,7 @@
import org.apache.druid.msq.sql.MSQTaskQueryMaker;
import org.apache.druid.msq.sql.MSQTaskSqlEngine;
import org.apache.druid.msq.sql.entity.PageInformation;
+import org.apache.druid.msq.test.MSQTestBase.MSQBaseComponentSupplier;
import org.apache.druid.msq.util.MultiStageQueryContext;
import org.apache.druid.msq.util.SqlStatementResourceHelper;
import org.apache.druid.query.DruidProcessingConfig;
@@ -159,6 +160,7 @@
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.SqlToolbox;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.calcite.external.HttpOperatorConversion;
@@ -177,6 +179,7 @@
import org.apache.druid.sql.calcite.util.LookylooModule;
import org.apache.druid.sql.calcite.util.QueryFrameworkUtils;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.sql.calcite.view.InProcessViewManager;
import org.apache.druid.sql.guice.SqlBindings;
@@ -202,6 +205,7 @@
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
@@ -250,6 +254,7 @@
* <p>
* Controller -> Overlord communication happens in {@link MSQTestTaskActionClient}
*/
+@SqlTestFramework.SqlTestFrameWorkModule(MSQBaseComponentSupplier.class)
public class MSQTestBase extends BaseCalciteQueryTest
{
public static final Map<String, Object> DEFAULT_MSQ_CONTEXT =
@@ -336,36 +341,44 @@
)
);
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class MSQBaseComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
+ public MSQBaseComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- builder
- .addModule(new HllSketchModule())
- .addModule(new DruidModule()
- {
- // Small subset of MsqSqlModule
- @Override
- public void configure(Binder binder)
- {
- // We want this module to bring InputSourceModule along for the ride.
- binder.install(new InputSourceModule());
- binder.install(new NestedDataModule());
- NestedDataModule.registerHandlersAndSerde();
- SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
- SqlBindings.addOperatorConversion(binder, HttpOperatorConversion.class);
- SqlBindings.addOperatorConversion(binder, InlineOperatorConversion.class);
- SqlBindings.addOperatorConversion(binder, LocalOperatorConversion.class);
- }
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
- @Override
- public List<? extends com.fasterxml.jackson.databind.Module> getJacksonModules()
+ builder
+ .addModule(new HllSketchModule())
+ .addModule(new DruidModule()
{
- // We want this module to bring input sources along for the ride.
- return new InputSourceModule().getJacksonModules();
- }
- });
+ // Small subset of MsqSqlModule
+ @Override
+ public void configure(Binder binder)
+ {
+ // We want this module to bring InputSourceModule along for the ride.
+ binder.install(new InputSourceModule());
+ binder.install(new NestedDataModule());
+ NestedDataModule.registerHandlersAndSerde();
+ SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
+ SqlBindings.addOperatorConversion(binder, HttpOperatorConversion.class);
+ SqlBindings.addOperatorConversion(binder, InlineOperatorConversion.class);
+ SqlBindings.addOperatorConversion(binder, LocalOperatorConversion.class);
+ }
+
+ @Override
+ public List<? extends com.fasterxml.jackson.databind.Module> getJacksonModules()
+ {
+ // We want this module to bring input sources along for the ride.
+ return new InputSourceModule().getJacksonModules();
+ }
+ });
+ }
}
@AfterEach
diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java
index 66c6290..d93cdc9 100644
--- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java
+++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/sql/VarianceSqlAggregatorTest.java
@@ -41,6 +41,7 @@
import org.apache.druid.query.aggregation.variance.VarianceAggregatorCollector;
import org.apache.druid.query.aggregation.variance.VarianceAggregatorFactory;
import org.apache.druid.query.aggregation.variance.VarianceSerde;
+import org.apache.druid.query.aggregation.variance.sql.VarianceSqlAggregatorTest.VarianceComponentSupplier;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
@@ -57,8 +58,11 @@
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -66,61 +70,70 @@
import java.util.List;
+@SqlTestFramework.SqlTestFrameWorkModule(VarianceComponentSupplier.class)
public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class VarianceComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new DruidStatsModule());
- }
+ public VarianceComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- ComplexMetrics.registerSerde(VarianceSerde.TYPE_NAME, new VarianceSerde());
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new DruidStatsModule());
+ }
- final QueryableIndex index =
- IndexBuilder.create(CalciteTests.getJsonMapper().registerModules(new DruidStatsModule().getJacksonModules()))
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withDimensionsSpec(
- new DimensionsSpec(
- ImmutableList.<DimensionSchema>builder()
- .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2", "dim3")))
- .add(new DoubleDimensionSchema("d1"))
- .add(new FloatDimensionSchema("f1"))
- .add(new LongDimensionSchema("l1"))
- .build()
- )
- )
- .withMetrics(
- new CountAggregatorFactory("cnt"),
- new DoubleSumAggregatorFactory("m1", "m1"),
- new VarianceAggregatorFactory("var1", "m1", null, null)
- )
- .withRollup(false)
- .build()
- )
- .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
- .buildMMappedIndex();
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ ComplexMetrics.registerSerde(VarianceSerde.TYPE_NAME, new VarianceSerde());
- return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
- DataSegment.builder()
- .dataSource(CalciteTests.DATASOURCE3)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- );
+ final QueryableIndex index =
+ IndexBuilder.create(CalciteTests.getJsonMapper().registerModules(new DruidStatsModule().getJacksonModules()))
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withDimensionsSpec(
+ new DimensionsSpec(
+ ImmutableList.<DimensionSchema>builder()
+ .addAll(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2", "dim3")))
+ .add(new DoubleDimensionSchema("d1"))
+ .add(new FloatDimensionSchema("f1"))
+ .add(new LongDimensionSchema("l1"))
+ .build()
+ )
+ )
+ .withMetrics(
+ new CountAggregatorFactory("cnt"),
+ new DoubleSumAggregatorFactory("m1", "m1"),
+ new VarianceAggregatorFactory("var1", "m1", null, null)
+ )
+ .withRollup(false)
+ .build()
+ )
+ .rows(TestDataBuilder.ROWS1_WITH_NUMERIC_DIMS)
+ .buildMMappedIndex();
+
+ return SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate).add(
+ DataSegment.builder()
+ .dataSource(CalciteTests.DATASOURCE3)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ );
+ }
}
public void addToHolder(VarianceAggregatorCollector holder, Object raw)
diff --git a/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java b/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java
index 9df6d0f..2af3a29 100644
--- a/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java
+++ b/extensions-core/testing-tools/src/test/java/org/apache/druid/query/sql/SleepSqlTest.java
@@ -26,19 +26,32 @@
import org.apache.druid.query.Druids;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.scan.ScanQuery.ResultFormat;
+import org.apache.druid.query.sql.SleepSqlTest.SleepComponentSupplier;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.filtration.Filtration;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.junit.jupiter.api.Test;
+@SqlTestFrameWorkModule(SleepComponentSupplier.class)
public class SleepSqlTest extends BaseCalciteQueryTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ public static class SleepComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new SleepModule());
+ public SleepComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new SleepModule());
+ }
}
@Test
diff --git a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java
index 5a18758..b75616c 100644
--- a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java
+++ b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java
@@ -721,32 +721,33 @@
(QueryToolChest) toolChest
);
- QueryRunner<ResultRow> queryRunnerForSegments = new FinalizeResultsQueryRunner<>(
- toolChest.mergeResults(
- (queryPlus, responseContext) -> Sequences
- .simple(
- ImmutableList.of(
- Sequences.map(
- segment1Runner
- .run(GroupByQueryRunnerTestHelper.populateResourceId(queryPlus), responseContext),
- toolChest.makePreComputeManipulatorFn(
- (GroupByQuery) queryPlus.getQuery(),
- MetricManipulatorFns.deserializing()
- )
- ),
- Sequences.map(
- segment2Runner
- .run(GroupByQueryRunnerTestHelper.populateResourceId(queryPlus), responseContext),
- toolChest.makePreComputeManipulatorFn(
- (GroupByQuery) queryPlus.getQuery(),
- MetricManipulatorFns.deserializing()
- )
+ QueryRunner<ResultRow> baseRunner = toolChest.mergeResults(
+ (queryPlus, responseContext) -> Sequences
+ .simple(
+ ImmutableList.of(
+ Sequences.map(
+ segment1Runner
+ .run(GroupByQueryRunnerTestHelper.populateResourceId(queryPlus), responseContext),
+ toolChest.makePreComputeManipulatorFn(
+ (GroupByQuery) queryPlus.getQuery(),
+ MetricManipulatorFns.deserializing()
+ )
+ ),
+ Sequences.map(
+ segment2Runner
+ .run(GroupByQueryRunnerTestHelper.populateResourceId(queryPlus), responseContext),
+ toolChest.makePreComputeManipulatorFn(
+ (GroupByQuery) queryPlus.getQuery(),
+ MetricManipulatorFns.deserializing()
)
)
)
- .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering()),
- true
- ),
+ )
+ .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering()),
+ true
+ );
+ QueryRunner<ResultRow> queryRunnerForSegments = new FinalizeResultsQueryRunner<>(
+ baseRunner,
(QueryToolChest) toolChest
);
GroupingEngine groupingEngine = ((GroupByQueryRunnerFactory) groupByFactory).getGroupingEngine();
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
index 854ac22..03db1cf 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java
@@ -27,21 +27,18 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
-import com.google.inject.Injector;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.error.DruidException;
import org.apache.druid.error.DruidException.Category;
import org.apache.druid.error.DruidException.Persona;
import org.apache.druid.error.DruidExceptionMatcher;
-import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.hll.VersionOneHyperLogLogCollector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
-import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.Evals;
import org.apache.druid.query.DataSource;
@@ -50,7 +47,6 @@
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryDataSource;
-import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.UnionDataSource;
import org.apache.druid.query.aggregation.AggregatorFactory;
@@ -73,7 +69,6 @@
import org.apache.druid.query.filter.TypedInFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.having.DimFilterHavingSpec;
-import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.ordering.StringComparator;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.scan.ScanQuery;
@@ -85,10 +80,7 @@
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.join.JoinType;
-import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
-import org.apache.druid.server.QueryLifecycleFactory;
-import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ForbiddenException;
@@ -99,21 +91,13 @@
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
-import org.apache.druid.sql.calcite.planner.PlannerFactory;
-import org.apache.druid.sql.calcite.rule.ExtensionCalciteRuleProvider;
import org.apache.druid.sql.calcite.run.EngineFeature;
-import org.apache.druid.sql.calcite.run.SqlEngine;
-import org.apache.druid.sql.calcite.schema.DruidSchemaManager;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerFixture;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardPlannerComponentSupplier;
-import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.sql.http.SqlParameter;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
@@ -139,7 +123,6 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -154,8 +137,8 @@
* A base class for SQL query testing. It sets up query execution environment, provides useful helper methods,
* and populates data using {@link CalciteTests#createMockWalker}.
*/
+@SqlTestFrameWorkModule(StandardComponentSupplier.class)
public class BaseCalciteQueryTest extends CalciteTestBase
- implements QueryComponentSupplier, PlannerComponentSupplier
{
public static final double ASSERTION_EPSILON = 1e-5;
public static String NULL_STRING;
@@ -295,26 +278,11 @@
public static final Map<String, Object> OUTER_LIMIT_CONTEXT = new HashMap<>(QUERY_CONTEXT_DEFAULT);
- @Nullable
- public final SqlEngine engine0;
final boolean useDefault = NullHandling.replaceWithDefault();
public boolean cannotVectorize = false;
public boolean skipVectorize = false;
- private QueryComponentSupplier baseComponentSupplier;
- public PlannerComponentSupplier basePlannerComponentSupplier = new StandardPlannerComponentSupplier();
-
- public BaseCalciteQueryTest()
- {
- this(null);
- }
-
- public BaseCalciteQueryTest(@Nullable final SqlEngine engine)
- {
- this.engine0 = engine;
- }
-
static {
TIMESERIES_CONTEXT_LOS_ANGELES.put(QueryContexts.CTX_SQL_QUERY_ID, DUMMY_SQL_ID);
TIMESERIES_CONTEXT_LOS_ANGELES.put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z");
@@ -663,97 +631,6 @@
return queryFrameworkRule.get();
}
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- return baseComponentSupplier.createQuerySegmentWalker(conglomerate, joinableFactory, injector);
- }
-
- @Override
- public SqlEngine createEngine(
- final QueryLifecycleFactory qlf,
- final ObjectMapper queryJsonMapper,
- Injector injector
- )
- {
- if (engine0 == null) {
- return baseComponentSupplier.createEngine(qlf, queryJsonMapper, injector);
- } else {
- return engine0;
- }
- }
-
- @Override
- public void gatherProperties(Properties properties)
- {
- baseComponentSupplier = new StandardComponentSupplier(newTempFolder());
- baseComponentSupplier.gatherProperties(properties);
- }
-
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
- {
- baseComponentSupplier.configureGuice(builder);
- }
-
- @Override
- public QueryRunnerFactoryConglomerate createCongolmerate(Builder builder, Closer closer)
- {
- return baseComponentSupplier.createCongolmerate(builder, closer);
- }
-
- @Override
- public void configureJsonMapper(ObjectMapper mapper)
- {
- baseComponentSupplier.configureJsonMapper(mapper);
- }
-
- @Override
- public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
- {
- return baseComponentSupplier.createJoinableFactoryWrapper(lookupProvider);
- }
-
- @Override
- public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
- {
- baseComponentSupplier.finalizeTestFramework(sqlTestFramework);
- }
-
- @Override
- public Set<ExtensionCalciteRuleProvider> extensionCalciteRules()
- {
- return basePlannerComponentSupplier.extensionCalciteRules();
- }
-
- @Override
- public ViewManager createViewManager()
- {
- return basePlannerComponentSupplier.createViewManager();
- }
-
- @Override
- public void populateViews(ViewManager viewManager, PlannerFactory plannerFactory)
- {
- basePlannerComponentSupplier.populateViews(viewManager, plannerFactory);
- }
-
- @Override
- public DruidSchemaManager createSchemaManager()
- {
- return basePlannerComponentSupplier.createSchemaManager();
- }
-
- @Override
- public void finalizePlanner(PlannerFixture plannerFixture)
- {
- basePlannerComponentSupplier.finalizePlanner(plannerFixture);
- }
-
public void assumeFeatureAvailable(EngineFeature feature)
{
boolean featureAvailable = queryFramework().engine().featureAvailable(feature);
@@ -1020,7 +897,7 @@
@Override
public PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
- return queryFramework().plannerFixture(BaseCalciteQueryTest.this, plannerConfig, authConfig);
+ return queryFramework().plannerFixture(plannerConfig, authConfig);
}
@Override
@@ -1322,7 +1199,7 @@
AuthConfig authConfig
)
{
- return queryFramework().plannerFixture(this, plannerConfig, authConfig).statementFactory();
+ return queryFramework().plannerFixture(plannerConfig, authConfig).statementFactory();
}
protected void cannotVectorize()
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java
index 949e6b9..b90721a 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteArraysQueryTest.java
@@ -69,8 +69,11 @@
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.join.JoinType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
+import org.apache.druid.sql.calcite.CalciteArraysQueryTest.ArraysComponentSupplier;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.Assert;
import org.junit.jupiter.api.Test;
@@ -83,6 +86,7 @@
/**
* Tests for array functions and array types
*/
+@SqlTestFramework.SqlTestFrameWorkModule(ArraysComponentSupplier.class)
public class CalciteArraysQueryTest extends BaseCalciteQueryTest
{
private static final Map<String, Object> QUERY_CONTEXT_UNNEST =
@@ -115,11 +119,19 @@
}
}
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class ArraysComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new NestedDataModule());
+ public ArraysComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new NestedDataModule());
+ }
}
// test some query stuffs, sort of limited since no native array column types so either need to use constructor or
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCatalogIngestionDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCatalogIngestionDmlTest.java
index 4715096..d05e618 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCatalogIngestionDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteCatalogIngestionDmlTest.java
@@ -42,6 +42,7 @@
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.sql.calcite.CalciteCatalogIngestionDmlTest.CatalogIngestionDmlComponentSupplier;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.external.Externals;
import org.apache.druid.sql.calcite.filtration.Filtration;
@@ -49,8 +50,10 @@
import org.apache.druid.sql.calcite.table.DatasourceTable;
import org.apache.druid.sql.calcite.table.DruidTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
import org.junit.jupiter.api.Test;
+@SqlTestFrameWorkModule(CatalogIngestionDmlComponentSupplier.class)
public abstract class CalciteCatalogIngestionDmlTest extends CalciteIngestionDmlTest
{
private final String operationName;
@@ -65,184 +68,192 @@
public abstract String getOperationName();
public abstract String getDmlPrefixPattern();
- private static final ObjectMapper MAPPER = new DefaultObjectMapper();
- public static ImmutableMap<String, DatasourceTable> RESOLVED_TABLES = ImmutableMap.of(
- "hourDs", new DatasourceTable(
- RowSignature.builder().addTimeColumn().build(),
- new DatasourceTable.PhysicalDatasourceMetadata(
- new TableDataSource("hourDs"),
- RowSignature.builder().addTimeColumn().build(),
- false,
- false
- ),
- new DatasourceTable.EffectiveMetadata(
- new DatasourceFacade(new ResolvedTable(
- new TableDefn(
- "foo",
- DatasourceDefn.TABLE_TYPE,
- null,
- null
- ),
- new TableSpec(
- DatasourceDefn.TABLE_TYPE,
- ImmutableMap.of(DatasourceDefn.SEGMENT_GRANULARITY_PROPERTY, "PT1H"),
- ImmutableList.of(
- new ColumnSpec("__time", Columns.TIME_COLUMN, null)
- )
- ),
- MAPPER
- )),
- DatasourceTable.EffectiveMetadata.toEffectiveColumns(
- RowSignature.builder()
- .addTimeColumn()
- .build()),
- false
- )
- ),
- "noPartitonedBy", new DatasourceTable(
- RowSignature.builder().addTimeColumn().build(),
- new DatasourceTable.PhysicalDatasourceMetadata(
- new TableDataSource("hourDs"),
- RowSignature.builder().addTimeColumn().build(),
- false,
- false
- ),
- new DatasourceTable.EffectiveMetadata(
- new DatasourceFacade(new ResolvedTable(
- new TableDefn(
- "foo",
- DatasourceDefn.TABLE_TYPE,
- null,
- null
- ),
- new TableSpec(
- DatasourceDefn.TABLE_TYPE,
- ImmutableMap.of(),
- ImmutableList.of(
- new ColumnSpec("__time", Columns.TIME_COLUMN, null)
- )
- ),
- MAPPER
- )),
- DatasourceTable.EffectiveMetadata.toEffectiveColumns(
- RowSignature.builder()
- .addTimeColumn()
- .build()),
- false
- )
- ),
- "strictTableWithNoDefinedSchema", new DatasourceTable(
- RowSignature.builder().build(),
- new DatasourceTable.PhysicalDatasourceMetadata(
- new TableDataSource("strictTableWithNoDefinedSchema"),
- RowSignature.builder().build(),
- false,
- false
- ),
- new DatasourceTable.EffectiveMetadata(
- new DatasourceFacade(new ResolvedTable(
- new TableDefn(
- "strictTableWithNoDefinedSchema",
- DatasourceDefn.TABLE_TYPE,
- null,
- null
- ),
- new TableSpec(DatasourceDefn.TABLE_TYPE, ImmutableMap.of(DatasourceDefn.SEALED_PROPERTY, true), null),
- MAPPER
- )),
- DatasourceTable.EffectiveMetadata.toEffectiveColumns(RowSignature.builder().build()),
- false
- )
- ),
- "foo", new DatasourceTable(
- FOO_TABLE_SIGNATURE,
- new DatasourceTable.PhysicalDatasourceMetadata(
- new TableDataSource("foo"),
- FOO_TABLE_SIGNATURE,
- false,
- false
- ),
- new DatasourceTable.EffectiveMetadata(
- new DatasourceFacade(new ResolvedTable(
- new TableDefn(
- "foo",
- DatasourceDefn.TABLE_TYPE,
- null,
- null
- ),
- new TableSpec(
- DatasourceDefn.TABLE_TYPE,
- ImmutableMap.of(),
- ImmutableList.of(
- new ColumnSpec("__time", Columns.TIME_COLUMN, null),
- new ColumnSpec("dim1", Columns.STRING, null),
- new ColumnSpec("dim2", Columns.STRING, null),
- new ColumnSpec("dim3", Columns.STRING, null),
- new ColumnSpec("cnt", Columns.LONG, null),
- new ColumnSpec("m1", Columns.FLOAT, null),
- new ColumnSpec("m2", Columns.DOUBLE, null),
- new ColumnSpec("unique_dim1", HyperUniquesAggregatorFactory.TYPE.asTypeString(), null)
- )
- ),
- MAPPER
- )),
- DatasourceTable.EffectiveMetadata.toEffectiveColumns(FOO_TABLE_SIGNATURE),
- false
- )
- ),
- "fooSealed", new DatasourceTable(
- FOO_TABLE_SIGNATURE,
- new DatasourceTable.PhysicalDatasourceMetadata(
- new TableDataSource("foo"),
- FOO_TABLE_SIGNATURE,
- false,
- false
- ),
- new DatasourceTable.EffectiveMetadata(
- new DatasourceFacade(new ResolvedTable(
- new TableDefn(
- "foo",
- DatasourceDefn.TABLE_TYPE,
- null,
- null
- ),
- new TableSpec(
- DatasourceDefn.TABLE_TYPE,
- ImmutableMap.of(DatasourceDefn.SEALED_PROPERTY, true),
- ImmutableList.of(
- new ColumnSpec("__time", Columns.TIME_COLUMN, null),
- new ColumnSpec("dim1", Columns.STRING, null),
- new ColumnSpec("dim2", Columns.STRING, null),
- new ColumnSpec("dim3", Columns.STRING, null),
- new ColumnSpec("cnt", Columns.LONG, null),
- new ColumnSpec("m1", Columns.FLOAT, null),
- new ColumnSpec("m2", Columns.DOUBLE, null)
- )
- ),
- MAPPER
- )),
- DatasourceTable.EffectiveMetadata.toEffectiveColumns(FOO_TABLE_SIGNATURE),
- false
- )
- )
- );
-
- @Override
- public CatalogResolver createCatalogResolver()
+ public static class CatalogIngestionDmlComponentSupplier extends IngestionDmlComponentSupplier
{
- return new CatalogResolver.NullCatalogResolver() {
- @Override
- public DruidTable resolveDatasource(
- final String tableName,
- final DatasourceTable.PhysicalDatasourceMetadata dsMetadata
- )
- {
- if (RESOLVED_TABLES.get(tableName) != null) {
- return RESOLVED_TABLES.get(tableName);
+ public CatalogIngestionDmlComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ private static final ObjectMapper MAPPER = new DefaultObjectMapper();
+ public static ImmutableMap<String, DatasourceTable> RESOLVED_TABLES = ImmutableMap.of(
+ "hourDs", new DatasourceTable(
+ RowSignature.builder().addTimeColumn().build(),
+ new DatasourceTable.PhysicalDatasourceMetadata(
+ new TableDataSource("hourDs"),
+ RowSignature.builder().addTimeColumn().build(),
+ false,
+ false
+ ),
+ new DatasourceTable.EffectiveMetadata(
+ new DatasourceFacade(new ResolvedTable(
+ new TableDefn(
+ "foo",
+ DatasourceDefn.TABLE_TYPE,
+ null,
+ null
+ ),
+ new TableSpec(
+ DatasourceDefn.TABLE_TYPE,
+ ImmutableMap.of(DatasourceDefn.SEGMENT_GRANULARITY_PROPERTY, "PT1H"),
+ ImmutableList.of(
+ new ColumnSpec("__time", Columns.TIME_COLUMN, null)
+ )
+ ),
+ MAPPER
+ )),
+ DatasourceTable.EffectiveMetadata.toEffectiveColumns(
+ RowSignature.builder()
+ .addTimeColumn()
+ .build()),
+ false
+ )
+ ),
+ "noPartitonedBy", new DatasourceTable(
+ RowSignature.builder().addTimeColumn().build(),
+ new DatasourceTable.PhysicalDatasourceMetadata(
+ new TableDataSource("hourDs"),
+ RowSignature.builder().addTimeColumn().build(),
+ false,
+ false
+ ),
+ new DatasourceTable.EffectiveMetadata(
+ new DatasourceFacade(new ResolvedTable(
+ new TableDefn(
+ "foo",
+ DatasourceDefn.TABLE_TYPE,
+ null,
+ null
+ ),
+ new TableSpec(
+ DatasourceDefn.TABLE_TYPE,
+ ImmutableMap.of(),
+ ImmutableList.of(
+ new ColumnSpec("__time", Columns.TIME_COLUMN, null)
+ )
+ ),
+ MAPPER
+ )),
+ DatasourceTable.EffectiveMetadata.toEffectiveColumns(
+ RowSignature.builder()
+ .addTimeColumn()
+ .build()),
+ false
+ )
+ ),
+ "strictTableWithNoDefinedSchema", new DatasourceTable(
+ RowSignature.builder().build(),
+ new DatasourceTable.PhysicalDatasourceMetadata(
+ new TableDataSource("strictTableWithNoDefinedSchema"),
+ RowSignature.builder().build(),
+ false,
+ false
+ ),
+ new DatasourceTable.EffectiveMetadata(
+ new DatasourceFacade(new ResolvedTable(
+ new TableDefn(
+ "strictTableWithNoDefinedSchema",
+ DatasourceDefn.TABLE_TYPE,
+ null,
+ null
+ ),
+ new TableSpec(DatasourceDefn.TABLE_TYPE, ImmutableMap.of(DatasourceDefn.SEALED_PROPERTY, true), null),
+ MAPPER
+ )),
+ DatasourceTable.EffectiveMetadata.toEffectiveColumns(RowSignature.builder().build()),
+ false
+ )
+ ),
+ "foo", new DatasourceTable(
+ FOO_TABLE_SIGNATURE,
+ new DatasourceTable.PhysicalDatasourceMetadata(
+ new TableDataSource("foo"),
+ FOO_TABLE_SIGNATURE,
+ false,
+ false
+ ),
+ new DatasourceTable.EffectiveMetadata(
+ new DatasourceFacade(new ResolvedTable(
+ new TableDefn(
+ "foo",
+ DatasourceDefn.TABLE_TYPE,
+ null,
+ null
+ ),
+ new TableSpec(
+ DatasourceDefn.TABLE_TYPE,
+ ImmutableMap.of(),
+ ImmutableList.of(
+ new ColumnSpec("__time", Columns.TIME_COLUMN, null),
+ new ColumnSpec("dim1", Columns.STRING, null),
+ new ColumnSpec("dim2", Columns.STRING, null),
+ new ColumnSpec("dim3", Columns.STRING, null),
+ new ColumnSpec("cnt", Columns.LONG, null),
+ new ColumnSpec("m1", Columns.FLOAT, null),
+ new ColumnSpec("m2", Columns.DOUBLE, null),
+ new ColumnSpec("unique_dim1", HyperUniquesAggregatorFactory.TYPE.asTypeString(), null)
+ )
+ ),
+ MAPPER
+ )),
+ DatasourceTable.EffectiveMetadata.toEffectiveColumns(FOO_TABLE_SIGNATURE),
+ false
+ )
+ ),
+ "fooSealed", new DatasourceTable(
+ FOO_TABLE_SIGNATURE,
+ new DatasourceTable.PhysicalDatasourceMetadata(
+ new TableDataSource("foo"),
+ FOO_TABLE_SIGNATURE,
+ false,
+ false
+ ),
+ new DatasourceTable.EffectiveMetadata(
+ new DatasourceFacade(new ResolvedTable(
+ new TableDefn(
+ "foo",
+ DatasourceDefn.TABLE_TYPE,
+ null,
+ null
+ ),
+ new TableSpec(
+ DatasourceDefn.TABLE_TYPE,
+ ImmutableMap.of(DatasourceDefn.SEALED_PROPERTY, true),
+ ImmutableList.of(
+ new ColumnSpec("__time", Columns.TIME_COLUMN, null),
+ new ColumnSpec("dim1", Columns.STRING, null),
+ new ColumnSpec("dim2", Columns.STRING, null),
+ new ColumnSpec("dim3", Columns.STRING, null),
+ new ColumnSpec("cnt", Columns.LONG, null),
+ new ColumnSpec("m1", Columns.FLOAT, null),
+ new ColumnSpec("m2", Columns.DOUBLE, null)
+ )
+ ),
+ MAPPER
+ )),
+ DatasourceTable.EffectiveMetadata.toEffectiveColumns(FOO_TABLE_SIGNATURE),
+ false
+ )
+ )
+ );
+
+ @Override
+ public CatalogResolver createCatalogResolver()
+ {
+ return new CatalogResolver.NullCatalogResolver() {
+ @Override
+ public DruidTable resolveDatasource(
+ final String tableName,
+ final DatasourceTable.PhysicalDatasourceMetadata dsMetadata
+ )
+ {
+ if (RESOLVED_TABLES.get(tableName) != null) {
+ return RESOLVED_TABLES.get(tableName);
+ }
+ return dsMetadata == null ? null : new DatasourceTable(dsMetadata);
}
- return dsMetadata == null ? null : new DatasourceTable(dsMetadata);
- }
- };
+ };
+ }
}
/**
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExportTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExportTest.java
index c78aa53..d91da5c 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExportTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExportTest.java
@@ -34,8 +34,10 @@
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.server.security.ForbiddenException;
+import org.apache.druid.sql.calcite.CalciteExportTest.ExportComponentSupplier;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.apache.druid.sql.destination.ExportDestination;
import org.apache.druid.sql.http.SqlParameter;
import org.apache.druid.storage.StorageConfig;
@@ -51,48 +53,57 @@
import java.util.Collections;
import java.util.List;
+@SqlTestFramework.SqlTestFrameWorkModule(ExportComponentSupplier.class)
public class CalciteExportTest extends CalciteIngestionDmlTest
{
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class ExportComponentSupplier extends IngestionDmlComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(
- new DruidModule()
- {
- @Override
- public void configure(Binder binder)
- {
- }
-
- @Override
- public List<? extends Module> getJacksonModules()
- {
- return ImmutableList.of(
- new SimpleModule(StorageConnectorProvider.class.getSimpleName()).registerSubtypes(
- new NamedType(LocalFileExportStorageProvider.class, CalciteTests.FORBIDDEN_DESTINATION)
- )
- );
- }
- });
- builder.addModule(new DruidModule()
+ public ExportComponentSupplier(TempDirProducer tempFolderProducer)
{
- @Override
- public List<? extends Module> getJacksonModules()
- {
- return ImmutableList.of(
- new SimpleModule(StorageConnector.class.getSimpleName())
- .registerSubtypes(LocalFileStorageConnectorProvider.class)
- .registerSubtypes(LocalFileExportStorageProvider.class)
- );
- }
+ super(tempFolderProducer);
+ }
- @Override
- public void configure(Binder binder)
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(
+ new DruidModule()
+ {
+ @Override
+ public void configure(Binder binder)
+ {
+ }
+
+ @Override
+ public List<? extends Module> getJacksonModules()
+ {
+ return ImmutableList.of(
+ new SimpleModule(StorageConnectorProvider.class.getSimpleName()).registerSubtypes(
+ new NamedType(LocalFileExportStorageProvider.class, CalciteTests.FORBIDDEN_DESTINATION)
+ )
+ );
+ }
+ });
+ builder.addModule(new DruidModule()
{
- binder.bind(StorageConfig.class).toInstance(new StorageConfig("/tmp/export"));
- }
- });
+ @Override
+ public List<? extends Module> getJacksonModules()
+ {
+ return ImmutableList.of(
+ new SimpleModule(StorageConnector.class.getSimpleName())
+ .registerSubtypes(LocalFileStorageConnectorProvider.class)
+ .registerSubtypes(LocalFileExportStorageProvider.class)
+ );
+ }
+
+ @Override
+ public void configure(Binder binder)
+ {
+ binder.bind(StorageConfig.class).toInstance(new StorageConfig("/tmp/export"));
+ }
+ });
+ }
}
// Disabled until replace supports external destinations. To be enabled after that point.
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
index 29c375b..f24e203 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
@@ -30,6 +30,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
+import com.google.inject.Injector;
import org.apache.druid.data.input.AbstractInputSource;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputSplit;
@@ -48,10 +49,12 @@
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlQueryPlus;
+import org.apache.druid.sql.calcite.CalciteIngestionDmlTest.IngestionDmlComponentSupplier;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.calcite.external.HttpOperatorConversion;
@@ -60,7 +63,10 @@
import org.apache.druid.sql.calcite.parser.DruidSqlInsert;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.SqlTestFrameWorkModule;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.guice.SqlBindings;
import org.apache.druid.sql.http.SqlParameter;
import org.hamcrest.CoreMatchers;
@@ -71,6 +77,7 @@
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
@@ -83,6 +90,7 @@
import static org.hamcrest.MatcherAssert.assertThat;
+@SqlTestFrameWorkModule(IngestionDmlComponentSupplier.class)
public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
{
protected static final Map<String, Object> DEFAULT_CONTEXT =
@@ -120,66 +128,75 @@
protected boolean didTest = false;
- public CalciteIngestionDmlTest()
+ static class IngestionDmlComponentSupplier extends StandardComponentSupplier
{
- super(IngestionTestSqlEngine.INSTANCE);
- }
+ public IngestionDmlComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
- {
- super.configureGuice(builder);
+ @Override
+ public SqlEngine createEngine(QueryLifecycleFactory qlf, ObjectMapper queryJsonMapper, Injector injector)
+ {
+ return IngestionTestSqlEngine.INSTANCE;
+ }
- builder.addModule(new DruidModule() {
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
- // Clone of MSQExternalDataSourceModule since it is not
- // visible here.
- @Override
- public List<? extends Module> getJacksonModules()
- {
- return Collections.singletonList(
- new SimpleModule(getClass().getSimpleName())
- .registerSubtypes(ExternalDataSource.class)
- );
- }
+ builder.addModule(new DruidModule() {
- @Override
- public void configure(Binder binder)
- {
- // Nothing to do.
- }
- });
+ // Clone of MSQExternalDataSourceModule since it is not
+ // visible here.
+ @Override
+ public List<? extends Module> getJacksonModules()
+ {
+ return Collections.singletonList(
+ new SimpleModule(getClass().getSimpleName())
+ .registerSubtypes(ExternalDataSource.class)
+ );
+ }
- builder.addModule(new DruidModule() {
+ @Override
+ public void configure(Binder binder)
+ {
+ // Nothing to do.
+ }
+ });
- // Partial clone of MsqSqlModule, since that module is not
- // visible to this one.
+ builder.addModule(new DruidModule() {
- @Override
- public List<? extends Module> getJacksonModules()
- {
- // We want this module to bring input sources along for the ride.
- List<Module> modules = new ArrayList<>(new InputSourceModule().getJacksonModules());
- modules.add(new SimpleModule("test-module").registerSubtypes(TestFileInputSource.class));
- return modules;
- }
+ // Partial clone of MsqSqlModule, since that module is not
+ // visible to this one.
- @Override
- public void configure(Binder binder)
- {
- // We want this module to bring InputSourceModule along for the ride.
- binder.install(new InputSourceModule());
+ @Override
+ public List<? extends Module> getJacksonModules()
+ {
+ // We want this module to bring input sources along for the ride.
+ List<Module> modules = new ArrayList<>(new InputSourceModule().getJacksonModules());
+ modules.add(new SimpleModule("test-module").registerSubtypes(TestFileInputSource.class));
+ return modules;
+ }
- // Set up the EXTERN macro.
- SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
+ @Override
+ public void configure(Binder binder)
+ {
+ // We want this module to bring InputSourceModule along for the ride.
+ binder.install(new InputSourceModule());
- // Enable the extended table functions for testing even though these
- // are not enabled in production in Druid 26.
- SqlBindings.addOperatorConversion(binder, HttpOperatorConversion.class);
- SqlBindings.addOperatorConversion(binder, InlineOperatorConversion.class);
- SqlBindings.addOperatorConversion(binder, LocalOperatorConversion.class);
- }
- });
+ // Set up the EXTERN macro.
+ SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
+
+ // Enable the extended table functions for testing even though these
+ // are not enabled in production in Druid 26.
+ SqlBindings.addOperatorConversion(binder, HttpOperatorConversion.class);
+ SqlBindings.addOperatorConversion(binder, InlineOperatorConversion.class);
+ SqlBindings.addOperatorConversion(binder, LocalOperatorConversion.class);
+ }
+ });
+ }
}
@AfterEach
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java
index dceddba..e93a602 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteNestedDataQueryTest.java
@@ -71,7 +71,10 @@
import org.apache.druid.segment.virtual.NestedFieldVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
+import org.apache.druid.sql.calcite.CalciteNestedDataQueryTest.NestedComponentSupplier;
import org.apache.druid.sql.calcite.filtration.Filtration;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
@@ -84,6 +87,7 @@
import java.util.List;
import java.util.stream.Collectors;
+@SqlTestFramework.SqlTestFrameWorkModule(NestedComponentSupplier.class)
public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
{
private static final String DATA_SOURCE = "nested";
@@ -177,219 +181,227 @@
private static final List<InputRow> ROWS_MIX =
RAW_ROWS.stream().map(raw -> TestDataBuilder.createRow(raw, JSON_AND_SCALAR_MIX)).collect(Collectors.toList());
- @Override
- public void configureGuice(DruidInjectorBuilder builder)
+ protected static class NestedComponentSupplier extends StandardComponentSupplier
{
- super.configureGuice(builder);
- builder.addModule(new NestedDataModule());
- }
+ public NestedComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- @SuppressWarnings("resource")
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- final QueryRunnerFactoryConglomerate conglomerate,
- final JoinableFactoryWrapper joinableFactory,
- final Injector injector
- )
- {
- NestedDataModule.registerHandlersAndSerde();
- final QueryableIndex index =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(ROWS)
- .buildMMappedIndex();
+ @Override
+ public void configureGuice(DruidInjectorBuilder builder)
+ {
+ super.configureGuice(builder);
+ builder.addModule(new NestedDataModule());
+ }
- final QueryableIndex indexMix11 =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(ROWS)
- .buildMMappedIndex();
+ @SuppressWarnings("resource")
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ final QueryRunnerFactoryConglomerate conglomerate,
+ final JoinableFactoryWrapper joinableFactory,
+ final Injector injector
+ )
+ {
+ NestedDataModule.registerHandlersAndSerde();
+ final QueryableIndex index =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS)
+ .buildMMappedIndex();
+
+ final QueryableIndex indexMix11 =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS)
+ .buildMMappedIndex();
- final QueryableIndex indexMix12 =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withDimensionsSpec(JSON_AND_SCALAR_MIX.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(ROWS_MIX)
- .buildMMappedIndex();
+ final QueryableIndex indexMix12 =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withDimensionsSpec(JSON_AND_SCALAR_MIX.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS_MIX)
+ .buildMMappedIndex();
- final QueryableIndex indexMix21 =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withDimensionsSpec(JSON_AND_SCALAR_MIX.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(ROWS_MIX)
- .buildMMappedIndex();
+ final QueryableIndex indexMix21 =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withDimensionsSpec(JSON_AND_SCALAR_MIX.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS_MIX)
+ .buildMMappedIndex();
- final QueryableIndex indexMix22 =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
- .withRollup(false)
- .build()
- )
- .rows(ROWS)
- .buildMMappedIndex();
+ final QueryableIndex indexMix22 =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withDimensionsSpec(ALL_JSON_COLUMNS.getDimensionsSpec())
+ .withRollup(false)
+ .build()
+ )
+ .rows(ROWS)
+ .buildMMappedIndex();
- final QueryableIndex indexArrays =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
- .withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withRollup(false)
- .build()
- )
- .inputSource(
- ResourceInputSource.of(
- NestedDataTestUtils.class.getClassLoader(),
- NestedDataTestUtils.ARRAY_TYPES_DATA_FILE
- )
- )
- .inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
- .inputTmpDir(newTempFolder())
- .buildMMappedIndex();
+ final QueryableIndex indexArrays =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
+ .withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withRollup(false)
+ .build()
+ )
+ .inputSource(
+ ResourceInputSource.of(
+ NestedDataTestUtils.class.getClassLoader(),
+ NestedDataTestUtils.ARRAY_TYPES_DATA_FILE
+ )
+ )
+ .inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
+ .inputTmpDir(tempDirProducer.newTempFolder())
+ .buildMMappedIndex();
- final QueryableIndex indexAllTypesAuto =
- IndexBuilder.create()
- .tmpDir(newTempFolder())
- .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(
- new IncrementalIndexSchema.Builder()
- .withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
- .withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
- .withMetrics(
- new CountAggregatorFactory("cnt")
- )
- .withRollup(false)
- .build()
- )
- .inputSource(
- ResourceInputSource.of(
- NestedDataTestUtils.class.getClassLoader(),
- NestedDataTestUtils.ALL_TYPES_TEST_DATA_FILE
- )
- )
- .inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
- .inputTmpDir(newTempFolder())
- .buildMMappedIndex();
+ final QueryableIndex indexAllTypesAuto =
+ IndexBuilder.create()
+ .tmpDir(tempDirProducer.newTempFolder())
+ .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(
+ new IncrementalIndexSchema.Builder()
+ .withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
+ .withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
+ .withMetrics(
+ new CountAggregatorFactory("cnt")
+ )
+ .withRollup(false)
+ .build()
+ )
+ .inputSource(
+ ResourceInputSource.of(
+ NestedDataTestUtils.class.getClassLoader(),
+ NestedDataTestUtils.ALL_TYPES_TEST_DATA_FILE
+ )
+ )
+ .inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
+ .inputTmpDir(tempDirProducer.newTempFolder())
+ .buildMMappedIndex();
- SpecificSegmentsQuerySegmentWalker walker = SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate);
- walker.add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- index
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_MIXED)
- .interval(indexMix11.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- indexMix11
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_MIXED)
- .interval(indexMix12.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(1))
- .size(0)
- .build(),
- indexMix12
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_MIXED_2)
- .interval(indexMix21.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(0))
- .size(0)
- .build(),
- indexMix21
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_MIXED_2)
- .interval(index.getDataInterval())
- .version("1")
- .shardSpec(new LinearShardSpec(1))
- .size(0)
- .build(),
- indexMix22
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_ARRAYS)
- .version("1")
- .interval(indexArrays.getDataInterval())
- .shardSpec(new LinearShardSpec(1))
- .size(0)
- .build(),
- indexArrays
- ).add(
- DataSegment.builder()
- .dataSource(DATA_SOURCE_ALL)
- .version("1")
- .interval(indexAllTypesAuto.getDataInterval())
- .shardSpec(new LinearShardSpec(1))
- .size(0)
- .build(),
- indexAllTypesAuto
- );
+ SpecificSegmentsQuerySegmentWalker walker = SpecificSegmentsQuerySegmentWalker.createWalker(injector, conglomerate);
+ walker.add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ index
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_MIXED)
+ .interval(indexMix11.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ indexMix11
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_MIXED)
+ .interval(indexMix12.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(1))
+ .size(0)
+ .build(),
+ indexMix12
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_MIXED_2)
+ .interval(indexMix21.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(0))
+ .size(0)
+ .build(),
+ indexMix21
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_MIXED_2)
+ .interval(index.getDataInterval())
+ .version("1")
+ .shardSpec(new LinearShardSpec(1))
+ .size(0)
+ .build(),
+ indexMix22
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_ARRAYS)
+ .version("1")
+ .interval(indexArrays.getDataInterval())
+ .shardSpec(new LinearShardSpec(1))
+ .size(0)
+ .build(),
+ indexArrays
+ ).add(
+ DataSegment.builder()
+ .dataSource(DATA_SOURCE_ALL)
+ .version("1")
+ .interval(indexAllTypesAuto.getDataInterval())
+ .shardSpec(new LinearShardSpec(1))
+ .size(0)
+ .build(),
+ indexAllTypesAuto
+ );
- return walker;
+ return walker;
+ }
}
@Test
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index 661785c..c1ea79b 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -122,6 +122,8 @@
import org.apache.druid.sql.calcite.rel.CannotBuildQueryException;
import org.apache.druid.sql.calcite.run.EngineFeature;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.hamcrest.CoreMatchers;
import org.joda.time.DateTime;
@@ -148,6 +150,7 @@
import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
+@SqlTestFramework.SqlTestFrameWorkModule(value = StandardComponentSupplier.class)
public class CalciteQueryTest extends BaseCalciteQueryTest
{
@Test
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteScanSignatureTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteScanSignatureTest.java
index 1d3a042..ed205b8 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteScanSignatureTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteScanSignatureTest.java
@@ -29,6 +29,7 @@
import org.apache.druid.query.scan.ScanQuery;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.server.QueryLifecycleFactory;
+import org.apache.druid.sql.calcite.CalciteScanSignatureTest.ScanSignatureComponentSupplier;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rel.DruidQuery;
@@ -36,19 +37,17 @@
import org.apache.druid.sql.calcite.run.QueryMaker;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.util.CalciteTests;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.destination.IngestDestination;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
+@SqlTestFramework.SqlTestFrameWorkModule(ScanSignatureComponentSupplier.class)
public class CalciteScanSignatureTest extends BaseCalciteQueryTest
{
- public CalciteScanSignatureTest()
- {
- super(null);
- }
-
@Test
public void testScanSignature()
{
@@ -111,67 +110,75 @@
);
}
- @Override
- public SqlEngine createEngine(
- QueryLifecycleFactory qlf,
- ObjectMapper queryJsonMapper,
- Injector injector
- )
+ static class ScanSignatureComponentSupplier extends StandardComponentSupplier
{
- // Create an engine that says yes to EngineFeature.SCAN_NEEDS_SIGNATURE.
- return new ScanSignatureTestSqlEngine(super.createEngine(qlf, queryJsonMapper, injector));
- }
-
- private static class ScanSignatureTestSqlEngine implements SqlEngine
- {
- private final SqlEngine parent;
-
- public ScanSignatureTestSqlEngine(final SqlEngine parent)
+ public ScanSignatureComponentSupplier(TempDirProducer tempFolderProducer)
{
- this.parent = parent;
+ super(tempFolderProducer);
}
@Override
- public String name()
+ public SqlEngine createEngine(
+ QueryLifecycleFactory qlf,
+ ObjectMapper queryJsonMapper,
+ Injector injector
+ )
{
- return getClass().getName();
+ // Create an engine that says yes to EngineFeature.SCAN_NEEDS_SIGNATURE.
+ return new ScanSignatureTestSqlEngine(super.createEngine(qlf, queryJsonMapper, injector));
}
- @Override
- public boolean featureAvailable(EngineFeature feature)
+ private static class ScanSignatureTestSqlEngine implements SqlEngine
{
- return feature == EngineFeature.SCAN_NEEDS_SIGNATURE || parent.featureAvailable(feature);
- }
+ private final SqlEngine parent;
- @Override
- public void validateContext(Map<String, Object> queryContext)
- {
- // No validation.
- }
+ public ScanSignatureTestSqlEngine(final SqlEngine parent)
+ {
+ this.parent = parent;
+ }
- @Override
- public RelDataType resultTypeForSelect(RelDataTypeFactory typeFactory, RelDataType validatedRowType)
- {
- return validatedRowType;
- }
+ @Override
+ public String name()
+ {
+ return getClass().getName();
+ }
- @Override
- public RelDataType resultTypeForInsert(RelDataTypeFactory typeFactory, RelDataType validatedRowType)
- {
- throw new UnsupportedOperationException();
- }
+ @Override
+ public boolean featureAvailable(EngineFeature feature)
+ {
+ return feature == EngineFeature.SCAN_NEEDS_SIGNATURE || parent.featureAvailable(feature);
+ }
- @Override
- public QueryMaker buildQueryMakerForSelect(RelRoot relRoot, PlannerContext plannerContext)
- throws ValidationException
- {
- return parent.buildQueryMakerForSelect(relRoot, plannerContext);
- }
+ @Override
+ public void validateContext(Map<String, Object> queryContext)
+ {
+ // No validation.
+ }
- @Override
- public QueryMaker buildQueryMakerForInsert(IngestDestination destination, RelRoot relRoot, PlannerContext plannerContext)
- {
- throw new UnsupportedOperationException();
+ @Override
+ public RelDataType resultTypeForSelect(RelDataTypeFactory typeFactory, RelDataType validatedRowType)
+ {
+ return validatedRowType;
+ }
+
+ @Override
+ public RelDataType resultTypeForInsert(RelDataTypeFactory typeFactory, RelDataType validatedRowType)
+ {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public QueryMaker buildQueryMakerForSelect(RelRoot relRoot, PlannerContext plannerContext)
+ throws ValidationException
+ {
+ return parent.buildQueryMakerForSelect(relRoot, plannerContext);
+ }
+
+ @Override
+ public QueryMaker buildQueryMakerForInsert(IngestDestination destination, RelRoot relRoot, PlannerContext plannerContext)
+ {
+ throw new UnsupportedOperationException();
+ }
}
}
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteStrictInsertTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteStrictInsertTest.java
index 03045eb..6f2d50b 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteStrictInsertTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteStrictInsertTest.java
@@ -20,9 +20,11 @@
package org.apache.druid.sql.calcite;
import org.apache.druid.error.DruidException;
+import org.apache.druid.sql.calcite.CalciteStrictInsertTest.StrictInsertComponentSupplier;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.CatalogResolver;
import org.apache.druid.sql.calcite.planner.CatalogResolver.NullCatalogResolver;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.junit.jupiter.api.Test;
/**
@@ -30,18 +32,27 @@
* to only work with existing datasources. The strict option is a config option which
* we enable only for this one test.
*/
+@SqlTestFramework.SqlTestFrameWorkModule(StrictInsertComponentSupplier.class)
public class CalciteStrictInsertTest extends CalciteIngestionDmlTest
{
- @Override
- public CatalogResolver createCatalogResolver()
+ static class StrictInsertComponentSupplier extends IngestionDmlComponentSupplier
{
- return new NullCatalogResolver() {
- @Override
- public boolean ingestRequiresExistingTable()
- {
- return true;
- }
- };
+ public StrictInsertComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
+
+ @Override
+ public CatalogResolver createCatalogResolver()
+ {
+ return new NullCatalogResolver() {
+ @Override
+ public boolean ingestRequiresExistingTable()
+ {
+ return true;
+ }
+ };
+ }
}
@Test
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
index 7c73b11..0a2d2f4 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteJoinQueryTest.java
@@ -26,7 +26,6 @@
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
@@ -50,14 +49,13 @@
@Override
protected QueryTestBuilder testBuilder()
{
- PlannerComponentSupplier componentSupplier = this;
CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
{
@Override
public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(componentSupplier, plannerConfig, authConfig);
+ return queryFramework().plannerFixture(plannerConfig, authConfig);
}
};
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
index 1a0ae44..3b003b5 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteQueryTest.java
@@ -25,7 +25,6 @@
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(NotYetSupportedProcessor.class)
@@ -41,14 +40,13 @@
@Override
protected QueryTestBuilder testBuilder()
{
- PlannerComponentSupplier componentSupplier = this;
CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
{
@Override
public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(componentSupplier, plannerConfig, authConfig);
+ return queryFramework().plannerFixture(plannerConfig, authConfig);
}
};
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
index 7ddfc59..746c479 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DecoupledPlanningCalciteUnionQueryTest.java
@@ -25,7 +25,6 @@
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
-import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(NotYetSupportedProcessor.class)
@@ -41,14 +40,13 @@
@Override
protected QueryTestBuilder testBuilder()
{
- PlannerComponentSupplier componentSupplier = this;
CalciteTestConfig testConfig = new CalciteTestConfig(CONTEXT_OVERRIDES)
{
@Override
public SqlTestFramework.PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
plannerConfig = plannerConfig.withOverrides(CONTEXT_OVERRIDES);
- return queryFramework().plannerFixture(componentSupplier, plannerConfig, authConfig);
+ return queryFramework().plannerFixture(plannerConfig, authConfig);
}
};
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java
index 59f7de2..7c2b61b 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java
@@ -53,11 +53,14 @@
import org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.DisableUnless.DisableUnlessRule;
+import org.apache.druid.sql.calcite.DrillWindowQueryTest.DrillComponentSupplier;
import org.apache.druid.sql.calcite.NotYetSupported.Modes;
import org.apache.druid.sql.calcite.NotYetSupported.NotYetSupportedProcessor;
import org.apache.druid.sql.calcite.QueryTestRunner.QueryResults;
import org.apache.druid.sql.calcite.planner.PlannerCaptureHook;
import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.calcite.util.SqlTestFramework;
+import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.joda.time.DateTime;
@@ -109,6 +112,7 @@
* so it is believed that most iteration on tests will happen through the
* CalciteWindowQueryTest instead of this class.
*/
+@SqlTestFramework.SqlTestFrameWorkModule(DrillComponentSupplier.class)
public class DrillWindowQueryTest extends BaseCalciteQueryTest
{
private static final ObjectMapper MAPPER = new DefaultObjectMapper();
@@ -238,104 +242,155 @@
}
}
- @Override
- public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
- QueryRunnerFactoryConglomerate conglomerate,
- JoinableFactoryWrapper joinableFactory,
- Injector injector
- )
+ protected static class DrillComponentSupplier extends StandardComponentSupplier
{
- final SpecificSegmentsQuerySegmentWalker retVal = super.createQuerySegmentWalker(
- conglomerate,
- joinableFactory,
- injector);
+ public DrillComponentSupplier(TempDirProducer tempFolderProducer)
+ {
+ super(tempFolderProducer);
+ }
- attachIndex(
- retVal,
- "tblWnulls.parquet",
- new LongDimensionSchema("c1"),
- new StringDimensionSchema("c2"));
+ @Override
+ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
+ QueryRunnerFactoryConglomerate conglomerate,
+ JoinableFactoryWrapper joinableFactory,
+ Injector injector
+ )
+ {
+ final SpecificSegmentsQuerySegmentWalker retVal = super.createQuerySegmentWalker(
+ conglomerate,
+ joinableFactory,
+ injector);
- // {"col0":1,"col1":65534,"col2":256.0,"col3":1234.9,"col4":73578580,"col5":1393720082338,"col6":421185052800000,"col7":false,"col8":"CA","col9":"AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXZ"}
- attachIndex(
- retVal,
- "allTypsUniq.parquet",
- new LongDimensionSchema("col0"),
- new LongDimensionSchema("col1"),
- new DoubleDimensionSchema("col2"),
- new DoubleDimensionSchema("col3"),
- new LongDimensionSchema("col4"),
- new LongDimensionSchema("col5"),
- new LongDimensionSchema("col6"),
- new StringDimensionSchema("col7"),
- new StringDimensionSchema("col8"),
- new StringDimensionSchema("col9"));
- attachIndex(
- retVal,
- "smlTbl.parquet",
- // "col_int": 8122,
- new LongDimensionSchema("col_int"),
- // "col_bgint": 817200,
- new LongDimensionSchema("col_bgint"),
- // "col_char_2": "IN",
- new StringDimensionSchema("col_char_2"),
- // "col_vchar_52":
- // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB",
- new StringDimensionSchema("col_vchar_52"),
- // "col_tmstmp": 1409617682418,
- new LongDimensionSchema("col_tmstmp"),
- // "col_dt": 422717616000000,
- new LongDimensionSchema("col_dt"),
- // "col_booln": false,
- new StringDimensionSchema("col_booln"),
- // "col_dbl": 12900.48,
- new DoubleDimensionSchema("col_dbl"),
- // "col_tm": 33109170
- new LongDimensionSchema("col_tm"));
- attachIndex(
- retVal,
- "fewRowsAllData.parquet",
- // "col0":12024,
- new LongDimensionSchema("col0"),
- // "col1":307168,
- new LongDimensionSchema("col1"),
- // "col2":"VT",
- new StringDimensionSchema("col2"),
- // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF",
- new StringDimensionSchema("col3"),
- // "col4":1338596882419,
- new LongDimensionSchema("col4"),
- // "col5":422705433600000,
- new LongDimensionSchema("col5"),
- // "col6":true,
- new StringDimensionSchema("col6"),
- // "col7":3.95110006277E8,
- new DoubleDimensionSchema("col7"),
- // "col8":67465430
- new LongDimensionSchema("col8"));
- attachIndex(
- retVal,
- "t_alltype.parquet",
- // "c1":1,
- new LongDimensionSchema("c1"),
- // "c2":592475043,
- new LongDimensionSchema("c2"),
- // "c3":616080519999272,
- new LongDimensionSchema("c3"),
- // "c4":"ObHeWTDEcbGzssDwPwurfs",
- new StringDimensionSchema("c4"),
- // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx",
- new StringDimensionSchema("c5"),
- // "c6":1456290852307,
- new LongDimensionSchema("c6"),
- // "c7":421426627200000,
- new LongDimensionSchema("c7"),
- // "c8":true,
- new StringDimensionSchema("c8"),
- // "c9":0.626179100469
- new DoubleDimensionSchema("c9"));
+ attachIndex(
+ retVal,
+ "tblWnulls.parquet",
+ new LongDimensionSchema("c1"),
+ new StringDimensionSchema("c2"));
- return retVal;
+ // {"col0":1,"col1":65534,"col2":256.0,"col3":1234.9,"col4":73578580,"col5":1393720082338,"col6":421185052800000,"col7":false,"col8":"CA","col9":"AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXZ"}
+ attachIndex(
+ retVal,
+ "allTypsUniq.parquet",
+ new LongDimensionSchema("col0"),
+ new LongDimensionSchema("col1"),
+ new DoubleDimensionSchema("col2"),
+ new DoubleDimensionSchema("col3"),
+ new LongDimensionSchema("col4"),
+ new LongDimensionSchema("col5"),
+ new LongDimensionSchema("col6"),
+ new StringDimensionSchema("col7"),
+ new StringDimensionSchema("col8"),
+ new StringDimensionSchema("col9"));
+ attachIndex(
+ retVal,
+ "smlTbl.parquet",
+ // "col_int": 8122,
+ new LongDimensionSchema("col_int"),
+ // "col_bgint": 817200,
+ new LongDimensionSchema("col_bgint"),
+ // "col_char_2": "IN",
+ new StringDimensionSchema("col_char_2"),
+ // "col_vchar_52":
+ // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB",
+ new StringDimensionSchema("col_vchar_52"),
+ // "col_tmstmp": 1409617682418,
+ new LongDimensionSchema("col_tmstmp"),
+ // "col_dt": 422717616000000,
+ new LongDimensionSchema("col_dt"),
+ // "col_booln": false,
+ new StringDimensionSchema("col_booln"),
+ // "col_dbl": 12900.48,
+ new DoubleDimensionSchema("col_dbl"),
+ // "col_tm": 33109170
+ new LongDimensionSchema("col_tm"));
+ attachIndex(
+ retVal,
+ "fewRowsAllData.parquet",
+ // "col0":12024,
+ new LongDimensionSchema("col0"),
+ // "col1":307168,
+ new LongDimensionSchema("col1"),
+ // "col2":"VT",
+ new StringDimensionSchema("col2"),
+ // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF",
+ new StringDimensionSchema("col3"),
+ // "col4":1338596882419,
+ new LongDimensionSchema("col4"),
+ // "col5":422705433600000,
+ new LongDimensionSchema("col5"),
+ // "col6":true,
+ new StringDimensionSchema("col6"),
+ // "col7":3.95110006277E8,
+ new DoubleDimensionSchema("col7"),
+ // "col8":67465430
+ new LongDimensionSchema("col8"));
+ attachIndex(
+ retVal,
+ "t_alltype.parquet",
+ // "c1":1,
+ new LongDimensionSchema("c1"),
+ // "c2":592475043,
+ new LongDimensionSchema("c2"),
+ // "c3":616080519999272,
+ new LongDimensionSchema("c3"),
+ // "c4":"ObHeWTDEcbGzssDwPwurfs",
+ new StringDimensionSchema("c4"),
+ // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx",
+ new StringDimensionSchema("c5"),
+ // "c6":1456290852307,
+ new LongDimensionSchema("c6"),
+ // "c7":421426627200000,
+ new LongDimensionSchema("c7"),
+ // "c8":true,
+ new StringDimensionSchema("c8"),
+ // "c9":0.626179100469
+ new DoubleDimensionSchema("c9"));
+
+ return retVal;
+ }
+
+ @SuppressWarnings({"rawtypes", "unchecked"})
+ private void attachIndex(SpecificSegmentsQuerySegmentWalker texasRanger, String dataSource, DimensionSchema... dims)
+ {
+ ArrayList<String> dimensionNames = new ArrayList<>(dims.length);
+ for (DimensionSchema dimension : dims) {
+ dimensionNames.add(dimension.getName());
+ }
+
+ final File tmpFolder = tempDirProducer.newTempFolder();
+ final QueryableIndex queryableIndex = IndexBuilder
+ .create()
+ .tmpDir(new File(tmpFolder, dataSource))
+ .segmentWriteOutMediumFactory(OnHeapMemorySegmentWriteOutMediumFactory.instance())
+ .schema(new IncrementalIndexSchema.Builder()
+ .withRollup(false)
+ .withDimensionsSpec(new DimensionsSpec(Arrays.asList(dims)))
+ .build())
+ .rows(
+ () -> {
+ try {
+ return Iterators.transform(
+ MAPPER.readerFor(Map.class)
+ .readValues(
+ ClassLoader.getSystemResource("drill/window/datasources/" + dataSource + ".json")),
+ (Function<Map, InputRow>) input -> new MapBasedInputRow(0, dimensionNames, input));
+ }
+ catch (IOException e) {
+ throw new RE(e, "problem reading file");
+ }
+ })
+ .buildMMappedIndex();
+
+ texasRanger.add(
+ DataSegment.builder()
+ .dataSource(dataSource)
+ .interval(Intervals.ETERNITY)
+ .version("1")
+ .shardSpec(new NumberedShardSpec(0, 0))
+ .size(0)
+ .build(),
+ queryableIndex);
+ }
}
public class TextualResultsVerifier implements ResultsVerifier
@@ -497,48 +552,6 @@
}
}
- @SuppressWarnings({"rawtypes", "unchecked"})
- private void attachIndex(SpecificSegmentsQuerySegmentWalker texasRanger, String dataSource, DimensionSchema... dims)
- {
- ArrayList<String> dimensionNames = new ArrayList<>(dims.length);
- for (DimensionSchema dimension : dims) {
- dimensionNames.add(dimension.getName());
- }
-
- final File tmpFolder = newTempFolder();
- final QueryableIndex queryableIndex = IndexBuilder
- .create()
- .tmpDir(new File(tmpFolder, dataSource))
- .segmentWriteOutMediumFactory(OnHeapMemorySegmentWriteOutMediumFactory.instance())
- .schema(new IncrementalIndexSchema.Builder()
- .withRollup(false)
- .withDimensionsSpec(new DimensionsSpec(Arrays.asList(dims)))
- .build())
- .rows(
- () -> {
- try {
- return Iterators.transform(
- MAPPER.readerFor(Map.class)
- .readValues(
- ClassLoader.getSystemResource("drill/window/datasources/" + dataSource + ".json")),
- (Function<Map, InputRow>) input -> new MapBasedInputRow(0, dimensionNames, input));
- }
- catch (IOException e) {
- throw new RE(e, "problem reading file");
- }
- })
- .buildMMappedIndex();
-
- texasRanger.add(
- DataSegment.builder()
- .dataSource(dataSource)
- .interval(Intervals.ETERNITY)
- .version("1")
- .shardSpec(new NumberedShardSpec(0, 0))
- .size(0)
- .build(),
- queryableIndex);
- }
// testcases_start
@DrillTest("aggregates/aggOWnFn_11")
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
index df50dd6..c3880ee 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java
@@ -19,21 +19,26 @@
package org.apache.druid.sql.calcite;
+import org.apache.druid.java.util.common.RE;
import org.apache.druid.query.topn.TopNQueryConfig;
import org.apache.druid.sql.calcite.util.CacheTestHelperModule.ResultCacheMode;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier;
import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
+
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
+import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
+import java.util.function.Function;
/**
* Annotation to specify desired framework settings.
@@ -56,14 +61,42 @@
/**
* @see {@link SqlTestFrameworkConfig}
*/
- class Rule implements AfterAllCallback, BeforeEachCallback
+ class Rule implements AfterAllCallback, BeforeEachCallback, BeforeAllCallback
{
Map<SqlTestFrameworkConfig, ConfigurationInstance> configMap = new HashMap<>();
private SqlTestFrameworkConfig config;
- private QueryComponentSupplier testHost;
+ private Function<TempDirProducer, QueryComponentSupplier> testHostSupplier;
private Method method;
@Override
+ public void beforeAll(ExtensionContext context) throws Exception
+ {
+ Class<?> testClass = context.getTestClass().get();
+ SqlTestFramework.SqlTestFrameWorkModule moduleAnnotation = getModuleAnnotationFor(testClass);
+ Constructor<? extends QueryComponentSupplier> constructor = moduleAnnotation.value().getConstructor(TempDirProducer.class);
+ testHostSupplier = f -> {
+ try {
+ return constructor.newInstance(f);
+ }
+ catch (Exception e) {
+ throw new RE(e, "Unable to create QueryComponentSupplier");
+ }
+ };
+ }
+
+ private SqlTestFramework.SqlTestFrameWorkModule getModuleAnnotationFor(Class<?> testClass)
+ {
+ SqlTestFramework.SqlTestFrameWorkModule annotation = testClass.getAnnotation(SqlTestFramework.SqlTestFrameWorkModule.class);
+ if (annotation == null) {
+ if (testClass.getSuperclass() == null) {
+ throw new RE("Can't get QueryComponentSupplier for testclass!");
+ }
+ return getModuleAnnotationFor(testClass.getSuperclass());
+ }
+ return annotation;
+ }
+
+ @Override
public void afterAll(ExtensionContext context)
{
for (ConfigurationInstance f : configMap.values()) {
@@ -75,10 +108,8 @@
@Override
public void beforeEach(ExtensionContext context)
{
- testHost = (QueryComponentSupplier) context.getTestInstance().get();
method = context.getTestMethod().get();
setConfig(method.getAnnotation(SqlTestFrameworkConfig.class));
-
}
@SqlTestFrameworkConfig
@@ -120,7 +151,7 @@
ConfigurationInstance buildConfiguration(SqlTestFrameworkConfig config)
{
- return new ConfigurationInstance(config, testHost);
+ return new ConfigurationInstance(config, testHostSupplier.apply(new TempDirProducer("druid-test")));
}
}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/TempDirProducer.java b/sql/src/test/java/org/apache/druid/sql/calcite/TempDirProducer.java
new file mode 100644
index 0000000..65aceff
--- /dev/null
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/TempDirProducer.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite;
+
+import org.apache.druid.java.util.common.FileUtils;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Creates a hierarchy of temp dirs.
+ *
+ * All directories are created under a single root; calling {@link #close()}
+ * on the root producer deletes the entire directory tree.
+ */
+public class TempDirProducer implements Closeable
+{
+ private final File tempDir;
+
+ public TempDirProducer(String prefix)
+ {
+ tempDir = FileUtils.createTempDir(prefix);
+ }
+
+ public TempDirProducer(TempDirProducer tempDirProducer, String prefix)
+ {
+ this.tempDir = FileUtils.createTempDirInLocation(tempDirProducer.tempDir.toPath(), prefix);
+ }
+
+ public TempDirProducer getProducer(String prefix)
+ {
+ return new TempDirProducer(this, prefix);
+ }
+
+ public File newTempFolder(String prefix)
+ {
+ return FileUtils.createTempDirInLocation(tempDir.toPath(), prefix);
+ }
+
+ public File newTempFolder()
+ {
+ return newTempFolder(null);
+ }
+
+ @Override
+ public void close() throws IOException
+ {
+ FileUtils.deleteDirectory(tempDir);
+ }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
index 1506a5c..f436792 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java
@@ -50,6 +50,7 @@
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.sql.SqlStatementFactory;
+import org.apache.druid.sql.calcite.TempDirProducer;
import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.CatalogResolver;
@@ -67,8 +68,12 @@
import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.timeline.DataSegment;
-import java.io.File;
+import java.io.Closeable;
import java.io.IOException;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
@@ -99,7 +104,7 @@
* <p>
* The framework should be built once per test class (not once per test method.)
* Then, for each planner setup, call
- * {@link #plannerFixture(PlannerComponentSupplier, PlannerConfig, AuthConfig)}
+ * {@link #plannerFixture(PlannerConfig, AuthConfig)}
* to get a {@link PlannerFixture} with a view manager and planner factory. Call
* {@link PlannerFixture#statementFactory()} to
* obtain a the test-specific planner and wrapper classes for that test. After
@@ -119,6 +124,16 @@
public class SqlTestFramework
{
/**
+ * Declares which {@link QueryComponentSupplier} must be used for the class.
+ */
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target({ElementType.TYPE})
+ public @interface SqlTestFrameWorkModule
+ {
+ Class<? extends QueryComponentSupplier> value();
+ }
+
+ /**
* Interface to provide various framework components. Extend to customize,
* use {@link StandardComponentSupplier} for the "standard" components.
* <p>
@@ -126,7 +141,7 @@
* exist in {@code BaseCalciteQueryTest}. Any changes here will impact that
* base class, and possibly many test cases that extend that class.
*/
- public interface QueryComponentSupplier
+ public interface QueryComponentSupplier extends Closeable
{
/**
* Gather properties to be used within tests. Particularly useful when choosing
@@ -174,6 +189,8 @@
JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider);
void finalizeTestFramework(SqlTestFramework sqlTestFramework);
+
+ PlannerComponentSupplier getPlannerComponentSupplier();
}
public interface PlannerComponentSupplier
@@ -197,13 +214,25 @@
*/
public static class StandardComponentSupplier implements QueryComponentSupplier
{
- private final File temporaryFolder;
+ protected final TempDirProducer tempDirProducer;
+ private final PlannerComponentSupplier plannerComponentSupplier;
public StandardComponentSupplier(
- final File temporaryFolder
+ final TempDirProducer tempDirProducer
)
{
- this.temporaryFolder = temporaryFolder;
+ this.tempDirProducer = tempDirProducer;
+ this.plannerComponentSupplier = buildPlannerComponentSupplier();
+ }
+
+ /**
+ * Build the {@link PlannerComponentSupplier}.
+ *
+   * Subclasses may override this method to customize how it is built.
+ */
+ protected PlannerComponentSupplier buildPlannerComponentSupplier()
+ {
+ return new StandardPlannerComponentSupplier();
}
@Override
@@ -245,7 +274,7 @@
return TestDataBuilder.createMockWalker(
injector,
conglomerate,
- temporaryFolder,
+ tempDirProducer.newTempFolder("segments"),
QueryStackTests.DEFAULT_NOOP_SCHEDULER,
joinableFactory
);
@@ -285,6 +314,18 @@
public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
{
}
+
+ @Override
+ public PlannerComponentSupplier getPlannerComponentSupplier()
+ {
+ return plannerComponentSupplier;
+ }
+
+ @Override
+ public void close() throws IOException
+ {
+ tempDirProducer.close();
+ }
}
public static class StandardPlannerComponentSupplier implements PlannerComponentSupplier
@@ -642,18 +683,19 @@
* planner fixture is specific to one test and one planner config.
*/
public PlannerFixture plannerFixture(
- PlannerComponentSupplier componentSupplier,
PlannerConfig plannerConfig,
AuthConfig authConfig
)
{
- return new PlannerFixture(this, componentSupplier, plannerConfig, authConfig);
+ PlannerComponentSupplier plannerComponentSupplier = componentSupplier.getPlannerComponentSupplier();
+ return new PlannerFixture(this, plannerComponentSupplier, plannerConfig, authConfig);
}
public void close()
{
try {
resourceCloser.close();
+ componentSupplier.close();
}
catch (IOException e) {
throw new RE(e);