-- Automatically generated by SQLQueryTestSuite
-- !query
create temp view gstest1(a,b,v)
as values (1,1,10),(1,1,11),(1,2,12),(1,2,13),(1,3,14),
(2,3,15),
(3,3,16),(3,4,17),
(4,1,18),(4,1,19)
-- !query analysis
CreateViewCommand `gstest1`, [(a,None), (b,None), (v,None)], values (1,1,10),(1,1,11),(1,2,12),(1,2,13),(1,3,14),
(2,3,15),
(3,3,16),(3,4,17),
(4,1,18),(4,1,19), false, false, LocalTempView, true
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
create table gstest2 (a integer, b integer, c integer, d integer,
e integer, f integer, g integer, h integer) using parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`gstest2`, false
-- !query
insert into gstest2 values
(1, 1, 1, 1, 1, 1, 1, 1),
(1, 1, 1, 1, 1, 1, 1, 2),
(1, 1, 1, 1, 1, 1, 2, 2),
(1, 1, 1, 1, 1, 2, 2, 2),
(1, 1, 1, 1, 2, 2, 2, 2),
(1, 1, 1, 2, 2, 2, 2, 2),
(1, 1, 2, 2, 2, 2, 2, 2),
(1, 2, 2, 2, 2, 2, 2, 2),
(2, 2, 2, 2, 2, 2, 2, 2)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/gstest2, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/gstest2], Append, `spark_catalog`.`default`.`gstest2`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/gstest2), [a, b, c, d, e, f, g, h]
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS c#x, cast(col4#x as int) AS d#x, cast(col5#x as int) AS e#x, cast(col6#x as int) AS f#x, cast(col7#x as int) AS g#x, cast(col8#x as int) AS h#x]
+- LocalRelation [col1#x, col2#x, col3#x, col4#x, col5#x, col6#x, col7#x, col8#x]
-- !query
create table gstest3 (a integer, b integer, c integer, d integer) using parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`gstest3`, false
-- !query
insert into gstest3 values
(1, 1, 1, 1),
(2, 2, 2, 2)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/gstest3, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/gstest3], Append, `spark_catalog`.`default`.`gstest3`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/gstest3), [a, b, c, d]
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS c#x, cast(col4#x as int) AS d#x]
+- LocalRelation [col1#x, col2#x, col3#x, col4#x]
-- !query
create table gstest4(id integer, v integer,
unhashable_col /* bit(4) */ byte, unsortable_col /* xid */ integer) using parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`gstest4`, false
-- !query
insert into gstest4
values (1,1,tinyint('0'),1), (2,2,tinyint('1'),1),
(3,4,tinyint('2'),2), (4,8,tinyint('3'),2),
(5,16,tinyint('0'),2), (6,32,tinyint('1'),2),
(7,64,tinyint('2'),1), (8,128,tinyint('3'),1)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/gstest4, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/gstest4], Append, `spark_catalog`.`default`.`gstest4`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/gstest4), [id, v, unhashable_col, unsortable_col]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as int) AS v#x, cast(col3#x as tinyint) AS unhashable_col#x, cast(col4#x as int) AS unsortable_col#x]
+- LocalRelation [col1#x, col2#x, col3#x, col4#x]
-- !query
create table gstest_empty (a integer, b integer, v integer) using parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`gstest_empty`, false
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by rollup (a,b)
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by rollup (a,b) order by a,b
-- !query analysis
Sort [a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by rollup (a,b) order by b desc, a
-- !query analysis
Sort [b#x DESC NULLS LAST, a#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by rollup (a,b) order by coalesce(a,0)+coalesce(b,0), a
-- !query analysis
Sort [(coalesce(a#x, 0) + coalesce(b#x, 0)) ASC NULLS FIRST, a#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, sum(c), sum(sum(c)) over (order by a,b) as rsum
from gstest2 group by rollup (a,b) order by rsum, a, b
-- !query analysis
Sort [rsum#xL ASC NULLS FIRST, a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Project [a#x, b#x, sum(c)#xL, rsum#xL]
+- Project [a#x, b#x, sum(c)#xL, _w0#xL, rsum#xL, rsum#xL]
+- Window [sum(_w0#xL) windowspecdefinition(a#x ASC NULLS FIRST, b#x ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS rsum#xL], [a#x ASC NULLS FIRST, b#x ASC NULLS FIRST]
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(c#x) AS sum(c)#xL, sum(c#x) AS _w0#xL]
+- Expand [[a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, 0], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, null, 1], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, null, 3]], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),a)
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),(),(),())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL, _gen_grouping_pos#x], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0, 0], [a#x, b#x, v#x, null, null, 3, 1], [a#x, b#x, v#x, null, null, 3, 2], [a#x, b#x, v#x, null, null, 3, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL, _gen_grouping_pos#x]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select sum(v), count(*) from gstest_empty group by grouping sets ((),(),())
-- !query analysis
Aggregate [spark_grouping_id#xL, _gen_grouping_pos#x], [sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, 0, 0], [a#x, b#x, v#x, 0, 1], [a#x, b#x, v#x, 0, 2]], [a#x, b#x, v#x, spark_grouping_id#xL, _gen_grouping_pos#x]
+- Project [a#x, b#x, v#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select t1.a, t2.b, sum(t1.v), count(*) from gstest_empty t1, gstest_empty t2
group by grouping sets ((t1.a,t2.b),())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.gstest_empty
: +- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select t1.a, t2.b, grouping(t1.a), grouping(t2.b), sum(t1.v), max(t2.a)
from gstest1 t1, gstest2 t2
group by grouping sets ((t1.a, t2.b), ())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, max(a#x) AS max(a)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x, b#x AS b#x]
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias gstest1
: +- View (`gstest1`, [a#x, b#x, v#x])
: +- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
: +- LocalRelation [col1#x, col2#x, col3#x]
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select t1.a, t2.b, grouping(t1.a), grouping(t2.b), sum(t1.v), max(t2.a)
from gstest1 t1 join gstest2 t2 on (t1.a=t2.a)
group by grouping sets ((t1.a, t2.b), ())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, max(a#x) AS max(a)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x, b#x AS b#x]
+- Join Inner, (a#x = a#x)
:- SubqueryAlias t1
: +- SubqueryAlias gstest1
: +- View (`gstest1`, [a#x, b#x, v#x])
: +- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
: +- LocalRelation [col1#x, col2#x, col3#x]
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select a, b, grouping(a), grouping(b), sum(t1.v), max(t2.c)
from gstest1 t1 join gstest2 t2 using (a,b)
group by grouping sets ((a, b), ())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, max(c#x) AS max(c)#x]
+- Expand [[a#x, b#x, v#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, 0], [a#x, b#x, v#x, c#x, d#x, e#x, f#x, g#x, h#x, null, null, 3]], [a#x, b#x, v#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x, b#x AS b#x]
+- Project [a#x, b#x, v#x, c#x, d#x, e#x, f#x, g#x, h#x]
+- Join Inner, ((a#x = a#x) AND (b#x = b#x))
:- SubqueryAlias t1
: +- SubqueryAlias gstest1
: +- View (`gstest1`, [a#x, b#x, v#x])
: +- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
: +- LocalRelation [col1#x, col2#x, col3#x]
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select four, x
from (select four, ten, 'foo' as x from tenk1) as t
group by grouping sets (four, x)
having x = 'foo'
-- !query analysis
Filter (x#x = foo)
+- Aggregate [four#x, x#x, spark_grouping_id#xL], [four#x, x#x]
+- Expand [[four#x, ten#x, x#x, four#x, null, 1], [four#x, ten#x, x#x, null, x#x, 2]], [four#x, ten#x, x#x, four#x, x#x, spark_grouping_id#xL]
+- Project [four#x, ten#x, x#x, four#x AS four#x, x#x AS x#x]
+- SubqueryAlias t
+- Project [four#x, ten#x, foo AS x#x]
+- SubqueryAlias spark_catalog.default.tenk1
+- Relation spark_catalog.default.tenk1[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select four, x || 'x'
from (select four, ten, 'foo' as x from tenk1) as t
group by grouping sets (four, x)
order by four
-- !query analysis
Sort [four#x ASC NULLS FIRST], true
+- Aggregate [four#x, x#x, spark_grouping_id#xL], [four#x, concat(x#x, x) AS concat(x, x)#x]
+- Expand [[four#x, ten#x, x#x, four#x, null, 1], [four#x, ten#x, x#x, null, x#x, 2]], [four#x, ten#x, x#x, four#x, x#x, spark_grouping_id#xL]
+- Project [four#x, ten#x, x#x, four#x AS four#x, x#x AS x#x]
+- SubqueryAlias t
+- Project [four#x, ten#x, foo AS x#x]
+- SubqueryAlias spark_catalog.default.tenk1
+- Relation spark_catalog.default.tenk1[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select (x+y)*1, sum(z)
from (select 1 as x, 2 as y, 3 as z) s
group by grouping sets (x+y, x)
-- !query analysis
Aggregate [(x#x + y#x)#x, x#x, spark_grouping_id#xL], [((x#x + y#x)#x * 1) AS ((x + y) * 1)#x, sum(z#x) AS sum(z)#xL]
+- Expand [[x#x, y#x, z#x, (x#x + y#x)#x, null, 1], [x#x, y#x, z#x, null, x#x, 2]], [x#x, y#x, z#x, (x#x + y#x)#x, x#x, spark_grouping_id#xL]
+- Project [x#x, y#x, z#x, (x#x + y#x) AS (x#x + y#x)#x, x#x AS x#x]
+- SubqueryAlias s
+- Project [1 AS x#x, 2 AS y#x, 3 AS z#x]
+- OneRowRelation
-- !query
CREATE TEMP VIEW int8_tbl AS SELECT * FROM VALUES
(123L, 456L),
(123L, 4567890123456789L),
(4567890123456789L, 123L),
(4567890123456789L, 4567890123456789L),
(4567890123456789L, -4567890123456789L) as int8_tbl(q1, q2)
-- !query analysis
CreateViewCommand `int8_tbl`, SELECT * FROM VALUES
(123L, 456L),
(123L, 4567890123456789L),
(4567890123456789L, 123L),
(4567890123456789L, 4567890123456789L),
(4567890123456789L, -4567890123456789L) as int8_tbl(q1, q2), false, false, LocalTempView, true
+- Project [q1#xL, q2#xL]
+- SubqueryAlias int8_tbl
+- LocalRelation [q1#xL, q2#xL]
-- !query
select x, not x as not_x, q2 from
(select *, q1 = 1 as x from int8_tbl i1) as t
group by grouping sets(x, q2)
order by x, q2
-- !query analysis
Sort [x#x ASC NULLS FIRST, q2#xL ASC NULLS FIRST], true
+- Aggregate [x#x, q2#xL, spark_grouping_id#xL], [x#x, NOT x#x AS not_x#x, q2#xL]
+- Expand [[q1#xL, q2#xL, x#x, x#x, null, 1], [q1#xL, q2#xL, x#x, null, q2#xL, 2]], [q1#xL, q2#xL, x#x, x#x, q2#xL, spark_grouping_id#xL]
+- Project [q1#xL, q2#xL, x#x, x#x AS x#x, q2#xL AS q2#xL]
+- SubqueryAlias t
+- Project [q1#xL, q2#xL, (q1#xL = cast(1 as bigint)) AS x#x]
+- SubqueryAlias i1
+- SubqueryAlias int8_tbl
+- View (`int8_tbl`, [q1#xL, q2#xL])
+- Project [cast(q1#xL as bigint) AS q1#xL, cast(q2#xL as bigint) AS q2#xL]
+- Project [q1#xL, q2#xL]
+- SubqueryAlias int8_tbl
+- LocalRelation [q1#xL, q2#xL]
-- !query
DROP VIEW int8_tbl
-- !query analysis
DropTempViewCommand int8_tbl
-- !query
select ten, sum(distinct four) from onek a
group by grouping sets((ten,four),(ten))
having exists (select 1 from onek b where sum(distinct a.four) = b.four)
-- !query analysis
Filter exists#x [sum(DISTINCT four)#xL]
: +- Project [1 AS 1#x]
: +- Filter (outer(sum(DISTINCT four)#xL) = cast(four#x as bigint))
: +- SubqueryAlias b
: +- SubqueryAlias spark_catalog.default.onek
: +- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
+- Aggregate [ten#x, four#x, spark_grouping_id#xL], [ten#x, sum(distinct four#x) AS sum(DISTINCT four)#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, four#x, 0], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, null, 1]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, four#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x, four#x AS four#x]
+- SubqueryAlias a
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select a,count(*) from gstest2 group by rollup(a) order by a
-- !query analysis
Sort [a#x ASC NULLS FIRST], true
+- Aggregate [a#x, spark_grouping_id#xL], [a#x, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, 0], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, 1]], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, spark_grouping_id#xL]
+- Project [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x]
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select a,count(*) from gstest2 group by rollup(a) having a is distinct from 1 order by a
-- !query analysis
Sort [a#x ASC NULLS FIRST], true
+- Filter NOT (a#x <=> 1)
+- Aggregate [a#x, spark_grouping_id#xL], [a#x, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, 0], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, 1]], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, spark_grouping_id#xL]
+- Project [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x]
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
select ten, grouping(ten) from onek
group by grouping sets(ten) having grouping(ten) >= 0
order by 2,1
-- !query analysis
Sort [grouping(ten)#x ASC NULLS FIRST, ten#x ASC NULLS FIRST], true
+- Project [ten#x, grouping(ten)#x]
+- Filter (cast(cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) as int) >= 0)
+- Aggregate [ten#x, spark_grouping_id#xL], [ten#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(ten)#x, spark_grouping_id#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, 0]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x]
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select ten, grouping(ten) from onek
group by grouping sets(ten, four) having grouping(ten) > 0
order by 2,1
-- !query analysis
Sort [grouping(ten)#x ASC NULLS FIRST, ten#x ASC NULLS FIRST], true
+- Project [ten#x, grouping(ten)#x]
+- Filter (cast(cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) as int) > 0)
+- Aggregate [ten#x, four#x, spark_grouping_id#xL], [ten#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(ten)#x, spark_grouping_id#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, null, 1], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, null, four#x, 2]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, four#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x, four#x AS four#x]
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select ten, grouping(ten) from onek
group by rollup(ten) having grouping(ten) > 0
order by 2,1
-- !query analysis
Sort [grouping(ten)#x ASC NULLS FIRST, ten#x ASC NULLS FIRST], true
+- Project [ten#x, grouping(ten)#x]
+- Filter (cast(cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) as int) > 0)
+- Aggregate [ten#x, spark_grouping_id#xL], [ten#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(ten)#x, spark_grouping_id#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, 0], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, null, 1]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x]
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select ten, grouping(ten) from onek
group by cube(ten) having grouping(ten) > 0
order by 2,1
-- !query analysis
Sort [grouping(ten)#x ASC NULLS FIRST, ten#x ASC NULLS FIRST], true
+- Project [ten#x, grouping(ten)#x]
+- Filter (cast(cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) as int) > 0)
+- Aggregate [ten#x, spark_grouping_id#xL], [ten#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(ten)#x, spark_grouping_id#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, 0], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, null, 1]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x]
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select ten, sum(distinct four) filter (where string(four) like '123') from onek a
group by rollup(ten)
-- !query analysis
Aggregate [ten#x, spark_grouping_id#xL], [ten#x, sum(distinct four#x) FILTER (WHERE cast(four#x as string) LIKE 123) AS sum(DISTINCT four) FILTER (WHERE four LIKE 123)#xL]
+- Expand [[unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, 0], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, null, 1]], [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x, spark_grouping_id#xL]
+- Project [unique1#x, unique2#x, two#x, four#x, ten#x, twenty#x, hundred#x, thousand#x, twothousand#x, fivethous#x, tenthous#x, odd#x, even#x, stringu1#x, stringu2#x, string4#x, ten#x AS ten#x]
+- SubqueryAlias a
+- SubqueryAlias spark_catalog.default.onek
+- Relation spark_catalog.default.onek[unique1#x,unique2#x,two#x,four#x,ten#x,twenty#x,hundred#x,thousand#x,twothousand#x,fivethous#x,tenthous#x,odd#x,even#x,stringu1#x,stringu2#x,string4#x] parquet
-- !query
select count(*) from gstest4 group by rollup(unhashable_col,unsortable_col)
-- !query analysis
Aggregate [unhashable_col#x, unsortable_col#x, spark_grouping_id#xL], [count(1) AS count(1)#xL]
+- Expand [[id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x, unsortable_col#x, 0], [id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x, null, 1], [id#x, v#x, unhashable_col#x, unsortable_col#x, null, null, 3]], [id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x, unsortable_col#x, spark_grouping_id#xL]
+- Project [id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x AS unhashable_col#x, unsortable_col#x AS unsortable_col#x]
+- SubqueryAlias spark_catalog.default.gstest4
+- Relation spark_catalog.default.gstest4[id#x,v#x,unhashable_col#x,unsortable_col#x] parquet
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by grouping sets ((a),(b)) order by 3,4,1,2 /* 3,1,2 */
-- !query analysis
Sort [grouping(a)#x ASC NULLS FIRST, grouping(b)#x ASC NULLS FIRST, a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, b#x, 2]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by cube(a,b) order by 3,4,1,2 /* 3,1,2 */
-- !query analysis
Sort [grouping(a)#x ASC NULLS FIRST, grouping(b)#x ASC NULLS FIRST, a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1], [a#x, b#x, v#x, null, b#x, 2], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select unsortable_col, count(*)
from gstest4 group by grouping sets ((unsortable_col),(unsortable_col))
order by string(unsortable_col)
-- !query analysis
Sort [cast(unsortable_col#x as string) ASC NULLS FIRST], true
+- Aggregate [unsortable_col#x, spark_grouping_id#xL, _gen_grouping_pos#x], [unsortable_col#x, count(1) AS count(1)#xL]
+- Expand [[id#x, v#x, unhashable_col#x, unsortable_col#x, unsortable_col#x, 0, 0], [id#x, v#x, unhashable_col#x, unsortable_col#x, unsortable_col#x, 0, 1]], [id#x, v#x, unhashable_col#x, unsortable_col#x, unsortable_col#x, spark_grouping_id#xL, _gen_grouping_pos#x]
+- Project [id#x, v#x, unhashable_col#x, unsortable_col#x, unsortable_col#x AS unsortable_col#x]
+- SubqueryAlias spark_catalog.default.gstest4
+- Relation spark_catalog.default.gstest4[id#x,v#x,unhashable_col#x,unsortable_col#x] parquet
-- !query
select unhashable_col, unsortable_col,
grouping(unhashable_col), grouping(unsortable_col),
count(*), sum(v)
from gstest4 group by grouping sets ((unhashable_col),(unsortable_col))
order by 3, 4, 6 /* 3, 5 */
-- !query analysis
Sort [grouping(unhashable_col)#x ASC NULLS FIRST, grouping(unsortable_col)#x ASC NULLS FIRST, sum(v)#xL ASC NULLS FIRST], true
+- Aggregate [unhashable_col#x, unsortable_col#x, spark_grouping_id#xL], [unhashable_col#x, unsortable_col#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(unhashable_col)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(unsortable_col)#x, count(1) AS count(1)#xL, sum(v#x) AS sum(v)#xL]
+- Expand [[id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x, null, 1], [id#x, v#x, unhashable_col#x, unsortable_col#x, null, unsortable_col#x, 2]], [id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x, unsortable_col#x, spark_grouping_id#xL]
+- Project [id#x, v#x, unhashable_col#x, unsortable_col#x, unhashable_col#x AS unhashable_col#x, unsortable_col#x AS unsortable_col#x]
+- SubqueryAlias spark_catalog.default.gstest4
+- Relation spark_catalog.default.gstest4[id#x,v#x,unhashable_col#x,unsortable_col#x] parquet
-- !query
select unhashable_col, unsortable_col,
grouping(unhashable_col), grouping(unsortable_col),
count(*), sum(v)
from gstest4 group by grouping sets ((v,unhashable_col),(v,unsortable_col))
order by 3, 4, 6 /* 3,5 */
-- !query analysis
Sort [grouping(unhashable_col)#x ASC NULLS FIRST, grouping(unsortable_col)#x ASC NULLS FIRST, sum(v)#xL ASC NULLS FIRST], true
+- Aggregate [v#x, unhashable_col#x, unsortable_col#x, spark_grouping_id#xL], [unhashable_col#x, unsortable_col#x, cast((shiftright(spark_grouping_id#xL, 1) & 1) as tinyint) AS grouping(unhashable_col)#x, cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) AS grouping(unsortable_col)#x, count(1) AS count(1)#xL, sum(v#x) AS sum(v)#xL]
+- Expand [[id#x, v#x, unhashable_col#x, unsortable_col#x, v#x, unhashable_col#x, null, 1], [id#x, v#x, unhashable_col#x, unsortable_col#x, v#x, null, unsortable_col#x, 2]], [id#x, v#x, unhashable_col#x, unsortable_col#x, v#x, unhashable_col#x, unsortable_col#x, spark_grouping_id#xL]
+- Project [id#x, v#x, unhashable_col#x, unsortable_col#x, v#x AS v#x, unhashable_col#x AS unhashable_col#x, unsortable_col#x AS unsortable_col#x]
+- SubqueryAlias spark_catalog.default.gstest4
+- Relation spark_catalog.default.gstest4[id#x,v#x,unhashable_col#x,unsortable_col#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),a)
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, a#x, null, 1]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0], [a#x, b#x, v#x, null, null, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select a, b, sum(v), count(*) from gstest_empty group by grouping sets ((a,b),(),(),())
-- !query analysis
Aggregate [a#x, b#x, spark_grouping_id#xL, _gen_grouping_pos#x], [a#x, b#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, a#x, b#x, 0, 0], [a#x, b#x, v#x, null, null, 3, 1], [a#x, b#x, v#x, null, null, 3, 2], [a#x, b#x, v#x, null, null, 3, 3]], [a#x, b#x, v#x, a#x, b#x, spark_grouping_id#xL, _gen_grouping_pos#x]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select sum(v), count(*) from gstest_empty group by grouping sets ((),(),())
-- !query analysis
Aggregate [spark_grouping_id#xL, _gen_grouping_pos#x], [sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL]
+- Expand [[a#x, b#x, v#x, 0, 0], [a#x, b#x, v#x, 0, 1], [a#x, b#x, v#x, 0, 2]], [a#x, b#x, v#x, spark_grouping_id#xL, _gen_grouping_pos#x]
+- Project [a#x, b#x, v#x]
+- SubqueryAlias spark_catalog.default.gstest_empty
+- Relation spark_catalog.default.gstest_empty[a#x,b#x,v#x] parquet
-- !query
select a, b, grouping(a), grouping(b), sum(v), count(*), max(v)
from gstest1 group by grouping sets ((a,b),(a+1,b+1),(a+2,b+2)) order by 3,4,7 /* 3,6 */
-- !query analysis
Sort [grouping(a)#x ASC NULLS FIRST, grouping(b)#x ASC NULLS FIRST, max(v)#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, (a#x + 1)#x, (b#x + 1)#x, (a#x + 2)#x, (b#x + 2)#x, spark_grouping_id#xL], [a#x, b#x, cast((shiftright(spark_grouping_id#xL, 5) & 1) as tinyint) AS grouping(a)#x, cast((shiftright(spark_grouping_id#xL, 4) & 1) as tinyint) AS grouping(b)#x, sum(v#x) AS sum(v)#xL, count(1) AS count(1)#xL, max(v#x) AS max(v)#x]
+- Expand [[a#x, b#x, v#x, a#x, b#x, null, null, null, null, 15], [a#x, b#x, v#x, null, null, (a#x + 1)#x, (b#x + 1)#x, null, null, 51], [a#x, b#x, v#x, null, null, null, null, (a#x + 2)#x, (b#x + 2)#x, 60]], [a#x, b#x, v#x, a#x, b#x, (a#x + 1)#x, (b#x + 1)#x, (a#x + 2)#x, (b#x + 2)#x, spark_grouping_id#xL]
+- Project [a#x, b#x, v#x, a#x AS a#x, b#x AS b#x, (a#x + 1) AS (a#x + 1)#x, (b#x + 1) AS (b#x + 1)#x, (a#x + 2) AS (a#x + 2)#x, (b#x + 2) AS (b#x + 2)#x]
+- SubqueryAlias gstest1
+- View (`gstest1`, [a#x, b#x, v#x])
+- Project [cast(col1#x as int) AS a#x, cast(col2#x as int) AS b#x, cast(col3#x as int) AS v#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
select a, b, sum(c), sum(sum(c)) over (order by a,b) as rsum
from gstest2 group by cube (a,b) order by rsum, a, b
-- !query analysis
Sort [rsum#xL ASC NULLS FIRST, a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Project [a#x, b#x, sum(c)#xL, rsum#xL]
+- Project [a#x, b#x, sum(c)#xL, _w0#xL, rsum#xL, rsum#xL]
+- Window [sum(_w0#xL) windowspecdefinition(a#x ASC NULLS FIRST, b#x ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS rsum#xL], [a#x ASC NULLS FIRST, b#x ASC NULLS FIRST]
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, sum(c#x) AS sum(c)#xL, sum(c#x) AS _w0#xL]
+- Expand [[a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, 0], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, null, 1], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, b#x, 2], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, null, null, 3]], [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, c#x, d#x, e#x, f#x, g#x, h#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest2
+- Relation spark_catalog.default.gstest2[a#x,b#x,c#x,d#x,e#x,f#x,g#x,h#x] parquet
-- !query
SELECT a, b, count(*), max(a), max(b) FROM gstest3 GROUP BY GROUPING SETS(a, b,()) ORDER BY a, b
-- !query analysis
Sort [a#x ASC NULLS FIRST, b#x ASC NULLS FIRST], true
+- Aggregate [a#x, b#x, spark_grouping_id#xL], [a#x, b#x, count(1) AS count(1)#xL, max(a#x) AS max(a)#x, max(b#x) AS max(b)#x]
+- Expand [[a#x, b#x, c#x, d#x, a#x, null, 1], [a#x, b#x, c#x, d#x, null, b#x, 2], [a#x, b#x, c#x, d#x, null, null, 3]], [a#x, b#x, c#x, d#x, a#x, b#x, spark_grouping_id#xL]
+- Project [a#x, b#x, c#x, d#x, a#x AS a#x, b#x AS b#x]
+- SubqueryAlias spark_catalog.default.gstest3
+- Relation spark_catalog.default.gstest3[a#x,b#x,c#x,d#x] parquet
-- !query
select v||'a', case grouping(v||'a') when 1 then 1 else 0 end, count(*)
from values (1, 'a'), (1, 'b') u(i,v)
group by rollup(i, v||'a') order by 1,3
-- !query analysis
Sort [concat(v, a)#x ASC NULLS FIRST, count(1)#xL ASC NULLS FIRST], true
+- Aggregate [i#x, concat(v#x, a)#x, spark_grouping_id#xL], [concat(v#x, a)#x AS concat(v, a)#x, CASE WHEN (cast(cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) as int) = 1) THEN 1 ELSE 0 END AS CASE WHEN (grouping(concat(v, a)) = 1) THEN 1 ELSE 0 END#x, count(1) AS count(1)#xL]
+- Expand [[i#x, v#x, i#x, concat(v#x, a)#x, 0], [i#x, v#x, i#x, null, 1], [i#x, v#x, null, null, 3]], [i#x, v#x, i#x, concat(v#x, a)#x, spark_grouping_id#xL]
+- Project [i#x, v#x, i#x AS i#x, concat(v#x, a) AS concat(v#x, a)#x]
+- SubqueryAlias u
+- LocalRelation [i#x, v#x]
-- !query
select v||'a', case when grouping(v||'a') = 1 then 1 else 0 end, count(*)
from values (1, 'a'), (1, 'b') u(i,v)
group by rollup(i, v||'a') order by 1,3
-- !query analysis
Sort [concat(v, a)#x ASC NULLS FIRST, count(1)#xL ASC NULLS FIRST], true
+- Aggregate [i#x, concat(v#x, a)#x, spark_grouping_id#xL], [concat(v#x, a)#x AS concat(v, a)#x, CASE WHEN (cast(cast((shiftright(spark_grouping_id#xL, 0) & 1) as tinyint) as int) = 1) THEN 1 ELSE 0 END AS CASE WHEN (grouping(concat(v, a)) = 1) THEN 1 ELSE 0 END#x, count(1) AS count(1)#xL]
+- Expand [[i#x, v#x, i#x, concat(v#x, a)#x, 0], [i#x, v#x, i#x, null, 1], [i#x, v#x, null, null, 3]], [i#x, v#x, i#x, concat(v#x, a)#x, spark_grouping_id#xL]
+- Project [i#x, v#x, i#x AS i#x, concat(v#x, a) AS concat(v#x, a)#x]
+- SubqueryAlias u
+- LocalRelation [i#x, v#x]
-- !query
DROP VIEW gstest1
-- !query analysis
DropTempViewCommand gstest1
-- !query
DROP TABLE gstest2
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.gstest2
-- !query
DROP TABLE gstest3
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.gstest3
-- !query
DROP TABLE gstest4
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.gstest4
-- !query
DROP TABLE gstest_empty
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.gstest_empty