-- Automatically generated by SQLQueryTestSuite
-- !query
select * from dummy(3)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNRESOLVABLE_TABLE_VALUED_FUNCTION",
"sqlState" : "42883",
"messageParameters" : {
"name" : "`dummy`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 22,
"fragment" : "dummy(3)"
} ]
}
-- !query
select * from range(6 + cos(3))
-- !query analysis
Project [id#xL]
+- Range (0, 5, step=1)
-- !query
select * from range(5, 10)
-- !query analysis
Project [id#xL]
+- Range (5, 10, step=1)
-- !query
select * from range(0, 10, 2)
-- !query analysis
Project [id#xL]
+- Range (0, 10, step=2)
-- !query
select * from range(0, 10, 1, 200)
-- !query analysis
Project [id#xL]
+- Range (0, 10, step=1, splits=Some(200))
-- !query
select * from range(1, 1, 1, 1, 1)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "5",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "[1, 2, 3, 4]",
"functionName" : "`range`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 34,
"fragment" : "range(1, 1, 1, 1, 1)"
} ]
}
-- !query
select * from range(1, null)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"functionName" : "`range`",
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
"paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 28,
"fragment" : "range(1, null)"
} ]
}
-- !query
select * from range(array(1, 2, 3))
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"functionName" : "`range`",
"inputSql" : "\"array(1, 2, 3)\"",
"inputType" : "\"ARRAY<INT>\"",
"paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 35,
"fragment" : "range(array(1, 2, 3))"
} ]
}
-- !query
select * from range(0, 5, 0)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "FAILED_FUNCTION_CALL",
"sqlState" : "38000",
"messageParameters" : {
"funcName" : "`range`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 28,
"fragment" : "range(0, 5, 0)"
} ]
}
-- !query
select * from RaNgE(2)
-- !query analysis
Project [id#xL]
+- Range (0, 2, step=1)
-- !query
select i from range(0, 2) t(i)
-- !query analysis
Project [i#xL]
+- SubqueryAlias t
+- Project [id#xL AS i#xL]
+- Range (0, 2, step=1)
-- !query
select * from range(0, (select 1))
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "42K08",
"messageParameters" : {
"funcName" : "`range`",
"paramName" : "`end`",
"paramType" : "\"BIGINT\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 34,
"fragment" : "range(0, (select 1))"
} ]
}
-- !query
select * from values (0, 1) t(c1, c2), lateral range(0, c2)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "42K08",
"messageParameters" : {
"funcName" : "`range`",
"paramName" : "`end`",
"paramType" : "\"BIGINT\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 48,
"stopIndex" : 59,
"fragment" : "range(0, c2)"
} ]
}
-- !query
select * from explode(array(1, 2))
-- !query analysis
Project [col#x]
+- Generate explode(array(1, 2)), false, [col#x]
+- OneRowRelation
-- !query
select * from explode(map('a', 1, 'b', 2))
-- !query analysis
Project [key#x, value#x]
+- Generate explode(map(a, 1, b, 2)), false, [key#x, value#x]
+- OneRowRelation
-- !query
select * from explode(array())
-- !query analysis
Project [col#x]
+- Generate explode(array()), false, [col#x]
+- OneRowRelation
-- !query
select * from explode(map())
-- !query analysis
Project [key#x, value#x]
+- Generate explode(map()), false, [key#x, value#x]
+- OneRowRelation
-- !query
select * from explode(array(1, 2)) t(c1)
-- !query analysis
Project [c1#x]
+- SubqueryAlias t
+- Project [col#x AS c1#x]
+- Generate explode(array(1, 2)), false, [col#x]
+- OneRowRelation
-- !query
select * from explode(map('a', 1, 'b', 2)) t(k, v)
-- !query analysis
Project [k#x, v#x]
+- SubqueryAlias t
+- Project [key#x AS k#x, value#x AS v#x]
+- Generate explode(map(a, 1, b, 2)), false, [key#x, value#x]
+- OneRowRelation
-- !query
select * from explode(array(rand(0)))
-- !query analysis
[Analyzer test output redacted due to nondeterminism]
-- !query
select * from explode(null)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
"paramIndex" : "first",
"requiredType" : "(\"ARRAY\" or \"MAP\")",
"sqlExpr" : "\"explode(NULL)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 27,
"fragment" : "explode(null)"
} ]
}
-- !query
select * from explode(null) t(c1)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
"paramIndex" : "first",
"requiredType" : "(\"ARRAY\" or \"MAP\")",
"sqlExpr" : "\"explode(NULL)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 33,
"fragment" : "explode(null) t(c1)"
} ]
}
-- !query
select * from explode(1)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"1\"",
"inputType" : "\"INT\"",
"paramIndex" : "first",
"requiredType" : "(\"ARRAY\" or \"MAP\")",
"sqlExpr" : "\"explode(1)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 24,
"fragment" : "explode(1)"
} ]
}
-- !query
select * from explode(1, 2)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "2",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "1",
"functionName" : "`explode`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 27,
"fragment" : "explode(1, 2)"
} ]
}
-- !query
select * from explode(explode(array(1)))
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS",
"sqlState" : "42K0E",
"messageParameters" : {
"expression" : "\"explode(explode(array(1)))\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 40,
"fragment" : "explode(explode(array(1)))"
} ]
}
-- !query
select * from explode(array(1, 2)) t(c1, c2)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NUM_TABLE_VALUE_ALIASES_MISMATCH",
"sqlState" : "42826",
"messageParameters" : {
"aliasesNum" : "2",
"funcName" : "`explode`",
"outColsNum" : "1"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 44,
"fragment" : "explode(array(1, 2)) t(c1, c2)"
} ]
}
-- !query
select * from explode_outer(array(1, 2))
-- !query analysis
Project [col#x]
+- Generate explode(array(1, 2)), true, [col#x]
+- OneRowRelation
-- !query
select * from explode_outer(map('a', 1, 'b', 2))
-- !query analysis
Project [key#x, value#x]
+- Generate explode(map(a, 1, b, 2)), true, [key#x, value#x]
+- OneRowRelation
-- !query
select * from explode_outer(array())
-- !query analysis
Project [col#x]
+- Generate explode(array()), true, [col#x]
+- OneRowRelation
-- !query
select * from explode_outer(map())
-- !query analysis
Project [key#x, value#x]
+- Generate explode(map()), true, [key#x, value#x]
+- OneRowRelation
-- !query
select * from range(2) join explode(array(1, 2))
-- !query analysis
Project [id#xL, col#x]
+- Join Inner
:- Range (0, 2, step=1)
+- Generate explode(array(1, 2)), false, [col#x]
+- OneRowRelation
-- !query
select * from range(2) join explode_outer(array())
-- !query analysis
Project [id#xL, col#x]
+- Join Inner
:- Range (0, 2, step=1)
+- Generate explode(array()), true, [col#x]
+- OneRowRelation
-- !query
select * from inline(array(struct(1, 'a'), struct(2, 'b')))
-- !query analysis
Project [col1#x, col2#x]
+- Generate inline(array(struct(col1, 1, col2, a), struct(col1, 2, col2, b))), false, [col1#x, col2#x]
+- OneRowRelation
-- !query
select * from inline(array(struct(1, 'a'), struct(2, 'b'))) t(x, y)
-- !query analysis
Project [x#x, y#x]
+- SubqueryAlias t
+- Project [col1#x AS x#x, col2#x AS y#x]
+- Generate inline(array(struct(col1, 1, col2, a), struct(col1, 2, col2, b))), false, [col1#x, col2#x]
+- OneRowRelation
-- !query
select * from inline(array_remove(array(struct(1, 'a')), struct(1, 'a')))
-- !query analysis
Project [col1#x, col2#x]
+- Generate inline(array_remove(array(struct(col1, 1, col2, a)), struct(col1, 1, col2, a))), false, [col1#x, col2#x]
+- OneRowRelation
-- !query
select * from inline(null)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
"paramIndex" : "first",
"requiredType" : "\"ARRAY<STRUCT>\"",
"sqlExpr" : "\"inline(NULL)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 26,
"fragment" : "inline(null)"
} ]
}
-- !query
select * from inline(array(struct(1, 2), struct(2, 3))) t(a, b, c)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NUM_TABLE_VALUE_ALIASES_MISMATCH",
"sqlState" : "42826",
"messageParameters" : {
"aliasesNum" : "3",
"funcName" : "`inline`",
"outColsNum" : "2"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 66,
"fragment" : "inline(array(struct(1, 2), struct(2, 3))) t(a, b, c)"
} ]
}
-- !query
select * from inline_outer(array(struct(1, 'a'), struct(2, 'b')))
-- !query analysis
Project [col1#x, col2#x]
+- Generate inline(array(struct(col1, 1, col2, a), struct(col1, 2, col2, b))), true, [col1#x, col2#x]
+- OneRowRelation
-- !query
select * from inline_outer(array_remove(array(struct(1, 'a')), struct(1, 'a')))
-- !query analysis
Project [col1#x, col2#x]
+- Generate inline(array_remove(array(struct(col1, 1, col2, a)), struct(col1, 1, col2, a))), true, [col1#x, col2#x]
+- OneRowRelation
-- !query
select * from posexplode(array())
-- !query analysis
Project [pos#x, col#x]
+- Generate posexplode(array()), false, [pos#x, col#x]
+- OneRowRelation
-- !query
select * from posexplode(array(1, 2))
-- !query analysis
Project [pos#x, col#x]
+- Generate posexplode(array(1, 2)), false, [pos#x, col#x]
+- OneRowRelation
-- !query
select * from posexplode(array(1, 2)) t(pos, x)
-- !query analysis
Project [pos#x, x#x]
+- SubqueryAlias t
+- Project [pos#x AS pos#x, col#x AS x#x]
+- Generate posexplode(array(1, 2)), false, [pos#x, col#x]
+- OneRowRelation
-- !query
select * from posexplode(map())
-- !query analysis
Project [pos#x, key#x, value#x]
+- Generate posexplode(map()), false, [pos#x, key#x, value#x]
+- OneRowRelation
-- !query
select * from posexplode(map('a', 1, 'b', 2))
-- !query analysis
Project [pos#x, key#x, value#x]
+- Generate posexplode(map(a, 1, b, 2)), false, [pos#x, key#x, value#x]
+- OneRowRelation
-- !query
select * from posexplode(map('a', 1, 'b', 2)) t(pos, k, v)
-- !query analysis
Project [pos#x, k#x, v#x]
+- SubqueryAlias t
+- Project [pos#x AS pos#x, key#x AS k#x, value#x AS v#x]
+- Generate posexplode(map(a, 1, b, 2)), false, [pos#x, key#x, value#x]
+- OneRowRelation
-- !query
select * from posexplode(1)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"1\"",
"inputType" : "\"INT\"",
"paramIndex" : "first",
"requiredType" : "(\"ARRAY\" or \"MAP\")",
"sqlExpr" : "\"posexplode(1)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 27,
"fragment" : "posexplode(1)"
} ]
}
-- !query
select * from posexplode(1, 2)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "2",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "1",
"functionName" : "`posexplode`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 30,
"fragment" : "posexplode(1, 2)"
} ]
}
-- !query
select * from posexplode(explode(array(1)))
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS",
"sqlState" : "42K0E",
"messageParameters" : {
"expression" : "\"posexplode(explode(array(1)))\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 43,
"fragment" : "posexplode(explode(array(1)))"
} ]
}
-- !query
select * from posexplode(array(1, 2)) t(x)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NUM_TABLE_VALUE_ALIASES_MISMATCH",
"sqlState" : "42826",
"messageParameters" : {
"aliasesNum" : "1",
"funcName" : "`posexplode`",
"outColsNum" : "2"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 42,
"fragment" : "posexplode(array(1, 2)) t(x)"
} ]
}
-- !query
select * from posexplode_outer(array())
-- !query analysis
Project [pos#x, col#x]
+- Generate posexplode(array()), true, [pos#x, col#x]
+- OneRowRelation
-- !query
select * from posexplode_outer(array(1, 2))
-- !query analysis
Project [pos#x, col#x]
+- Generate posexplode(array(1, 2)), true, [pos#x, col#x]
+- OneRowRelation
-- !query
select * from posexplode_outer(map())
-- !query analysis
Project [pos#x, key#x, value#x]
+- Generate posexplode(map()), true, [pos#x, key#x, value#x]
+- OneRowRelation
-- !query
select * from posexplode_outer(map('a', 1, 'b', 2))
-- !query analysis
Project [pos#x, key#x, value#x]
+- Generate posexplode(map(a, 1, b, 2)), true, [pos#x, key#x, value#x]
+- OneRowRelation
-- !query
select * from json_tuple('{"a": 1, "b": 2}', 'a', 'b')
-- !query analysis
Project [c0#x, c1#x]
+- Generate json_tuple({"a": 1, "b": 2}, a, b), false, [c0#x, c1#x]
+- OneRowRelation
-- !query
select * from json_tuple('{"a": 1, "b": 2}', 'a', 'c')
-- !query analysis
Project [c0#x, c1#x]
+- Generate json_tuple({"a": 1, "b": 2}, a, c), false, [c0#x, c1#x]
+- OneRowRelation
-- !query
select * from json_tuple('{"a": 1, "b": 2}', 'a', 'a')
-- !query analysis
Project [c0#x, c1#x]
+- Generate json_tuple({"a": 1, "b": 2}, a, a), false, [c0#x, c1#x]
+- OneRowRelation
-- !query
select * from json_tuple('{"a": 1, "b": 2}', 'a', 'b') AS t(x, y)
-- !query analysis
Project [x#x, y#x]
+- SubqueryAlias t
+- Project [c0#x AS x#x, c1#x AS y#x]
+- Generate json_tuple({"a": 1, "b": 2}, a, b), false, [c0#x, c1#x]
+- OneRowRelation
-- !query
select * from json_tuple('{"a": bad, "b": string}', 'a', 'b')
-- !query analysis
Project [c0#x, c1#x]
+- Generate json_tuple({"a": bad, "b": string}, a, b), false, [c0#x, c1#x]
+- OneRowRelation
-- !query
select * from json_tuple()
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "0",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "> 1",
"functionName" : "`json_tuple`"
}
}
-- !query
select * from json_tuple('{"a": 1}')
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "1",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "> 1",
"functionName" : "`json_tuple`"
}
}
-- !query
select * from json_tuple('{"a": 1}', 1)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.NON_STRING_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"funcName" : "`json_tuple`",
"sqlExpr" : "\"json_tuple({\"a\": 1}, 1)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 39,
"fragment" : "json_tuple('{\"a\": 1}', 1)"
} ]
}
-- !query
select * from json_tuple('{"a": 1}', null)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.NON_STRING_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"funcName" : "`json_tuple`",
"sqlExpr" : "\"json_tuple({\"a\": 1}, NULL)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 42,
"fragment" : "json_tuple('{\"a\": 1}', null)"
} ]
}
-- !query
select * from json_tuple('{"a": 1, "b": 2}', 'a', 'b') AS t(x)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NUM_TABLE_VALUE_ALIASES_MISMATCH",
"sqlState" : "42826",
"messageParameters" : {
"aliasesNum" : "1",
"funcName" : "`json_tuple`",
"outColsNum" : "2"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 62,
"fragment" : "json_tuple('{\"a\": 1, \"b\": 2}', 'a', 'b') AS t(x)"
} ]
}
-- !query
select * from stack(1, 1, 2, 3)
-- !query analysis
Project [col0#x, col1#x, col2#x]
+- Generate stack(1, 1, 2, 3), false, [col0#x, col1#x, col2#x]
+- OneRowRelation
-- !query
select * from stack(2, 1, 2, 3)
-- !query analysis
Project [col0#x, col1#x]
+- Generate stack(2, 1, 2, 3), false, [col0#x, col1#x]
+- OneRowRelation
-- !query
select * from stack(3, 1, 2, 3) t(x)
-- !query analysis
Project [x#x]
+- SubqueryAlias t
+- Project [col0#x AS x#x]
+- Generate stack(3, 1, 2, 3), false, [col0#x]
+- OneRowRelation
-- !query
select * from stack(4, 1, 2, 3) t(x)
-- !query analysis
Project [x#x]
+- SubqueryAlias t
+- Project [col0#x AS x#x]
+- Generate stack(4, 1, 2, 3), false, [col0#x]
+- OneRowRelation
-- !query
select * from stack(2, 1, 1.1, 'a', 2, 2.2, 'b') t(a, b, c)
-- !query analysis
Project [a#x, b#x, c#x]
+- SubqueryAlias t
+- Project [col0#x AS a#x, col1#x AS b#x, col2#x AS c#x]
+- Generate stack(2, 1, 1.1, a, 2, 2.2, b), false, [col0#x, col1#x, col2#x]
+- OneRowRelation
-- !query
select * from stack(2, 1, 1.1, null, 2, null, 'b') t(a, b, c)
-- !query analysis
Project [a#x, b#x, c#x]
+- SubqueryAlias t
+- Project [col0#x AS a#x, col1#x AS b#x, col2#x AS c#x]
+- Generate stack(2, 1, 1.1, null, 2, null, b), false, [col0#x, col1#x, col2#x]
+- OneRowRelation
-- !query
select * from stack()
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "0",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "> 1",
"functionName" : "`stack`"
}
}
-- !query
select * from stack(2, 1, 2, 3) t(a, b, c)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "NUM_TABLE_VALUE_ALIASES_MISMATCH",
"sqlState" : "42826",
"messageParameters" : {
"aliasesNum" : "3",
"funcName" : "`stack`",
"outColsNum" : "2"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 42,
"fragment" : "stack(2, 1, 2, 3) t(a, b, c)"
} ]
}
-- !query
select * from stack(2, 1, '1.1', 'a', 2, 2.2, 'b')
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.STACK_COLUMN_DIFF_TYPES",
"sqlState" : "42K09",
"messageParameters" : {
"columnIndex" : "1",
"leftParamIndex" : "2",
"leftType" : "\"STRING\"",
"rightParamIndex" : "5",
"rightType" : "\"DECIMAL(2,1)\"",
"sqlExpr" : "\"stack(2, 1, 1.1, a, 2, 2.2, b)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 50,
"fragment" : "stack(2, 1, '1.1', 'a', 2, 2.2, 'b')"
} ]
}
-- !query
select * from stack(2, explode(array(1, 2, 3)))
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS",
"sqlState" : "42K0E",
"messageParameters" : {
"expression" : "\"stack(2, explode(array(1, 2, 3)))\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 15,
"stopIndex" : 47,
"fragment" : "stack(2, explode(array(1, 2, 3)))"
} ]
}