-- Automatically generated by SQLQueryTestSuite
-- !query
CREATE TABLE num_data (id int, val decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_data`, false
-- !query
CREATE TABLE num_exp_add (id1 int, id2 int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_add`, false
-- !query
CREATE TABLE num_exp_sub (id1 int, id2 int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_sub`, false
-- !query
CREATE TABLE num_exp_div (id1 int, id2 int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_div`, false
-- !query
CREATE TABLE num_exp_mul (id1 int, id2 int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_mul`, false
-- !query
CREATE TABLE num_exp_sqrt (id int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_sqrt`, false
-- !query
CREATE TABLE num_exp_ln (id int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_ln`, false
-- !query
CREATE TABLE num_exp_log10 (id int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_log10`, false
-- !query
CREATE TABLE num_exp_power_10_ln (id int, expected decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_exp_power_10_ln`, false
-- !query
CREATE TABLE num_result (id1 int, id2 int, result decimal(38,10)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_result`, false
-- !query
INSERT INTO num_exp_add VALUES (0,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,2,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,2,34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,2,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,2,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,3,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,3,-4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,3,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,3,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,4,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,4,-7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,4,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,4,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,5,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,5,-16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,5,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,5,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,6,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,6,-93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,6,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,6,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,7,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,7,83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,7,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,7,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,8,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,8,-74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,8,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,8,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (0,9,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (0,9,24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (0,9,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (0,9,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,2,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,2,34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,2,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,2,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,3,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,3,-4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,3,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,3,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,4,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,4,-7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,4,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,4,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,5,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,5,-16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,5,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,5,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,6,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,6,-93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,6,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,6,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,7,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,7,83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,7,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,7,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,8,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,8,-74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,8,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,8,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (1,9,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (1,9,24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (1,9,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (1,9,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,0,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,0,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,1,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,1,-34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,2,-68676984.430794094)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,2,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,2,1179132047626883.596862135856320209)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,2,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,3,-34338487.905397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,3,-34338496.525397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,3,-147998901.44836127257)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,3,-7967167.56737750510440835266)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,4,-26539030.803497047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,4,-42137953.627297047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,4,-267821744976817.8111137106593)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,4,-4.40267480046830116685)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,5,-34322095.176906047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,5,-34354889.253888047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,5,-563049578578.769242506736077)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,5,-2094.18866914563535496429)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,6,-34244590.637766787)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,6,-34432393.793027307)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,6,-3224438592470.18449811926184222)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,6,-365.68599891479766440940)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,7,-117366977.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,7,48689992.784602953)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,7,2851072985828710.485883795)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,7,.41357483778485235518)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,8,-34263611.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,8,-34413373.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,8,-2571300635581.146276407)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,8,-458.57416721727870888476)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (2,9,-59265296.260444467)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (2,9,-9411688.170349627)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (2,9,855948866655588.453741509242968740)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (2,9,1.37757299946438931811)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,0,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,0,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,1,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,1,4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,2,-34338487.905397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,2,34338496.525397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,2,-147998901.44836127257)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,2,-.00000012551512084352)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,3,8.62)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,3,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,3,18.5761)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,3,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,4,7799465.7219)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,4,-7799457.1019)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,4,33615678.685289)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,4,.00000055260225961552)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,5,16401.348491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,5,-16392.728491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,5,70671.23589621)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,5,.00026285234387695504)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,6,93905.88763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,6,-93897.26763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,6,404715.7995864206)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,6,.00004589912234457595)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,7,-83028480.69)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,7,83028489.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,7,-357852770.35)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,7,-.00000005190989574240)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,8,74885.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,8,-74876.69)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,8,322737.11)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,8,.00005755799201399553)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (3,9,-24926799.735047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (3,9,24926808.355047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (3,9,-107434525.43415438020)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (3,9,-.00000017290624149854)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,0,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,0,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,1,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,1,7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,2,-26539030.803497047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,2,42137953.627297047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,2,-267821744976817.8111137106593)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,2,-.22713465002993920385)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,3,7799465.7219)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,3,7799457.1019)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,3,33615678.685289)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,3,1809619.81714617169373549883)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,4,15598922.8238)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,4,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,4,60831598315717.14146161)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,4,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,5,7815858.450391)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,5,7783064.373409)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,5,127888068979.9935054429)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,5,475.66281046305802686061)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,6,7893362.98953026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,6,7705559.83426974)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,6,732381731243.745115764094)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,6,83.05996138436129499606)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,7,-75229023.5881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,7,90827946.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,7,-647577464846017.9715)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,7,-.09393717604145131637)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,8,7874342.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,8,7724580.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,8,584031469984.4839)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,8,104.15808298366741897143)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (4,9,-17127342.633147420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (4,9,32726265.456947420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (4,9,-194415646271340.1815956522980)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (4,9,-.31289456112403769409)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,0,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,0,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,1,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,1,16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,2,-34322095.176906047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,2,34354889.253888047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,2,-563049578578.769242506736077)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,2,-.00047751189505192446)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,3,16401.348491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,3,16392.728491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,3,70671.23589621)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,3,3804.41728329466357308584)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,4,7815858.450391)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,4,-7783064.373409)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,4,127888068979.9935054429)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,4,.00210232958726897192)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,5,32794.076982)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,5,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,5,268862871.275335557081)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,5,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,6,110298.61612126)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,6,-77504.53913926)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,6,1539707782.76899778633766)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,6,.17461941433576102689)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,7,-83012087.961509)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,7,83044882.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,7,-1361421264394.416135)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,7,-.00019748690453643710)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,8,91278.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,8,-58483.961509)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,8,1227826639.244571)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,8,.21897461960978085228)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (5,9,-24910407.006556420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (5,9,24943201.083538420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (5,9,-408725765384.257043660243220)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (5,9,-.00065780749354660427)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,0,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,0,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,1,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,1,93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,2,-34244590.637766787)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,2,34432393.793027307)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,2,-3224438592470.18449811926184222)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,2,-.00273458651128995823)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,3,93905.88763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,3,93897.26763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,3,404715.7995864206)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,3,21786.90896293735498839907)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,4,7893362.98953026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,4,-7705559.83426974)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,4,732381731243.745115764094)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,4,.01203949512295682469)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,5,110298.61612126)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,5,77504.53913926)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,5,1539707782.76899778633766)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,5,5.72674008674192359679)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,6,187803.15526052)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,6,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,6,8817506281.4517452372676676)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,6,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,7,-82934583.42236974)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,7,83122386.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,7,-7796505729750.37795610)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,7,-.00113095617281538980)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,8,168782.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,8,19020.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,8,7031444034.53149906)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,8,1.25401073209839612184)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (6,9,-24832902.467417160)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (6,9,25020705.622677680)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (6,9,-2340666225110.29929521292692920)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (6,9,-.00376709254265256789)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,0,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,0,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,1,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,1,-83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,2,-117366977.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,2,-48689992.784602953)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,2,2851072985828710.485883795)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,2,2.41794207151503385700)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,3,-83028480.69)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,3,-83028489.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,3,-357852770.35)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,3,-19264149.65197215777262180974)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,4,-75229023.5881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,4,-90827946.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,4,-647577464846017.9715)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,4,-10.64541262725136247686)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,5,-83012087.961509)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,5,-83044882.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,5,-1361421264394.416135)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,5,-5063.62688881730941836574)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,6,-82934583.42236974)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,6,-83122386.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,6,-7796505729750.37795610)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,6,-884.20756174009028770294)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,7,-166056970)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,7,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,7,6893729321395225)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#xL as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#xL]
-- !query
INSERT INTO num_exp_div VALUES (7,7,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,8,-82953604)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,8,-83103366)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,8,-6217255985285)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#xL as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#xL]
-- !query
INSERT INTO num_exp_div VALUES (7,8,-1108.80577182462841041118)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (7,9,-107955289.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (7,9,-58101680.954952580)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (7,9,2069634775752159.035758700)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (7,9,3.33089171198810413382)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,0,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,0,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,1,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,1,74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,2,-34263611.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,2,34413373.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,2,-2571300635581.146276407)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,2,-.00218067233500788615)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,3,74885.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,3,74876.69)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,3,322737.11)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,3,17373.78190255220417633410)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,4,7874342.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,4,-7724580.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,4,584031469984.4839)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,4,.00960079113741758956)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,5,91278.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,5,58483.961509)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,5,1227826639.244571)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,5,4.56673929509287019456)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,6,168782.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,6,-19020.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,6,7031444034.53149906)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,6,.79744134113322314424)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,7,-82953604)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,7,83103366)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,7,-6217255985285)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#xL as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#xL]
-- !query
INSERT INTO num_exp_div VALUES (8,7,-.00090187120721280172)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,8,149762)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,8,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,8,5607164161)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#xL as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#xL]
-- !query
INSERT INTO num_exp_div VALUES (8,8,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (8,9,-24851923.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (8,9,25001685.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (8,9,-1866544013697.195857020)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (8,9,-.00300403532938582735)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,0,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,0,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,1,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,1,-24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,2,-59265296.260444467)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,2,9411688.170349627)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,2,855948866655588.453741509242968740)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,2,.72591434384152961526)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,3,-24926799.735047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,3,-24926808.355047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,3,-107434525.43415438020)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,3,-5783481.21694835730858468677)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,4,-17127342.633147420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,4,-32726265.456947420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,4,-194415646271340.1815956522980)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,4,-3.19596478892958416484)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,5,-24910407.006556420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,5,-24943201.083538420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,5,-408725765384.257043660243220)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,5,-1520.20159364322004505807)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,6,-24832902.467417160)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,6,-25020705.622677680)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,6,-2340666225110.29929521292692920)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,6,-265.45671195426965751280)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,7,-107955289.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,7,58101680.954952580)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,7,2069634775752159.035758700)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,7,.30021990699995814689)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,8,-24851923.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,8,-25001685.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,8,-1866544013697.195857020)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,8,-332.88556569820675471748)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_add VALUES (9,9,-49853608.090094840)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_add, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_add], Append, `spark_catalog`.`default`.`num_exp_add`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_add), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sub VALUES (9,9,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sub, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sub], Append, `spark_catalog`.`default`.`num_exp_sub`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sub), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_mul VALUES (9,9,621345559900192.420120630048656400)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_mul, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_mul], Append, `spark_catalog`.`default`.`num_exp_mul`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_mul), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_div VALUES (9,9,1.00000000000000000000)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_div, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_div], Append, `spark_catalog`.`default`.`num_exp_div`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_div), [id1, id2, expected]
+- Project [cast(col1#x as int) AS id1#x, cast(col2#x as int) AS id2#x, cast(col3#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x, col3#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (0,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (1,0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (2,5859.90547836712524903505)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (3,2.07605394920266944396)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (4,2792.75158435189147418923)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (5,128.05092147657509145473)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (6,306.43364311096782703406)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (7,9111.99676251039939975230)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (8,273.64392922189960397542)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_sqrt VALUES (9,4992.67503899937593364766)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt], Append, `spark_catalog`.`default`.`num_exp_sqrt`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_sqrt), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (2,17.35177750493897715514)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (3,1.46093790411565641971)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (4,15.86956523951936572464)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (5,9.70485601768871834038)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (6,11.45000246622944403127)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (7,18.23469429965478772991)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (8,11.22365546576315513668)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_ln VALUES (9,17.03145425013166006962)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_ln], Append, `spark_catalog`.`default`.`num_exp_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (2,7.53578122160797276459)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (3,.63447727016073160075)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (4,6.89206461372691743345)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (5,4.21476541614777768626)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (6,4.97267288886207207671)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (7,7.91922711353275546914)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (8,4.87437163556421004138)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_log10 VALUES (9,7.39666659961986567059)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_log10, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_log10], Append, `spark_catalog`.`default`.`num_exp_log10`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_log10), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (0,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (1,double('NaN'))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (2,224790267919917955.13261618583642653184)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (3,28.90266599445155957393)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (4,7405685069594999.07733999469386277636)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (5,5068226527.32127265408584640098)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (6,281839893606.99372343357047819067)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)
-- !query analysis
org.apache.spark.SparkArithmeticException
{
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
"sqlState" : "22003",
"messageParameters" : {
"maxPrecision" : "38",
"precision" : "39"
}
}
-- !query
INSERT INTO num_exp_power_10_ln VALUES (8,167361463828.07491320069016125952)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_exp_power_10_ln VALUES (9,107511333880052007.04141124673540337457)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln], Append, `spark_catalog`.`default`.`num_exp_power_10_ln`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_exp_power_10_ln), [id, expected]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS expected#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (0, 0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (1, 0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (2, -34338492.215397047)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (3, 4.31)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (4, 7799461.4119)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (5, 16397.038491)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (6, 93901.57763026)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (7, -83028485)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (8, 74881)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO num_data VALUES (9, -24926804.045047420)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_data, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_data], Append, `spark_catalog`.`default`.`num_data`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_data), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(38,10)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
SELECT * FROM num_data
-- !query analysis
Project [id#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, t1.val + t2.val
FROM num_data t1, num_data t2
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast((val + val)#x as decimal(38,10)) AS result#x]
   +- Project [id#x, id#x, (val#x + val#x) AS (val + val)#x]
      +- Join Inner
         :- SubqueryAlias t1
         :  +- SubqueryAlias spark_catalog.default.num_data
         :     +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
         +- SubqueryAlias t2
            +- SubqueryAlias spark_catalog.default.num_data
               +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, t2.expected
FROM num_result t1, num_exp_add t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, id2#x, result#x, expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = expected#x))
   +- Join Inner
      :- SubqueryAlias t1
      :  +- SubqueryAlias spark_catalog.default.num_result
      :     +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
      +- SubqueryAlias t2
         +- SubqueryAlias spark_catalog.default.num_exp_add
            +- Relation spark_catalog.default.num_exp_add[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, round(t1.val + t2.val, 10)
FROM num_data t1, num_data t2
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast(round((val + val), 10)#x as decimal(38,10)) AS result#x]
   +- Project [id#x, id#x, round((val#x + val#x), 10) AS round((val + val), 10)#x]
      +- Join Inner
         :- SubqueryAlias t1
         :  +- SubqueryAlias spark_catalog.default.num_data
         :     +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
         +- SubqueryAlias t2
            +- SubqueryAlias spark_catalog.default.num_data
               +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, round(t2.expected, 10) as expected
FROM num_result t1, num_exp_add t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != round(t2.expected, 10)
-- !query analysis
Project [id1#x, id2#x, result#x, round(expected#x, 10) AS expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = round(expected#x, 10)))
   +- Join Inner
      :- SubqueryAlias t1
      :  +- SubqueryAlias spark_catalog.default.num_result
      :     +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
      +- SubqueryAlias t2
         +- SubqueryAlias spark_catalog.default.num_exp_add
            +- Relation spark_catalog.default.num_exp_add[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, t1.val - t2.val
FROM num_data t1, num_data t2
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast((val - val)#x as decimal(38,10)) AS result#x]
   +- Project [id#x, id#x, (val#x - val#x) AS (val - val)#x]
      +- Join Inner
         :- SubqueryAlias t1
         :  +- SubqueryAlias spark_catalog.default.num_data
         :     +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
         +- SubqueryAlias t2
            +- SubqueryAlias spark_catalog.default.num_data
               +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, t2.expected
FROM num_result t1, num_exp_sub t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, id2#x, result#x, expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = expected#x))
   +- Join Inner
      :- SubqueryAlias t1
      :  +- SubqueryAlias spark_catalog.default.num_result
      :     +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
      +- SubqueryAlias t2
         +- SubqueryAlias spark_catalog.default.num_exp_sub
            +- Relation spark_catalog.default.num_exp_sub[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, round(t1.val - t2.val, 40)
FROM num_data t1, num_data t2
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast(round((val - val), 40)#x as decimal(38,10)) AS result#x]
   +- Project [id#x, id#x, round((val#x - val#x), 40) AS round((val - val), 40)#x]
      +- Join Inner
         :- SubqueryAlias t1
         :  +- SubqueryAlias spark_catalog.default.num_data
         :     +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
         +- SubqueryAlias t2
            +- SubqueryAlias spark_catalog.default.num_data
               +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, round(t2.expected, 40)
FROM num_result t1, num_exp_sub t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != round(t2.expected, 40)
-- !query analysis
Project [id1#x, id2#x, result#x, round(expected#x, 40) AS round(expected, 40)#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = round(expected#x, 40)))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_sub
+- Relation spark_catalog.default.num_exp_sub[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, t1.val, t2.val, t1.val * t2.val
FROM num_data t1, num_data t2
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "INSERT_COLUMN_ARITY_MISMATCH.TOO_MANY_DATA_COLUMNS",
"sqlState" : "21S01",
"messageParameters" : {
"dataColumns" : "`id`, `id`, `val`, `val`, `(val * val)`",
"tableColumns" : "`id1`, `id2`, `result`",
"tableName" : "`spark_catalog`.`default`.`num_result`"
}
}
-- !query
SELECT t1.id1, t1.id2, t1.result, t2.expected
FROM num_result t1, num_exp_mul t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, id2#x, result#x, expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_mul
+- Relation spark_catalog.default.num_exp_mul[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, round(t1.val * t2.val, 30)
FROM num_data t1, num_data t2
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast(round((val * val), 30)#x as decimal(38,10)) AS result#x]
+- Project [id#x, id#x, round((val#x * val#x), 30) AS round((val * val), 30)#x]
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_data
: +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, round(t2.expected, 30) as expected
FROM num_result t1, num_exp_mul t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != round(t2.expected, 30)
-- !query analysis
Project [id1#x, id2#x, result#x, round(expected#x, 30) AS expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = round(expected#x, 30)))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_mul
+- Relation spark_catalog.default.num_exp_mul[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, t1.val / t2.val
FROM num_data t1, num_data t2
WHERE t2.val != '0.0'
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast((val / val)#x as decimal(38,10)) AS result#x]
+- Project [id#x, id#x, (val#x / val#x) AS (val / val)#x]
+- Filter NOT (cast(val#x as double) = cast(0.0 as double))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_data
: +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, t2.expected
FROM num_result t1, num_exp_div t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, id2#x, result#x, expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_div
+- Relation spark_catalog.default.num_exp_div[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT t1.id, t2.id, round(t1.val / t2.val, 80)
FROM num_data t1, num_data t2
WHERE t2.val != '0.0'
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(id#x as int) AS id2#x, cast(round((val / val), 80)#x as decimal(38,10)) AS result#x]
+- Project [id#x, id#x, round((val#x / val#x), 80) AS round((val / val), 80)#x]
+- Filter NOT (cast(val#x as double) = cast(0.0 as double))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_data
: +- Relation spark_catalog.default.num_data[id#x,val#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.id2, t1.result, round(t2.expected, 80) as expected
FROM num_result t1, num_exp_div t2
WHERE t1.id1 = t2.id1 AND t1.id2 = t2.id2
AND t1.result != round(t2.expected, 80)
-- !query analysis
Project [id1#x, id2#x, result#x, round(expected#x, 80) AS expected#x]
+- Filter (((id1#x = id1#x) AND (id2#x = id2#x)) AND NOT (result#x = round(expected#x, 80)))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_div
+- Relation spark_catalog.default.num_exp_div[id1#x,id2#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT id, 0, SQRT(ABS(val))
FROM num_data
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(0#x as int) AS id2#x, cast(SQRT(abs(val))#x as decimal(38,10)) AS result#x]
+- Project [id#x, 0 AS 0#x, SQRT(cast(abs(val#x) as double)) AS SQRT(abs(val))#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.result, t2.expected
FROM num_result t1, num_exp_sqrt t2
WHERE t1.id1 = t2.id
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, result#x, expected#x]
+- Filter ((id1#x = id#x) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_sqrt
+- Relation spark_catalog.default.num_exp_sqrt[id#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT id, 0, LN(ABS(val))
FROM num_data
WHERE val != '0.0'
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(0#x as int) AS id2#x, cast(ln(abs(val))#x as decimal(38,10)) AS result#x]
+- Project [id#x, 0 AS 0#x, ln(cast(abs(val#x) as double)) AS ln(abs(val))#x]
+- Filter NOT (cast(val#x as double) = cast(0.0 as double))
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.result, t2.expected
FROM num_result t1, num_exp_ln t2
WHERE t1.id1 = t2.id
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, result#x, expected#x]
+- Filter ((id1#x = id#x) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_ln
+- Relation spark_catalog.default.num_exp_ln[id#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT id, 0, LOG(cast('10' as decimal(38, 18)), ABS(val))
FROM num_data
WHERE val != '0.0'
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(0#x as int) AS id2#x, cast(LOG(CAST(10 AS DECIMAL(38,18)), abs(val))#x as decimal(38,10)) AS result#x]
+- Project [id#x, 0 AS 0#x, LOG(cast(cast(10 as decimal(38,18)) as double), cast(abs(val#x) as double)) AS LOG(CAST(10 AS DECIMAL(38,18)), abs(val))#x]
+- Filter NOT (cast(val#x as double) = cast(0.0 as double))
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.result, t2.expected
FROM num_result t1, num_exp_log10 t2
WHERE t1.id1 = t2.id
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, result#x, expected#x]
+- Filter ((id1#x = id#x) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_log10
+- Relation spark_catalog.default.num_exp_log10[id#x,expected#x] parquet
-- !query
TRUNCATE TABLE num_result
-- !query analysis
TruncateTableCommand `spark_catalog`.`default`.`num_result`
-- !query
INSERT INTO num_result SELECT id, 0, POWER(cast('10' as decimal(38, 18)), LN(ABS(round(val,200))))
FROM num_data
WHERE val != '0.0'
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_result, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_result], Append, `spark_catalog`.`default`.`num_result`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_result), [id1, id2, result]
+- Project [cast(id#x as int) AS id1#x, cast(0#x as int) AS id2#x, cast(POWER(CAST(10 AS DECIMAL(38,18)), ln(abs(round(val, 200))))#x as decimal(38,10)) AS result#x]
+- Project [id#x, 0 AS 0#x, POWER(cast(cast(10 as decimal(38,18)) as double), ln(cast(abs(round(val#x, 200)) as double))) AS POWER(CAST(10 AS DECIMAL(38,18)), ln(abs(round(val, 200))))#x]
+- Filter NOT (cast(val#x as double) = cast(0.0 as double))
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT t1.id1, t1.result, t2.expected
FROM num_result t1, num_exp_power_10_ln t2
WHERE t1.id1 = t2.id
AND t1.result != t2.expected
-- !query analysis
Project [id1#x, result#x, expected#x]
+- Filter ((id1#x = id#x) AND NOT (result#x = expected#x))
+- Join Inner
:- SubqueryAlias t1
: +- SubqueryAlias spark_catalog.default.num_result
: +- Relation spark_catalog.default.num_result[id1#x,id2#x,result#x] parquet
+- SubqueryAlias t2
+- SubqueryAlias spark_catalog.default.num_exp_power_10_ln
+- Relation spark_catalog.default.num_exp_power_10_ln[id#x,expected#x] parquet
-- !query
SELECT AVG(val) FROM num_data
-- !query analysis
Aggregate [avg(val#x) AS avg(val)#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
CREATE TABLE fract_only (id int, val decimal(4,4)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`fract_only`, false
-- !query
INSERT INTO fract_only VALUES (1, 0.0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO fract_only VALUES (2, 0.1)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO fract_only VALUES (4, -0.9999)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO fract_only VALUES (5, 0.99994)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO fract_only VALUES (7, 0.00001)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
INSERT INTO fract_only VALUES (8, 0.00017)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/fract_only, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/fract_only], Append, `spark_catalog`.`default`.`fract_only`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/fract_only), [id, val]
+- Project [cast(col1#x as int) AS id#x, cast(col2#x as decimal(4,4)) AS val#x]
+- LocalRelation [col1#x, col2#x]
-- !query
SELECT * FROM fract_only
-- !query analysis
Project [id#x, val#x]
+- SubqueryAlias spark_catalog.default.fract_only
+- Relation spark_catalog.default.fract_only[id#x,val#x] parquet
-- !query
DROP TABLE fract_only
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.fract_only
-- !query
SELECT decimal(double('NaN'))
-- !query analysis
Project [cast(cast(NaN as double) as decimal(10,0)) AS NaN#x]
+- OneRowRelation
-- !query
SELECT decimal(double('Infinity'))
-- !query analysis
Project [cast(cast(Infinity as double) as decimal(10,0)) AS Infinity#x]
+- OneRowRelation
-- !query
SELECT decimal(double('-Infinity'))
-- !query analysis
Project [cast(cast(-Infinity as double) as decimal(10,0)) AS -Infinity#x]
+- OneRowRelation
-- !query
SELECT decimal(float('NaN'))
-- !query analysis
Project [cast(cast(NaN as float) as decimal(10,0)) AS NaN#x]
+- OneRowRelation
-- !query
SELECT decimal(float('Infinity'))
-- !query analysis
Project [cast(cast(Infinity as float) as decimal(10,0)) AS Infinity#x]
+- OneRowRelation
-- !query
SELECT decimal(float('-Infinity'))
-- !query analysis
Project [cast(cast(-Infinity as float) as decimal(10,0)) AS -Infinity#x]
+- OneRowRelation
-- !query
CREATE TABLE ceil_floor_round (a decimal(38, 18)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`ceil_floor_round`, false
-- !query
INSERT INTO ceil_floor_round VALUES (-5.5)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (-5.499999)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (9.5)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (9.4999999)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (0.0)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (0.0000001)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO ceil_floor_round VALUES (-0.000001)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/ceil_floor_round, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/ceil_floor_round], Append, `spark_catalog`.`default`.`ceil_floor_round`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/ceil_floor_round), [a]
+- Project [cast(col1#x as decimal(38,18)) AS a#x]
+- LocalRelation [col1#x]
-- !query
SELECT a, ceil(a), ceiling(a), floor(a), round(a) FROM ceil_floor_round
-- !query analysis
Project [a#x, CEIL(a#x) AS CEIL(a)#x, ceiling(a#x) AS ceiling(a)#x, FLOOR(a#x) AS FLOOR(a)#x, round(a#x, 0) AS round(a, 0)#x]
+- SubqueryAlias spark_catalog.default.ceil_floor_round
+- Relation spark_catalog.default.ceil_floor_round[a#x] parquet
-- !query
DROP TABLE ceil_floor_round
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.ceil_floor_round
-- !query
SELECT width_bucket(5.0, 3.0, 4.0, 0)
-- !query analysis
Project [width_bucket(cast(5.0 as double), cast(3.0 as double), cast(4.0 as double), cast(0 as bigint)) AS width_bucket(5.0, 3.0, 4.0, 0)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(5.0, 3.0, 4.0, -5)
-- !query analysis
Project [width_bucket(cast(5.0 as double), cast(3.0 as double), cast(4.0 as double), cast(-5 as bigint)) AS width_bucket(5.0, 3.0, 4.0, -5)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(3.5, 3.0, 3.0, 888)
-- !query analysis
Project [width_bucket(cast(3.5 as double), cast(3.0 as double), cast(3.0 as double), cast(888 as bigint)) AS width_bucket(3.5, 3.0, 3.0, 888)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double(5.0), double(3.0), double(4.0), 0)
-- !query analysis
Project [width_bucket(cast(5.0 as double), cast(3.0 as double), cast(4.0 as double), cast(0 as bigint)) AS width_bucket(5.0, 3.0, 4.0, 0)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double(5.0), double(3.0), double(4.0), -5)
-- !query analysis
Project [width_bucket(cast(5.0 as double), cast(3.0 as double), cast(4.0 as double), cast(-5 as bigint)) AS width_bucket(5.0, 3.0, 4.0, -5)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double(3.5), double(3.0), double(3.0), 888)
-- !query analysis
Project [width_bucket(cast(3.5 as double), cast(3.0 as double), cast(3.0 as double), cast(888 as bigint)) AS width_bucket(3.5, 3.0, 3.0, 888)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket('NaN', 3.0, 4.0, 888)
-- !query analysis
Project [width_bucket(cast(NaN as double), cast(3.0 as double), cast(4.0 as double), cast(888 as bigint)) AS width_bucket(NaN, 3.0, 4.0, 888)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double(0), 'NaN', double(4.0), 888)
-- !query analysis
Project [width_bucket(cast(0 as double), cast(NaN as double), cast(4.0 as double), cast(888 as bigint)) AS width_bucket(0, NaN, 4.0, 888)#xL]
+- OneRowRelation
-- !query
CREATE TABLE width_bucket_test (operand_num decimal(30,15), operand_f8 double) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`width_bucket_test`, false
-- !query
INSERT INTO width_bucket_test VALUES
(-5.2, -5.2),
(-0.0000000001, -0.0000000001),
(0.000000000001, 0.000000000001),
(1, 1),
(1.99999999999999, 1.99999999999999),
(2, 2),
(2.00000000000001, 2.00000000000001),
(3, 3),
(4, 4),
(4.5, 4.5),
(5, 5),
(5.5, 5.5),
(6, 6),
(7, 7),
(8, 8),
(9, 9),
(9.99999999999999, 9.99999999999999),
(10, 10),
(10.0000000000001, 10.0000000000001)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/width_bucket_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/width_bucket_test], Append, `spark_catalog`.`default`.`width_bucket_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/width_bucket_test), [operand_num, operand_f8]
+- Project [cast(col1#x as decimal(30,15)) AS operand_num#x, cast(col2#x as double) AS operand_f8#x]
+- LocalRelation [col1#x, col2#x]
-- !query
SELECT
operand_num,
width_bucket(operand_num, 0, 10, 5) AS wb_1,
width_bucket(operand_f8, 0, 10, 5) AS wb_1f,
width_bucket(operand_num, 10, 0, 5) AS wb_2,
width_bucket(operand_f8, 10, 0, 5) AS wb_2f,
width_bucket(operand_num, 2, 8, 4) AS wb_3,
width_bucket(operand_f8, 2, 8, 4) AS wb_3f,
width_bucket(operand_num, 5.0, 5.5, 20) AS wb_4,
width_bucket(operand_f8, 5.0, 5.5, 20) AS wb_4f,
width_bucket(operand_num, -25, 25, 10) AS wb_5,
width_bucket(operand_f8, -25, 25, 10) AS wb_5f
FROM width_bucket_test
ORDER BY operand_num ASC
-- !query analysis
Sort [operand_num#x ASC NULLS FIRST], true
+- Project [operand_num#x, width_bucket(cast(operand_num#x as double), cast(0 as double), cast(10 as double), cast(5 as bigint)) AS wb_1#xL, width_bucket(operand_f8#x, cast(0 as double), cast(10 as double), cast(5 as bigint)) AS wb_1f#xL, width_bucket(cast(operand_num#x as double), cast(10 as double), cast(0 as double), cast(5 as bigint)) AS wb_2#xL, width_bucket(operand_f8#x, cast(10 as double), cast(0 as double), cast(5 as bigint)) AS wb_2f#xL, width_bucket(cast(operand_num#x as double), cast(2 as double), cast(8 as double), cast(4 as bigint)) AS wb_3#xL, width_bucket(operand_f8#x, cast(2 as double), cast(8 as double), cast(4 as bigint)) AS wb_3f#xL, width_bucket(cast(operand_num#x as double), cast(5.0 as double), cast(5.5 as double), cast(20 as bigint)) AS wb_4#xL, width_bucket(operand_f8#x, cast(5.0 as double), cast(5.5 as double), cast(20 as bigint)) AS wb_4f#xL, width_bucket(cast(operand_num#x as double), cast(-25 as double), cast(25 as double), cast(10 as bigint)) AS wb_5#xL, width_bucket(operand_f8#x, cast(-25 as double), cast(25 as double), cast(10 as bigint)) AS wb_5f#xL]
+- SubqueryAlias spark_catalog.default.width_bucket_test
+- Relation spark_catalog.default.width_bucket_test[operand_num#x,operand_f8#x] parquet
-- !query
SELECT width_bucket(double(0.0), double('Infinity'), 5, 10)
-- !query analysis
Project [width_bucket(cast(0.0 as double), cast(Infinity as double), cast(5 as double), cast(10 as bigint)) AS width_bucket(0.0, Infinity, 5, 10)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double(0.0), 5, double('-Infinity'), 20)
-- !query analysis
Project [width_bucket(cast(0.0 as double), cast(5 as double), cast(-Infinity as double), cast(20 as bigint)) AS width_bucket(0.0, 5, -Infinity, 20)#xL]
+- OneRowRelation
-- !query
SELECT width_bucket(double('Infinity'), 1, 10, 10),
width_bucket(double('-Infinity'), 1, 10, 10)
-- !query analysis
Project [width_bucket(cast(Infinity as double), cast(1 as double), cast(10 as double), cast(10 as bigint)) AS width_bucket(Infinity, 1, 10, 10)#xL, width_bucket(cast(-Infinity as double), cast(1 as double), cast(10 as double), cast(10 as bigint)) AS width_bucket(-Infinity, 1, 10, 10)#xL]
+- OneRowRelation
-- !query
DROP TABLE width_bucket_test
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.width_bucket_test
-- !query
SELECT '' AS to_char_3, to_char(val, '9999999999999999.999999999999999PR'), val
FROM num_data
-- !query analysis
Project [ AS to_char_3#x, to_char(val#x, 9999999999999999.999999999999999PR) AS to_char(val, 9999999999999999.999999999999999PR)#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT '' AS to_char_4, to_char(val, '9999999999999999.999999999999999S'), val
FROM num_data
-- !query analysis
Project [ AS to_char_4#x, to_char(val#x, 9999999999999999.999999999999999S) AS to_char(val, 9999999999999999.999999999999999S)#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT '' AS to_char_5, to_char(val, 'MI9999999999999999.999999999999999'), val FROM num_data
-- !query analysis
Project [ AS to_char_5#x, to_char(val#x, MI9999999999999999.999999999999999) AS to_char(val, MI9999999999999999.999999999999999)#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT '' AS to_char_9, to_char(val, '0999999999999999.999999999999999'), val FROM num_data
-- !query analysis
Project [ AS to_char_9#x, to_char(val#x, 0999999999999999.999999999999999) AS to_char(val, 0999999999999999.999999999999999)#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT '' AS to_char_10, to_char(val, 'S0999999999999999.999999999999999'), val FROM num_data
-- !query analysis
Project [ AS to_char_10#x, to_char(val#x, S0999999999999999.999999999999999) AS to_char(val, S0999999999999999.999999999999999)#x, val#x]
+- SubqueryAlias spark_catalog.default.num_data
+- Relation spark_catalog.default.num_data[id#x,val#x] parquet
-- !query
SELECT '' AS to_number_1, to_number('-34,338,492', '99G999G999')
-- !query analysis
Project [ AS to_number_1#x, to_number(-34,338,492, 99G999G999) AS to_number(-34,338,492, 99G999G999)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_2, to_number('-34,338,492.654,878', '99G999G999D999G999')
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "INVALID_FORMAT.THOUSANDS_SEPS_MUST_BEFORE_DEC",
"sqlState" : "42601",
"messageParameters" : {
"format" : "'99G999G999D999G999'"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 28,
"stopIndex" : 81,
"fragment" : "to_number('-34,338,492.654,878', '99G999G999D999G999')"
} ]
}
-- !query
SELECT '' AS to_number_4, to_number('0.00001-', '9.999999S')
-- !query analysis
Project [ AS to_number_4#x, to_number(0.00001-, 9.999999S) AS to_number(0.00001-, 9.999999S)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_9, to_number('.0', '99999999.99999999')
-- !query analysis
Project [ AS to_number_9#x, to_number(.0, 99999999.99999999) AS to_number(.0, 99999999.99999999)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_10, to_number('0', '99.99')
-- !query analysis
Project [ AS to_number_10#x, to_number(0, 99.99) AS to_number(0, 99.99)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_12, to_number('.01-', '99.99S')
-- !query analysis
Project [ AS to_number_12#x, to_number(.01-, 99.99S) AS to_number(.01-, 99.99S)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_14, to_number('34,50','999,99')
-- !query analysis
Project [ AS to_number_14#x, to_number(34,50, 999,99) AS to_number(34,50, 999,99)#x]
+- OneRowRelation
-- !query
SELECT '' AS to_number_15, to_number('123,000','999G')
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "INVALID_FORMAT.CONT_THOUSANDS_SEPS",
"sqlState" : "42601",
"messageParameters" : {
"format" : "'999G'"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 28,
"stopIndex" : 54,
"fragment" : "to_number('123,000','999G')"
} ]
}
-- !query
SELECT '' AS to_number_16, to_number('123456','999G999')
-- !query analysis
Project [ AS to_number_16#x, to_number(123456, 999G999) AS to_number(123456, 999G999)#x]
+- OneRowRelation
-- !query
CREATE TABLE num_input_test (n1 decimal(38, 18)) USING parquet
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`num_input_test`, false
-- !query
INSERT INTO num_input_test VALUES (double(trim(' 123')))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_input_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_input_test], Append, `spark_catalog`.`default`.`num_input_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_input_test), [n1]
+- Project [cast(col1#x as decimal(38,18)) AS n1#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO num_input_test VALUES (double(trim(' 3245874 ')))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_input_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_input_test], Append, `spark_catalog`.`default`.`num_input_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_input_test), [n1]
+- Project [cast(col1#x as decimal(38,18)) AS n1#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO num_input_test VALUES (double(trim(' -93853')))
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_input_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_input_test], Append, `spark_catalog`.`default`.`num_input_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_input_test), [n1]
+- Project [cast(col1#x as decimal(38,18)) AS n1#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO num_input_test VALUES (555.50)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_input_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_input_test], Append, `spark_catalog`.`default`.`num_input_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_input_test), [n1]
+- Project [cast(col1#x as decimal(38,18)) AS n1#x]
+- LocalRelation [col1#x]
-- !query
INSERT INTO num_input_test VALUES (-555.50)
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in comparison]/{warehouse_dir}/num_input_test, false, Parquet, [path=file:[not included in comparison]/{warehouse_dir}/num_input_test], Append, `spark_catalog`.`default`.`num_input_test`, org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included in comparison]/{warehouse_dir}/num_input_test), [n1]
+- Project [cast(col1#x as decimal(38,18)) AS n1#x]
+- LocalRelation [col1#x]
-- !query
SELECT * FROM num_input_test
-- !query analysis
Project [n1#x]
+- SubqueryAlias spark_catalog.default.num_input_test
+- Relation spark_catalog.default.num_input_test[n1#x] parquet
-- !query
select cast(999999999999999999999 as decimal(38, 0))/1000000000000000000000
-- !query analysis
Project [(cast(999999999999999999999 as decimal(38,0)) / 1000000000000000000000) AS (CAST(999999999999999999999 AS DECIMAL(38,0)) / 1000000000000000000000)#x]
+- OneRowRelation
-- !query
select div(cast(999999999999999999999 as decimal(38, 0)),1000000000000000000000)
-- !query analysis
Project [(cast(999999999999999999999 as decimal(38,0)) div 1000000000000000000000) AS (CAST(999999999999999999999 AS DECIMAL(38,0)) div 1000000000000000000000)#xL]
+- OneRowRelation
-- !query
select mod(cast(999999999999999999999 as decimal(38, 0)),1000000000000000000000)
-- !query analysis
Project [mod(cast(999999999999999999999 as decimal(38,0)), 1000000000000000000000) AS mod(CAST(999999999999999999999 AS DECIMAL(38,0)), 1000000000000000000000)#x]
+- OneRowRelation
-- !query
select div(cast(-9999999999999999999999 as decimal(38, 0)),1000000000000000000000)
-- !query analysis
Project [(cast(-9999999999999999999999 as decimal(38,0)) div 1000000000000000000000) AS (CAST(-9999999999999999999999 AS DECIMAL(38,0)) div 1000000000000000000000)#xL]
+- OneRowRelation
-- !query
select mod(cast(-9999999999999999999999 as decimal(38, 0)),1000000000000000000000)
-- !query analysis
Project [mod(cast(-9999999999999999999999 as decimal(38,0)), 1000000000000000000000) AS mod(CAST(-9999999999999999999999 AS DECIMAL(38,0)), 1000000000000000000000)#x]
+- OneRowRelation
-- !query
select div(cast(-9999999999999999999999 as decimal(38, 0)),1000000000000000000000)*1000000000000000000000 + mod(cast(-9999999999999999999999 as decimal(38, 0)),1000000000000000000000)
-- !query analysis
Project [((cast((cast(-9999999999999999999999 as decimal(38,0)) div 1000000000000000000000) as decimal(20,0)) * 1000000000000000000000) + mod(cast(-9999999999999999999999 as decimal(38,0)), 1000000000000000000000)) AS (((CAST(-9999999999999999999999 AS DECIMAL(38,0)) div 1000000000000000000000) * 1000000000000000000000) + mod(CAST(-9999999999999999999999 AS DECIMAL(38,0)), 1000000000000000000000))#x]
+- OneRowRelation
-- !query
select mod (70.0,70)
-- !query analysis
Project [mod(70.0, cast(70 as decimal(2,0))) AS mod(70.0, 70)#x]
+- OneRowRelation
-- !query
select div (70.0,70)
-- !query analysis
Project [(70.0 div cast(70 as decimal(2,0))) AS (70.0 div 70)#xL]
+- OneRowRelation
-- !query
select 70.0 / 70
-- !query analysis
Project [(70.0 / cast(70 as decimal(2,0))) AS (70.0 / 70)#x]
+- OneRowRelation
-- !query
select 12345678901234567890 % 123
-- !query analysis
Project [(12345678901234567890 % cast(123 as decimal(3,0))) AS (12345678901234567890 % 123)#x]
+- OneRowRelation
-- !query
select exp(0.0)
-- !query analysis
Project [EXP(cast(0.0 as double)) AS EXP(0.0)#x]
+- OneRowRelation
-- !query
select exp(1.0)
-- !query analysis
Project [EXP(cast(1.0 as double)) AS EXP(1.0)#x]
+- OneRowRelation
-- !query
select exp(32.999)
-- !query analysis
Project [EXP(cast(32.999 as double)) AS EXP(32.999)#x]
+- OneRowRelation
-- !query
select exp(-32.999)
-- !query analysis
Project [EXP(cast(-32.999 as double)) AS EXP(-32.999)#x]
+- OneRowRelation
-- !query
select exp(123.456)
-- !query analysis
Project [EXP(cast(123.456 as double)) AS EXP(123.456)#x]
+- OneRowRelation
-- !query
select exp(-123.456)
-- !query analysis
Project [EXP(cast(-123.456 as double)) AS EXP(-123.456)#x]
+- OneRowRelation
-- !query
select exp(1234.5678)
-- !query analysis
Project [EXP(cast(1234.5678 as double)) AS EXP(1234.5678)#x]
+- OneRowRelation
-- !query
select * from range(cast(0.0 as decimal(38, 18)), cast(4.0 as decimal(38, 18)))
-- !query analysis
Project [id#xL]
+- Range (0, 4, step=1)
-- !query
select * from range(cast(0.1 as decimal(38, 18)), cast(4.0 as decimal(38, 18)), cast(1.3 as decimal(38, 18)))
-- !query analysis
Project [id#xL]
+- Range (0, 4, step=1)
-- !query
select * from range(cast(4.0 as decimal(38, 18)), cast(-1.5 as decimal(38, 18)), cast(-2.2 as decimal(38, 18)))
-- !query analysis
Project [id#xL]
+- Range (4, -1, step=-2)
-- !query
select ln(1.2345678e-28)
-- !query analysis
Project [ln(1.2345678E-28) AS ln(1.2345678E-28)#x]
+- OneRowRelation
-- !query
select ln(0.0456789)
-- !query analysis
Project [ln(cast(0.0456789 as double)) AS ln(0.0456789)#x]
+- OneRowRelation
-- !query
select ln(0.99949452)
-- !query analysis
Project [ln(cast(0.99949452 as double)) AS ln(0.99949452)#x]
+- OneRowRelation
-- !query
select ln(1.00049687395)
-- !query analysis
Project [ln(cast(1.00049687395 as double)) AS ln(1.00049687395)#x]
+- OneRowRelation
-- !query
select ln(1234.567890123456789)
-- !query analysis
Project [ln(cast(1234.567890123456789 as double)) AS ln(1234.567890123456789)#x]
+- OneRowRelation
-- !query
select ln(5.80397490724e5)
-- !query analysis
Project [ln(580397.490724) AS ln(580397.490724)#x]
+- OneRowRelation
-- !query
select ln(9.342536355e34)
-- !query analysis
Project [ln(9.342536355E34) AS ln(9.342536355E34)#x]
+- OneRowRelation
-- !query
select log(3.4634998359873254962349856073435545)
-- !query analysis
Project [LOG(E(), cast(3.4634998359873254962349856073435545 as double)) AS LOG(E(), 3.4634998359873254962349856073435545)#x]
+- OneRowRelation
-- !query
select log(9.999999999999999999)
-- !query analysis
Project [LOG(E(), cast(9.999999999999999999 as double)) AS LOG(E(), 9.999999999999999999)#x]
+- OneRowRelation
-- !query
select log(10.00000000000000000)
-- !query analysis
Project [LOG(E(), cast(10.00000000000000000 as double)) AS LOG(E(), 10.00000000000000000)#x]
+- OneRowRelation
-- !query
select log(10.00000000000000001)
-- !query analysis
Project [LOG(E(), cast(10.00000000000000001 as double)) AS LOG(E(), 10.00000000000000001)#x]
+- OneRowRelation
-- !query
select log(590489.45235237)
-- !query analysis
Project [LOG(E(), cast(590489.45235237 as double)) AS LOG(E(), 590489.45235237)#x]
+- OneRowRelation
-- !query
select log(0.99923, 4.58934e34)
-- !query analysis
Project [LOG(cast(0.99923 as double), 4.58934E34) AS LOG(0.99923, 4.58934E34)#x]
+- OneRowRelation
-- !query
select log(1.000016, 8.452010e18)
-- !query analysis
Project [LOG(cast(1.000016 as double), 8.45201E18) AS LOG(1.000016, 8.45201E18)#x]
+- OneRowRelation
-- !query
SELECT SUM(decimal(9999)) FROM range(1, 100001)
-- !query analysis
Aggregate [sum(cast(9999 as decimal(10,0))) AS sum(9999)#x]
+- Range (1, 100001, step=1)
-- !query
SELECT SUM(decimal(-9999)) FROM range(1, 100001)
-- !query analysis
Aggregate [sum(cast(-9999 as decimal(10,0))) AS sum(-9999)#x]
+- Range (1, 100001, step=1)
-- !query
DROP TABLE num_data
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_data
-- !query
DROP TABLE num_exp_add
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_add
-- !query
DROP TABLE num_exp_sub
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_sub
-- !query
DROP TABLE num_exp_div
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_div
-- !query
DROP TABLE num_exp_mul
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_mul
-- !query
DROP TABLE num_exp_sqrt
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_sqrt
-- !query
DROP TABLE num_exp_ln
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_ln
-- !query
DROP TABLE num_exp_log10
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_log10
-- !query
DROP TABLE num_exp_power_10_ln
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_exp_power_10_ln
-- !query
DROP TABLE num_result
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_result
-- !query
DROP TABLE num_input_test
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.num_input_test