PREHOOK: query: explain
create table summary as select *, key + 1, concat(value, value) from src limit 20
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table summary as select *, key + 1, concat(value, value) from src limit 20
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-3 depends on stages: Stage-0
Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), (UDFToDouble(key) + 1.0D) (type: double), concat(value, value) (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
sort order:
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double), _col3 (type: string)
Execution mode: vectorized
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: double), VALUE._col3 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.summary
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-3
Create Table Operator:
Create Table
columns: key string, value string, _c1 double, _c2 string
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.summary
Stage: Stage-2
Stats Work
Basic Stats Work:
PREHOOK: query: create table summary as select *, key + 1, concat(value, value) from src limit 20
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@summary
POSTHOOK: query: create table summary as select *, key + 1, concat(value, value) from src limit 20
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@summary
POSTHOOK: Lineage: summary._c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: summary._c2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: summary.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: summary.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted summary
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@summary
POSTHOOK: query: describe formatted summary
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@summary
# col_name data_type comment
key string
value string
_c1 double
_c2 string
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 20
rawDataSize 620
totalSize 640
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from summary
PREHOOK: type: QUERY
PREHOOK: Input: default@summary
#### A masked pattern was here ####
POSTHOOK: query: select * from summary
POSTHOOK: type: QUERY
POSTHOOK: Input: default@summary
#### A masked pattern was here ####
128 val_128 129.0 val_128val_128
150 val_150 151.0 val_150val_150
165 val_165 166.0 val_165val_165
193 val_193 194.0 val_193val_193
213 val_213 214.0 val_213val_213
224 val_224 225.0 val_224val_224
238 val_238 239.0 val_238val_238
255 val_255 256.0 val_255val_255
265 val_265 266.0 val_265val_265
27 val_27 28.0 val_27val_27
273 val_273 274.0 val_273val_273
278 val_278 279.0 val_278val_278
311 val_311 312.0 val_311val_311
369 val_369 370.0 val_369val_369
401 val_401 402.0 val_401val_401
409 val_409 410.0 val_409val_409
484 val_484 485.0 val_484val_484
66 val_66 67.0 val_66val_66
86 val_86 87.0 val_86val_86
98 val_98 99.0 val_98val_98
PREHOOK: query: explain
create table x4 as select *, rank() over(partition by key order by value) as rr from src1
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x4 as select *, rank() over(partition by key order by value) as rr from src1
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-3 depends on stages: Stage-0
Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src1
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string), value (type: string)
sort order: ++
Map-reduce partition columns: key (type: string)
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
PTF Operator
Function definitions:
Input definition
input alias: ptf_0
output shape: _col0: string, _col1: string
type: WINDOWING
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
order by: _col1 ASC NULLS FIRST
partition by: _col0
raw input shape:
window functions:
window function definition
alias: rank_window_0
arguments: _col1
name: rank
window function: GenericUDAFRankEvaluator
window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
isPivotResult: true
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), rank_window_0 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x4
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-3
Create Table Operator:
Create Table
columns: key string, value string, rr int
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x4
Stage: Stage-2
Stats Work
Basic Stats Work:
PREHOOK: query: create table x4 as select *, rank() over(partition by key order by value) as rr from src1
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src1
PREHOOK: Output: database:default
PREHOOK: Output: default@x4
POSTHOOK: query: create table x4 as select *, rank() over(partition by key order by value) as rr from src1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src1
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x4
POSTHOOK: Lineage: x4.key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x4.rr SCRIPT [(src1)src1.FieldSchema(name:key, type:string, comment:default), (src1)src1.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: x4.value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted x4
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x4
POSTHOOK: query: describe formatted x4
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x4
# col_name data_type comment
key string
value string
rr int
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 25
rawDataSize 242
totalSize 267
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x4
PREHOOK: type: QUERY
PREHOOK: Input: default@x4
#### A masked pattern was here ####
POSTHOOK: query: select * from x4
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x4
#### A masked pattern was here ####
1
1
1
1
val_165 5
val_193 6
val_265 7
val_27 8
val_409 9
val_484 10
128 1
146 val_146 1
150 val_150 1
213 val_213 1
224 1
238 val_238 1
255 val_255 1
273 val_273 1
278 val_278 1
311 val_311 1
369 1
401 val_401 1
406 val_406 1
66 val_66 1
98 val_98 1
PREHOOK: query: explain
create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-2 depends on stages: Stage-1
Stage-0 depends on stages: Stage-2
Stage-4 depends on stages: Stage-0
Stage-3 depends on stages: Stage-4
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string), value (type: string)
sort order: ++
Map-reduce partition columns: key (type: string)
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
PTF Operator
Function definitions:
Input definition
input alias: ptf_0
output shape: _col0: string, _col1: string
type: WINDOWING
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
order by: _col1 ASC NULLS FIRST
partition by: _col0
raw input shape:
window functions:
window function definition
alias: lead_window_0
arguments: _col0, 1
name: lead
window function: GenericUDAFLeadEvaluator
window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
isPivotResult: true
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), lead_window_0 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-2
Map Reduce
Map Operator Tree:
TableScan
Reduce Output Operator
sort order:
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Execution mode: vectorized
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x5
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-4
Create Table Operator:
Create Table
columns: key string, value string, lead1 string
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x5
Stage: Stage-3
Stats Work
Basic Stats Work:
PREHOOK: query: create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@x5
POSTHOOK: query: create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x5
POSTHOOK: Lineage: x5.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x5.lead1 SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: x5.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted x5
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x5
POSTHOOK: query: describe formatted x5
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x5
# col_name data_type comment
key string
value string
lead1 string
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 20
rawDataSize 268
totalSize 288
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x5
PREHOOK: type: QUERY
PREHOOK: Input: default@x5
#### A masked pattern was here ####
POSTHOOK: query: select * from x5
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x5
#### A masked pattern was here ####
0 val_0 0
0 val_0 0
0 val_0 NULL
10 val_10 NULL
100 val_100 100
100 val_100 NULL
103 val_103 103
103 val_103 NULL
104 val_104 104
104 val_104 NULL
105 val_105 NULL
11 val_11 NULL
111 val_111 NULL
113 val_113 113
113 val_113 NULL
114 val_114 NULL
116 val_116 NULL
118 val_118 118
118 val_118 NULL
119 val_119 119
PREHOOK: query: explain
create table x6 as select * from (select *, key + 1 from src1) a
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x6 as select * from (select *, key + 1 from src1) a
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
Stage-4
Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
Stage-8 depends on stages: Stage-0
Stage-2 depends on stages: Stage-8
Stage-3
Stage-5
Stage-6 depends on stages: Stage-5
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src1
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), (UDFToDouble(key) + 1.0D) (type: double)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x6
Execution mode: vectorized
Stage: Stage-7
Conditional Operator
Stage: Stage-4
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-8
Create Table Operator:
Create Table
columns: key string, value string, _c1 double
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x6
Stage: Stage-2
Stats Work
Basic Stats Work:
Stage: Stage-3
Map Reduce
Map Operator Tree:
TableScan
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x6
Stage: Stage-5
Map Reduce
Map Operator Tree:
TableScan
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x6
Stage: Stage-6
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
PREHOOK: query: create table x6 as select * from (select *, key + 1 from src1) a
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src1
PREHOOK: Output: database:default
PREHOOK: Output: default@x6
POSTHOOK: query: create table x6 as select * from (select *, key + 1 from src1) a
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src1
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x6
POSTHOOK: Lineage: x6._c1 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x6.key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x6.value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted x6
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x6
POSTHOOK: query: describe formatted x6
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x6
# col_name data_type comment
key string
value string
_c1 double
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 25
rawDataSize 309
totalSize 334
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x6
PREHOOK: type: QUERY
PREHOOK: Input: default@x6
#### A masked pattern was here ####
POSTHOOK: query: select * from x6
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x6
#### A masked pattern was here ####
NULL
NULL
NULL
NULL
val_165 NULL
val_193 NULL
val_265 NULL
val_27 NULL
val_409 NULL
val_484 NULL
128 129.0
146 val_146 147.0
150 val_150 151.0
213 val_213 214.0
224 225.0
238 val_238 239.0
255 val_255 256.0
273 val_273 274.0
278 val_278 279.0
311 val_311 312.0
369 370.0
401 val_401 402.0
406 val_406 407.0
66 val_66 67.0
98 val_98 99.0
PREHOOK: query: explain
create table x7 as select * from (select *, count(value) from src group by key, value) a
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x7 as select * from (select *, count(value) from src group by key, value) a
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-3 depends on stages: Stage-0
Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: key, value
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(value)
keys: key (type: string), value (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: bigint)
Execution mode: vectorized
Reduce Operator Tree:
Group By Operator
aggregations: count(VALUE._col0)
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x7
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-3
Create Table Operator:
Create Table
columns: _col0 string, _col1 string, _c1 bigint
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x7
Stage: Stage-2
Stats Work
Basic Stats Work:
PREHOOK: query: create table x7 as select * from (select *, count(value) from src group by key, value) a
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@x7
POSTHOOK: query: create table x7 as select * from (select *, count(value) from src group by key, value) a
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x7
POSTHOOK: Lineage: x7._c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: x7._col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x7._col1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted x7
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x7
POSTHOOK: query: describe formatted x7
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x7
# col_name data_type comment
_col0 string
_col1 string
_c1 bigint
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 309
rawDataSize 3891
totalSize 4200
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x7
PREHOOK: type: QUERY
PREHOOK: Input: default@x7
#### A masked pattern was here ####
POSTHOOK: query: select * from x7
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x7
#### A masked pattern was here ####
0 val_0 3
10 val_10 1
100 val_100 2
103 val_103 2
104 val_104 2
105 val_105 1
11 val_11 1
111 val_111 1
113 val_113 2
114 val_114 1
116 val_116 1
118 val_118 2
119 val_119 3
12 val_12 2
120 val_120 2
125 val_125 2
126 val_126 1
128 val_128 3
129 val_129 2
131 val_131 1
133 val_133 1
134 val_134 2
136 val_136 1
137 val_137 2
138 val_138 4
143 val_143 1
145 val_145 1
146 val_146 2
149 val_149 2
15 val_15 2
150 val_150 1
152 val_152 2
153 val_153 1
155 val_155 1
156 val_156 1
157 val_157 1
158 val_158 1
160 val_160 1
162 val_162 1
163 val_163 1
164 val_164 2
165 val_165 2
166 val_166 1
167 val_167 3
168 val_168 1
169 val_169 4
17 val_17 1
170 val_170 1
172 val_172 2
174 val_174 2
175 val_175 2
176 val_176 2
177 val_177 1
178 val_178 1
179 val_179 2
18 val_18 2
180 val_180 1
181 val_181 1
183 val_183 1
186 val_186 1
187 val_187 3
189 val_189 1
19 val_19 1
190 val_190 1
191 val_191 2
192 val_192 1
193 val_193 3
194 val_194 1
195 val_195 2
196 val_196 1
197 val_197 2
199 val_199 3
2 val_2 1
20 val_20 1
200 val_200 2
201 val_201 1
202 val_202 1
203 val_203 2
205 val_205 2
207 val_207 2
208 val_208 3
209 val_209 2
213 val_213 2
214 val_214 1
216 val_216 2
217 val_217 2
218 val_218 1
219 val_219 2
221 val_221 2
222 val_222 1
223 val_223 2
224 val_224 2
226 val_226 1
228 val_228 1
229 val_229 2
230 val_230 5
233 val_233 2
235 val_235 1
237 val_237 2
238 val_238 2
239 val_239 2
24 val_24 2
241 val_241 1
242 val_242 2
244 val_244 1
247 val_247 1
248 val_248 1
249 val_249 1
252 val_252 1
255 val_255 2
256 val_256 2
257 val_257 1
258 val_258 1
26 val_26 2
260 val_260 1
262 val_262 1
263 val_263 1
265 val_265 2
266 val_266 1
27 val_27 1
272 val_272 2
273 val_273 3
274 val_274 1
275 val_275 1
277 val_277 4
278 val_278 2
28 val_28 1
280 val_280 2
281 val_281 2
282 val_282 2
283 val_283 1
284 val_284 1
285 val_285 1
286 val_286 1
287 val_287 1
288 val_288 2
289 val_289 1
291 val_291 1
292 val_292 1
296 val_296 1
298 val_298 3
30 val_30 1
302 val_302 1
305 val_305 1
306 val_306 1
307 val_307 2
308 val_308 1
309 val_309 2
310 val_310 1
311 val_311 3
315 val_315 1
316 val_316 3
317 val_317 2
318 val_318 3
321 val_321 2
322 val_322 2
323 val_323 1
325 val_325 2
327 val_327 3
33 val_33 1
331 val_331 2
332 val_332 1
333 val_333 2
335 val_335 1
336 val_336 1
338 val_338 1
339 val_339 1
34 val_34 1
341 val_341 1
342 val_342 2
344 val_344 2
345 val_345 1
348 val_348 5
35 val_35 3
351 val_351 1
353 val_353 2
356 val_356 1
360 val_360 1
362 val_362 1
364 val_364 1
365 val_365 1
366 val_366 1
367 val_367 2
368 val_368 1
369 val_369 3
37 val_37 2
373 val_373 1
374 val_374 1
375 val_375 1
377 val_377 1
378 val_378 1
379 val_379 1
382 val_382 2
384 val_384 3
386 val_386 1
389 val_389 1
392 val_392 1
393 val_393 1
394 val_394 1
395 val_395 2
396 val_396 3
397 val_397 2
399 val_399 2
4 val_4 1
400 val_400 1
401 val_401 5
402 val_402 1
403 val_403 3
404 val_404 2
406 val_406 4
407 val_407 1
409 val_409 3
41 val_41 1
411 val_411 1
413 val_413 2
414 val_414 2
417 val_417 3
418 val_418 1
419 val_419 1
42 val_42 2
421 val_421 1
424 val_424 2
427 val_427 1
429 val_429 2
43 val_43 1
430 val_430 3
431 val_431 3
432 val_432 1
435 val_435 1
436 val_436 1
437 val_437 1
438 val_438 3
439 val_439 2
44 val_44 1
443 val_443 1
444 val_444 1
446 val_446 1
448 val_448 1
449 val_449 1
452 val_452 1
453 val_453 1
454 val_454 3
455 val_455 1
457 val_457 1
458 val_458 2
459 val_459 2
460 val_460 1
462 val_462 2
463 val_463 2
466 val_466 3
467 val_467 1
468 val_468 4
469 val_469 5
47 val_47 1
470 val_470 1
472 val_472 1
475 val_475 1
477 val_477 1
478 val_478 2
479 val_479 1
480 val_480 3
481 val_481 1
482 val_482 1
483 val_483 1
484 val_484 1
485 val_485 1
487 val_487 1
489 val_489 4
490 val_490 1
491 val_491 1
492 val_492 2
493 val_493 1
494 val_494 1
495 val_495 1
496 val_496 1
497 val_497 1
498 val_498 3
5 val_5 3
51 val_51 2
53 val_53 1
54 val_54 1
57 val_57 1
58 val_58 2
64 val_64 1
65 val_65 1
66 val_66 1
67 val_67 2
69 val_69 1
70 val_70 3
72 val_72 2
74 val_74 1
76 val_76 2
77 val_77 1
78 val_78 1
8 val_8 1
80 val_80 1
82 val_82 1
83 val_83 2
84 val_84 2
85 val_85 1
86 val_86 1
87 val_87 1
9 val_9 1
90 val_90 3
92 val_92 1
95 val_95 2
96 val_96 1
97 val_97 2
98 val_98 2
PREHOOK: query: explain
create table x8 as select * from (select *, count(value) from src group by key, value having key < 9) a
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x8 as select * from (select *, count(value) from src group by key, value having key < 9) a
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-3 depends on stages: Stage-0
Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (UDFToDouble(key) < 9.0D) (type: boolean)
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(value)
keys: key (type: string), value (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: bigint)
Execution mode: vectorized
Reduce Operator Tree:
Group By Operator
aggregations: count(VALUE._col0)
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x8
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-3
Create Table Operator:
Create Table
columns: _col0 string, _col1 string, _c1 bigint
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x8
Stage: Stage-2
Stats Work
Basic Stats Work:
PREHOOK: query: create table x8 as select * from (select *, count(value) from src group by key, value having key < 9) a
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@x8
POSTHOOK: query: create table x8 as select * from (select *, count(value) from src group by key, value having key < 9) a
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x8
POSTHOOK: Lineage: x8._c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: x8._col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: x8._col1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: describe formatted x8
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x8
POSTHOOK: query: describe formatted x8
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x8
# col_name data_type comment
_col0 string
_col1 string
_c1 bigint
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 5
rawDataSize 45
totalSize 50
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x8
PREHOOK: type: QUERY
PREHOOK: Input: default@x8
#### A masked pattern was here ####
POSTHOOK: query: select * from x8
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x8
#### A masked pattern was here ####
0 val_0 3
2 val_2 1
4 val_4 1
5 val_5 3
8 val_8 1
PREHOOK: query: explain
create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
PREHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: query: explain
create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-3 depends on stages: Stage-0
Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (UDFToDouble(key) < 9.0D) (type: boolean)
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: max(value)
keys: key (type: string)
mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Execution mode: vectorized
Reduce Operator Tree:
Group By Operator
aggregations: max(VALUE._col0)
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: _col1 is not null (type: boolean)
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string), _col0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x9
Stage: Stage-0
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-3
Create Table Operator:
Create Table
columns: _c0 string, key string
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.x9
Stage: Stage-2
Stats Work
Basic Stats Work:
PREHOOK: query: create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@x9
POSTHOOK: query: create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@x9
POSTHOOK: Lineage: x9._c0 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: x9.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: describe formatted x9
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@x9
POSTHOOK: query: describe formatted x9
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@x9
# col_name data_type comment
_c0 string
key string
# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: MANAGED_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
bucketing_version 2
numFiles 1
numRows 5
rawDataSize 35
totalSize 40
#### A masked pattern was here ####
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
PREHOOK: query: select * from x9
PREHOOK: type: QUERY
PREHOOK: Input: default@x9
#### A masked pattern was here ####
POSTHOOK: query: select * from x9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@x9
#### A masked pattern was here ####
val_0 0
val_2 2
val_4 4
val_5 5
val_8 8