PREHOOK: query: CREATE TABLE dest1_n16(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@dest1_n16
POSTHOOK: query: CREATE TABLE dest1_n16(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@dest1_n16
PREHOOK: query: EXPLAIN
FROM src
INSERT OVERWRITE TABLE dest1_n16 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM src
INSERT OVERWRITE TABLE dest1_n16 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1
Stage-2 depends on stages: Stage-0, Stage-3
Stage-3 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-1
Map Reduce
Map Operator Tree:
TableScan
alias: src
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: substr(key, 1, 1) (type: string), substr(value, 5) (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(DISTINCT _col1), sum(_col1)
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
value expressions: _col3 (type: double)
Reduce Operator Tree:
Group By Operator
aggregations: count(DISTINCT KEY._col1:0._col0), sum(VALUE._col1)
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), concat(_col0, _col2) (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest1_n16
Select Operator
expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string)
outputColumnNames: key, c1, c2
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(key, 'hll'), compute_stats(c1, 'hll'), compute_stats(c2, 'hll')
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Stage: Stage-0
Move Operator
tables:
replace: true
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest1_n16
Stage: Stage-2
Stats Work
Basic Stats Work:
Column Stats Desc:
Columns: key, c1, c2
Column Types: string, int, string
Table: default.dest1_n16
Stage: Stage-3
Map Reduce
Map Operator Tree:
TableScan
Reduce Output Operator
sort order:
Statistics: Num rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
Execution mode: vectorized
Reduce Operator Tree:
Group By Operator
aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 1 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE dest1_n16 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest1_n16
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE dest1_n16 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1_n16
POSTHOOK: Lineage: dest1_n16.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1_n16.c2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1_n16.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: SELECT dest1_n16.* FROM dest1_n16
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1_n16
#### A masked pattern was here ####
POSTHOOK: query: SELECT dest1_n16.* FROM dest1_n16
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1_n16
#### A masked pattern was here ####
0 1 00.0
1 71 116414.0
2 69 225571.0
3 62 332004.0
4 74 452763.0
5 6 5397.0
6 5 6398.0
7 6 7735.0
8 8 8762.0
9 7 91047.0