PREHOOK: query: CREATE TABLE dest1_n32(lint_size INT, lintstring_size INT, mstringstring_size INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@dest1_n32
POSTHOOK: query: CREATE TABLE dest1_n32(lint_size INT, lintstring_size INT, mstringstring_size INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@dest1_n32
PREHOOK: query: EXPLAIN
FROM src_thrift
INSERT OVERWRITE TABLE dest1_n32 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM src_thrift
INSERT OVERWRITE TABLE dest1_n32 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
  Stage-4
  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
  Stage-2 depends on stages: Stage-0
  Stage-3
  Stage-5
  Stage-6 depends on stages: Stage-5

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: src_thrift
            Statistics: Num rows: 11 Data size: 30700 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (lint is not null and mstringstring is not null) (type: boolean)
              Statistics: Num rows: 11 Data size: 30700 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: size(lint) (type: int), size(lintstring) (type: int), size(mstringstring) (type: int)
                outputColumnNames: _col0, _col1, _col2
                Statistics: Num rows: 11 Data size: 30700 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 11 Data size: 30700 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.dest1_n32
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
                  outputColumnNames: lint_size, lintstring_size, mstringstring_size
                  Statistics: Num rows: 11 Data size: 30700 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: compute_stats(lint_size, 'hll'), compute_stats(lintstring_size, 'hll'), compute_stats(mstringstring_size, 'hll')
                    mode: hash
                    outputColumnNames: _col0, _col1, _col2
                    Statistics: Num rows: 1 Data size: 1272 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      sort order:
                      Statistics: Num rows: 1 Data size: 1272 Basic stats: COMPLETE Column stats: NONE
                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
      Reduce Operator Tree:
        Group By Operator
          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2)
          mode: mergepartial
          outputColumnNames: _col0, _col1, _col2
          Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

  Stage: Stage-7
    Conditional Operator

  Stage: Stage-4
    Move Operator
      files:
          hdfs directory: true
          #### A masked pattern was here ####

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest1_n32

  Stage: Stage-2
    Stats Work
      Basic Stats Work:
      Column Stats Desc:
          Columns: lint_size, lintstring_size, mstringstring_size
          Column Types: int, int, int
          Table: default.dest1_n32

  Stage: Stage-3
    Map Reduce
      Map Operator Tree:
          TableScan
            File Output Operator
              compressed: false
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.dest1_n32

  Stage: Stage-5
    Map Reduce
      Map Operator Tree:
          TableScan
            File Output Operator
              compressed: false
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.dest1_n32

  Stage: Stage-6
    Move Operator
      files:
          hdfs directory: true
          #### A masked pattern was here ####
PREHOOK: query: FROM src_thrift
INSERT OVERWRITE TABLE dest1_n32 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
PREHOOK: type: QUERY
PREHOOK: Input: default@src_thrift
PREHOOK: Output: default@dest1_n32
POSTHOOK: query: FROM src_thrift
INSERT OVERWRITE TABLE dest1_n32 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_thrift
POSTHOOK: Output: default@dest1_n32
POSTHOOK: Lineage: dest1_n32.lint_size EXPRESSION [(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), ]
POSTHOOK: Lineage: dest1_n32.lintstring_size EXPRESSION [(src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
POSTHOOK: Lineage: dest1_n32.mstringstring_size EXPRESSION [(src_thrift)src_thrift.FieldSchema(name:mstringstring, type:map<string,string>, comment:from deserializer), ]
PREHOOK: query: SELECT dest1_n32.* FROM dest1_n32
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1_n32
#### A masked pattern was here ####
POSTHOOK: query: SELECT dest1_n32.* FROM dest1_n32
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1_n32
#### A masked pattern was here ####
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
3 1 1
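The golden output above records both the compiled plan and the loaded rows. As a standalone reference, the pattern the test exercises can be restated as the HiveQL sketch below; it only repeats the queries already shown (size() returns the number of elements in an array or of key/value pairs in a map) and assumes the pre-existing src_thrift test table with its array<int>, array<struct<...>>, and map<string,string> columns.

-- Sketch of the pattern exercised above: project collection sizes into dest1_n32.
CREATE TABLE dest1_n32 (lint_size INT, lintstring_size INT, mstringstring_size INT) STORED AS TEXTFILE;

-- FROM-first multi-insert syntax; rows with a NULL lint or NULL mstringstring are filtered out.
FROM src_thrift
INSERT OVERWRITE TABLE dest1_n32
SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring)
WHERE src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL);

-- Read back the loaded sizes; in this test each surviving src_thrift row yields the triple 3, 1, 1.
SELECT dest1_n32.* FROM dest1_n32;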