PREHOOK: query: CREATE TABLE src_x1(key string, value string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@src_x1
POSTHOOK: query: CREATE TABLE src_x1(key string, value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@src_x1
PREHOOK: query: CREATE EXTERNAL TABLE src_x2(key string, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:value")
TBLPROPERTIES ("external.table.purge" = "true")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@src_x2
POSTHOOK: query: CREATE EXTERNAL TABLE src_x2(key string, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:value")
TBLPROPERTIES ("external.table.purge" = "true")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@src_x2
PREHOOK: query: explain
from src a
insert overwrite table src_x1
select key,"" where a.key > 0 AND a.key < 50
insert overwrite table src_x2
select value,"" where a.key > 50 AND a.key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@src_x1
PREHOOK: Output: default@src_x2
POSTHOOK: query: explain
from src a
insert overwrite table src_x1
select key,"" where a.key > 0 AND a.key < 50
insert overwrite table src_x2
select value,"" where a.key > 50 AND a.key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_x1
POSTHOOK: Output: default@src_x2
STAGE DEPENDENCIES:
Stage-0 is a root stage
Stage-4 depends on stages: Stage-2, Stage-0, Stage-1
Stage-1 is a root stage
Stage-3 is a root stage
Stage-9 depends on stages: Stage-3 , consists of Stage-6, Stage-5, Stage-7
Stage-6
Stage-2 depends on stages: Stage-6, Stage-5, Stage-8
Stage-5
Stage-7
Stage-8 depends on stages: Stage-7
STAGE PLANS:
Stage: Stage-0
Unset Properties
table name: default.src_x2
properties:
COLUMN_STATS_ACCURATE
Stage: Stage-4
Stats Work
Basic Stats Work:
Column Stats Desc:
Columns: key, value
Column Types: string, string
Table: default.src_x1
Stage: Stage-1
Pre-Insert task
Stage: Stage-3
Map Reduce
Map Operator Tree:
TableScan
alias: a
Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: ((key > 0) and (key < 50)) (type: boolean)
Statistics: Num rows: 55 Data size: 9790 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), '' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 55 Data size: 9405 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
Statistics: Num rows: 55 Data size: 9405 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_x1
Select Operator
expressions: _col0 (type: string), '' (type: string)
outputColumnNames: key, value
Statistics: Num rows: 55 Data size: 9405 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: max(length(key)), avg(COALESCE(length(key),0)), count(1), count(key), compute_bit_vector_hll(key), max(length(value)), avg(COALESCE(length(value),0)), count(value), compute_bit_vector_hll(value)
minReductionHashAggr: 0.99
mode: hash
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 1 Data size: 472 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
null sort order:
sort order:
Statistics: Num rows: 1 Data size: 472 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: int), _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: bigint), _col3 (type: bigint), _col4 (type: binary), _col5 (type: int), _col6 (type: struct<count:bigint,sum:double,input:int>), _col7 (type: bigint), _col8 (type: binary)
Filter Operator
predicate: ((key > 50) and (key < 100)) (type: boolean)
Statistics: Num rows: 55 Data size: 9790 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: value (type: string), '' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 55 Data size: 9625 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
Statistics: Num rows: 55 Data size: 9625 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat
output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat
serde: org.apache.hadoop.hive.hbase.HBaseSerDe
name: default.src_x2
Execution mode: vectorized
Reduce Operator Tree:
Group By Operator
aggregations: max(VALUE._col0), avg(VALUE._col1), count(VALUE._col2), count(VALUE._col3), compute_bit_vector_hll(VALUE._col4), max(VALUE._col5), avg(VALUE._col6), count(VALUE._col7), compute_bit_vector_hll(VALUE._col8)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: 'STRING' (type: string), UDFToLong(COALESCE(_col0,0)) (type: bigint), COALESCE(_col1,0) (type: double), (_col2 - _col3) (type: bigint), COALESCE(ndv_compute_bit_vector(_col4),0) (type: bigint), _col4 (type: binary), 'STRING' (type: string), UDFToLong(COALESCE(_col5,0)) (type: bigint), COALESCE(_col6,0) (type: double), (_col2 - _col7) (type: bigint), COALESCE(ndv_compute_bit_vector(_col8),0) (type: bigint), _col8 (type: binary)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
Statistics: Num rows: 1 Data size: 532 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
Statistics: Num rows: 1 Data size: 532 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-9
Conditional Operator
Stage: Stage-6
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
Stage: Stage-2
Move Operator
tables:
replace: true
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_x1
Stage: Stage-5
Map Reduce
Map Operator Tree:
TableScan
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_x1
Stage: Stage-7
Map Reduce
Map Operator Tree:
TableScan
File Output Operator
compressed: false
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_x1
Stage: Stage-8
Move Operator
files:
hdfs directory: true
#### A masked pattern was here ####
PREHOOK: query: from src a
insert overwrite table src_x1
select key,"" where a.key > 0 AND a.key < 50
insert overwrite table src_x2
select value,"" where a.key > 50 AND a.key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@src_x1
PREHOOK: Output: default@src_x2
POSTHOOK: query: from src a
insert overwrite table src_x1
select key,"" where a.key > 0 AND a.key < 50
insert overwrite table src_x2
select value,"" where a.key > 50 AND a.key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@src_x1
POSTHOOK: Output: default@src_x2
POSTHOOK: Lineage: src_x1.key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: src_x1.value SIMPLE []
PREHOOK: query: select * from src_x1 order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@src_x1
#### A masked pattern was here ####
POSTHOOK: query: select * from src_x1 order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_x1
#### A masked pattern was here ####
10
11
12
12
15
15
17
18
18
19
2
20
24
24
26
26
27
28
30
33
34
35
35
35
37
37
4
41
42
42
43
44
47
5
5
5
8
9
PREHOOK: query: select * from src_x2 order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@src_x2
#### A masked pattern was here ####
POSTHOOK: query: select * from src_x2 order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_x2
#### A masked pattern was here ####
val_51
val_53
val_54
val_57
val_58
val_64
val_65
val_66
val_67
val_69
val_70
val_72
val_74
val_76
val_77
val_78
val_80
val_82
val_83
val_84
val_85
val_86
val_87
val_90
val_92
val_95
val_96
val_97
val_98