PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n19 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@srcbucket_mapjoin_part_n19
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_n19 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19
PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_n19
POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n19 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2_n16 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@srcbucket_mapjoin_part_2_n16
POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2_n16 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16
PREHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_2_n16
POSTHOOK: query: load data local inpath '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
PREHOOK: type: LOAD
#### A masked pattern was here ####
PREHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
POSTHOOK: query: load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_2_n16 partition(ds='2008-04-08')
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: query: create table bucketmapjoin_hash_result_1_n7 (key bigint , value1 bigint, value2 bigint)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@bucketmapjoin_hash_result_1_n7
POSTHOOK: query: create table bucketmapjoin_hash_result_1_n7 (key bigint , value1 bigint, value2 bigint)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@bucketmapjoin_hash_result_1_n7
PREHOOK: query: create table bucketmapjoin_hash_result_2_n7 (key bigint , value1 bigint, value2 bigint)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@bucketmapjoin_hash_result_2_n7
POSTHOOK: query: create table bucketmapjoin_hash_result_2_n7 (key bigint , value1 bigint, value2 bigint)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@bucketmapjoin_hash_result_2_n7
PREHOOK: query: create table bucketmapjoin_tmp_result_n9 (key string , value1 string, value2 string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@bucketmapjoin_tmp_result_n9
POSTHOOK: query: create table bucketmapjoin_tmp_result_n9 (key string , value1 string, value2 string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@bucketmapjoin_tmp_result_n9
PREHOOK: query: explain extended
insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
PREHOOK: type: QUERY
POSTHOOK: query: explain extended
insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-6 is a root stage
Stage-5 depends on stages: Stage-6
Stage-0 depends on stages: Stage-5
Stage-2 depends on stages: Stage-0, Stage-3
Stage-3 depends on stages: Stage-5
STAGE PLANS:
Stage: Stage-6
Map Reduce Local Work
Alias -> Map Local Tables:
$hdt$_0:a
Fetch Operator
limit: -1
Partition Description:
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_n19
name: default.srcbucket_mapjoin_part_n19
Alias -> Map Local Operator Tree:
$hdt$_0:a
TableScan
alias: a
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: key is not null (type: boolean)
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
HashTable Sink Operator
keys:
0 _col0 (type: int)
1 _col0 (type: int)
Position of Big Table: 1
Stage: Stage-5
Map Reduce
Map Operator Tree:
TableScan
alias: b
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: key is not null (type: boolean)
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
keys:
0 _col0 (type: int)
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3
Position of Big Table: 1
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 1
#### A masked pattern was here ####
NumFilesPerFileSink: 1
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns key,value1,value2
columns.comments
columns.types string:string:string
#### A masked pattern was here ####
name default.bucketmapjoin_tmp_result_n9
numFiles 0
numRows 0
rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 0
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result_n9
TotalFiles: 1
GatherStats: true
MultiFileSpray: false
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
outputColumnNames: key, value1, value2
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(key, 'hll'), compute_stats(value1, 'hll'), compute_stats(value2, 'hll')
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
TotalFiles: 1
GatherStats: false
MultiFileSpray: false
Local Work:
Map Reduce Local Work
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
#### A masked pattern was here ####
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2_n16
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2_n16
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2_n16
name: default.srcbucket_mapjoin_part_2_n16
#### A masked pattern was here ####
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_n19
name: default.srcbucket_mapjoin_part_n19
Truncated Path -> Alias:
/srcbucket_mapjoin_part_2_n16/ds=2008-04-08 [$hdt$_1:b]
Stage: Stage-0
Move Operator
tables:
replace: true
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns key,value1,value2
columns.comments
columns.types string:string:string
#### A masked pattern was here ####
name default.bucketmapjoin_tmp_result_n9
numFiles 0
numRows 0
rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 0
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result_n9
Stage: Stage-2
Stats Work
Basic Stats Work:
#### A masked pattern was here ####
Column Stats Desc:
Columns: key, value1, value2
Column Types: string, string, string
Table: default.bucketmapjoin_tmp_result_n9
Is Table Level Stats: true
Stage: Stage-3
Map Reduce
Map Operator Tree:
TableScan
GatherStats: false
Reduce Output Operator
null sort order:
sort order:
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
#### A masked pattern was here ####
Partition
base file name: -mr-10002
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Truncated Path -> Alias:
#### A masked pattern was here ####
Needs Tagging: false
Reduce Operator Tree:
Group By Operator
aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>:struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>:struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>
escape.delim \
hive.serialization.extend.additional.nesting.levels true
serialization.escape.crlf true
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
TotalFiles: 1
GatherStats: false
MultiFileSpray: false
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
PREHOOK: type: QUERY
PREHOOK: Input: default@srcbucket_mapjoin_part_2_n16
PREHOOK: Input: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: Input: default@srcbucket_mapjoin_part_n19
PREHOOK: Input: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: Output: default@bucketmapjoin_tmp_result_n9
POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket_mapjoin_part_2_n16
POSTHOOK: Input: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_n19
POSTHOOK: Input: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result_n9
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.key EXPRESSION [(srcbucket_mapjoin_part_n19)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.value1 SIMPLE [(srcbucket_mapjoin_part_n19)a.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.value2 SIMPLE [(srcbucket_mapjoin_part_2_n16)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result_n9
#### A masked pattern was here ####
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result_n9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result_n9
#### A masked pattern was here ####
1028
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1_n7
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result_n9
PREHOOK: Output: default@bucketmapjoin_hash_result_1_n7
POSTHOOK: query: insert overwrite table bucketmapjoin_hash_result_1_n7
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result_n9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result_n9
POSTHOOK: Output: default@bucketmapjoin_hash_result_1_n7
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.key EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.value1 EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:value1, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.value2 EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:value2, type:string, comment:null), ]
PREHOOK: query: explain extended
insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
PREHOOK: type: QUERY
POSTHOOK: query: explain extended
insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-6 is a root stage
Stage-5 depends on stages: Stage-6
Stage-0 depends on stages: Stage-5
Stage-2 depends on stages: Stage-0, Stage-3
Stage-3 depends on stages: Stage-5
STAGE PLANS:
Stage: Stage-6
Map Reduce Local Work
Alias -> Map Local Tables:
$hdt$_0:a
Fetch Operator
limit: -1
Partition Description:
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_n19
name: default.srcbucket_mapjoin_part_n19
Alias -> Map Local Operator Tree:
$hdt$_0:a
TableScan
alias: a
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: key is not null (type: boolean)
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
HashTable Sink Operator
keys:
0 _col0 (type: int)
1 _col0 (type: int)
Position of Big Table: 1
Stage: Stage-5
Map Reduce
Map Operator Tree:
TableScan
alias: b
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: key is not null (type: boolean)
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 149 Data size: 58120 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
keys:
0 _col0 (type: int)
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col3
Position of Big Table: 1
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToString(_col0) (type: string), _col1 (type: string), _col3 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 1
#### A masked pattern was here ####
NumFilesPerFileSink: 1
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns key,value1,value2
columns.comments
columns.types string:string:string
#### A masked pattern was here ####
name default.bucketmapjoin_tmp_result_n9
numFiles 1
numRows 1028
rawDataSize 19022
serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 20050
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result_n9
TotalFiles: 1
GatherStats: true
MultiFileSpray: false
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
outputColumnNames: key, value1, value2
Statistics: Num rows: 163 Data size: 63932 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(key, 'hll'), compute_stats(value1, 'hll'), compute_stats(value2, 'hll')
mode: hash
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
TotalFiles: 1
GatherStats: false
MultiFileSpray: false
Local Work:
Map Reduce Local Work
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
#### A masked pattern was here ####
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2_n16
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_2_n16
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_2_n16 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2_n16
name: default.srcbucket_mapjoin_part_2_n16
#### A masked pattern was here ####
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
partition values:
ds 2008-04-08
properties:
bucket_count 4
bucket_field_name key
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
numFiles 4
numRows 0
partition_columns ds
partition_columns.types string
rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 5812
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
bucket_count 4
bucket_field_name key
bucketing_version 2
column.name.delimiter ,
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.srcbucket_mapjoin_part_n19
partition_columns ds
partition_columns.types string
serialization.ddl struct srcbucket_mapjoin_part_n19 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_n19
name: default.srcbucket_mapjoin_part_n19
Truncated Path -> Alias:
/srcbucket_mapjoin_part_2_n16/ds=2008-04-08 [$hdt$_1:b]
Stage: Stage-0
Move Operator
tables:
replace: true
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value1":"true","value2":"true"}}
bucket_count -1
bucketing_version 2
column.name.delimiter ,
columns key,value1,value2
columns.comments
columns.types string:string:string
#### A masked pattern was here ####
name default.bucketmapjoin_tmp_result_n9
numFiles 1
numRows 1028
rawDataSize 19022
serialization.ddl struct bucketmapjoin_tmp_result_n9 { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 20050
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result_n9
Stage: Stage-2
Stats Work
Basic Stats Work:
#### A masked pattern was here ####
Column Stats Desc:
Columns: key, value1, value2
Column Types: string, string, string
Table: default.bucketmapjoin_tmp_result_n9
Is Table Level Stats: true
Stage: Stage-3
Map Reduce
Map Operator Tree:
TableScan
GatherStats: false
Reduce Output Operator
null sort order:
sort order:
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
#### A masked pattern was here ####
Partition
base file name: -mr-10002
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
column.name.delimiter ,
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>
escape.delim \
serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Truncated Path -> Alias:
#### A masked pattern was here ####
Needs Tagging: false
Reduce Operator Tree:
Group By Operator
aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
Statistics: Num rows: 1 Data size: 1320 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
properties:
columns _col0,_col1,_col2
columns.types struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>:struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>:struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>
escape.delim \
hive.serialization.extend.additional.nesting.levels true
serialization.escape.crlf true
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
TotalFiles: 1
GatherStats: false
MultiFileSpray: false
PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
PREHOOK: type: QUERY
PREHOOK: Input: default@srcbucket_mapjoin_part_2_n16
PREHOOK: Input: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
PREHOOK: Input: default@srcbucket_mapjoin_part_n19
PREHOOK: Input: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
PREHOOK: Output: default@bucketmapjoin_tmp_result_n9
POSTHOOK: query: insert overwrite table bucketmapjoin_tmp_result_n9
select a.key, a.value, b.value
from srcbucket_mapjoin_part_n19 a join srcbucket_mapjoin_part_2_n16 b
on a.key=b.key and b.ds="2008-04-08"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket_mapjoin_part_2_n16
POSTHOOK: Input: default@srcbucket_mapjoin_part_2_n16@ds=2008-04-08
POSTHOOK: Input: default@srcbucket_mapjoin_part_n19
POSTHOOK: Input: default@srcbucket_mapjoin_part_n19@ds=2008-04-08
POSTHOOK: Output: default@bucketmapjoin_tmp_result_n9
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.key EXPRESSION [(srcbucket_mapjoin_part_n19)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.value1 SIMPLE [(srcbucket_mapjoin_part_n19)a.FieldSchema(name:value, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_tmp_result_n9.value2 SIMPLE [(srcbucket_mapjoin_part_2_n16)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select count(1) from bucketmapjoin_tmp_result_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result_n9
#### A masked pattern was here ####
POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result_n9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result_n9
#### A masked pattern was here ####
1028
PREHOOK: query: insert overwrite table bucketmapjoin_hash_result_1_n7
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@bucketmapjoin_tmp_result_n9
PREHOOK: Output: default@bucketmapjoin_hash_result_1_n7
POSTHOOK: query: insert overwrite table bucketmapjoin_hash_result_1_n7
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result_n9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@bucketmapjoin_tmp_result_n9
POSTHOOK: Output: default@bucketmapjoin_hash_result_1_n7
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.key EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.value1 EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:value1, type:string, comment:null), ]
POSTHOOK: Lineage: bucketmapjoin_hash_result_1_n7.value2 EXPRESSION [(bucketmapjoin_tmp_result_n9)bucketmapjoin_tmp_result_n9.FieldSchema(name:value2, type:string, comment:null), ]
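Note: bucketmapjoin_hash_result_2_n7 is created above but never populated in the output shown, which suggests a later checksum-comparison step between runs. A minimal HiveQL sketch of that comparison, assuming a subsequent run writes its checksums into bucketmapjoin_hash_result_2_n7 (that step is not part of this output):
insert overwrite table bucketmapjoin_hash_result_2_n7
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result_n9;
-- Each difference should be 0 when the two runs produced identical join results.
select a.key - b.key, a.value1 - b.value1, a.value2 - b.value2
from bucketmapjoin_hash_result_1_n7 a left outer join bucketmapjoin_hash_result_2_n7 b
on a.key = b.key;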