PREHOOK: query: -- SORT_QUERY_RESULTS
EXPLAIN EXTENDED
FROM (
FROM srcpart src
SELECT TRANSFORM(src.ds, src.key, src.value)
USING 'cat' AS (ds, tkey, tvalue)
WHERE src.ds = '2008-04-08'
CLUSTER BY tkey
) tmap
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
PREHOOK: type: QUERY
POSTHOOK: query: -- SORT_QUERY_RESULTS
EXPLAIN EXTENDED
FROM (
FROM srcpart src
SELECT TRANSFORM(src.ds, src.key, src.value)
USING 'cat' AS (ds, tkey, tvalue)
WHERE src.ds = '2008-04-08'
CLUSTER BY tkey
) tmap
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
POSTHOOK: type: QUERY
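For context: TRANSFORM(...) USING 'cat' streams each input row to the child process as one tab-delimited line on stdin and parses each tab-delimited line from its stdout back into the AS (ds, tkey, tvalue) columns, all typed string; 'cat' is simply the identity transform. A minimal Python stand-in, a hypothetical identity.py that is not part of this test, might look like this (tab as the delimiter matches the field.delim 9 property in the plan below):

    #!/usr/bin/env python
    # Sketch of an identity transform script, equivalent to 'cat'.
    # Hive writes each row to stdin as tab-separated fields ending in '\n'
    # and reads tab-separated rows back from stdout.
    import sys

    for line in sys.stdin:
        ds, key, value = line.rstrip('\n').split('\t')
        sys.stdout.write('\t'.join((ds, key, value)) + '\n')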
ABSTRACT SYNTAX TREE:

TOK_QUERY
   TOK_FROM
      TOK_SUBQUERY
         TOK_QUERY
            TOK_FROM
               TOK_TABREF
                  TOK_TABNAME
                     srcpart
                  src
            TOK_INSERT
               TOK_DESTINATION
                  TOK_DIR
                     TOK_TMP_FILE
               TOK_SELECT
                  TOK_SELEXPR
                     TOK_TRANSFORM
                        TOK_EXPLIST
                           .
                              TOK_TABLE_OR_COL
                                 src
                              ds
                           .
                              TOK_TABLE_OR_COL
                                 src
                              key
                           .
                              TOK_TABLE_OR_COL
                                 src
                              value
                        TOK_SERDE
                        TOK_RECORDWRITER
                        'cat'
                        TOK_SERDE
                        TOK_RECORDREADER
                        TOK_ALIASLIST
                           ds
                           tkey
                           tvalue
               TOK_WHERE
                  =
                     .
                        TOK_TABLE_OR_COL
                           src
                        ds
                     '2008-04-08'
               TOK_CLUSTERBY
                  TOK_TABLE_OR_COL
                     tkey
         tmap
   TOK_INSERT
      TOK_DESTINATION
         TOK_DIR
            TOK_TMP_FILE
      TOK_SELECT
         TOK_SELEXPR
            .
               TOK_TABLE_OR_COL
                  tmap
               tkey
         TOK_SELEXPR
            .
               TOK_TABLE_OR_COL
                  tmap
               tvalue
      TOK_WHERE
         <
            .
               TOK_TABLE_OR_COL
                  tmap
               tkey
            100
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Tez
      Edges:
        Reducer 2 <- Map 1 (SIMPLE_EDGE)
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: src
                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Select Operator
                    expressions: '2008-04-08' (type: string), key (type: string), value (type: string)
                    outputColumnNames: _col0, _col1, _col2
                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                    Transform Operator
                      command: cat
                      output info:
                          input format: org.apache.hadoop.mapred.TextInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                          properties:
                            columns _col0,_col1,_col2
                            columns.types string,string,string
                            field.delim 9
                            serialization.format 9
                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                      Filter Operator
                        isSamplingPred: false
                        predicate: (_col1 < 100) (type: boolean)
                        Statistics: Num rows: 333 Data size: 3537 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col1 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col1 (type: string)
                          Statistics: Num rows: 333 Data size: 3537 Basic stats: COMPLETE Column stats: NONE
                          tag: -1
                          value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
                          auto parallelism: true
            Path -> Alias:
#### A masked pattern was here ####
            Path -> Partition:
#### A masked pattern was here ####
                Partition
                  base file name: hr=11
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  partition values:
                    ds 2008-04-08
                    hr 11
                  properties:
                    COLUMN_STATS_ACCURATE true
                    bucket_count -1
                    columns key,value
                    columns.comments 'default','default'
                    columns.types string:string
#### A masked pattern was here ####
                    name default.srcpart
                    numFiles 1
                    numRows 500
                    partition_columns ds/hr
                    partition_columns.types string:string
                    rawDataSize 5312
                    serialization.ddl struct srcpart { string key, string value}
                    serialization.format 1
                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    totalSize 5812
#### A masked pattern was here ####
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    properties:
                      bucket_count -1
                      columns key,value
                      columns.comments 'default','default'
                      columns.types string:string
#### A masked pattern was here ####
                      name default.srcpart
                      partition_columns ds/hr
                      partition_columns.types string:string
                      serialization.ddl struct srcpart { string key, string value}
                      serialization.format 1
                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.srcpart
                  name: default.srcpart
#### A masked pattern was here ####
                Partition
                  base file name: hr=12
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  partition values:
                    ds 2008-04-08
                    hr 12
                  properties:
                    COLUMN_STATS_ACCURATE true
                    bucket_count -1
                    columns key,value
                    columns.comments 'default','default'
                    columns.types string:string
#### A masked pattern was here ####
                    name default.srcpart
                    numFiles 1
                    numRows 500
                    partition_columns ds/hr
                    partition_columns.types string:string
                    rawDataSize 5312
                    serialization.ddl struct srcpart { string key, string value}
                    serialization.format 1
                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    totalSize 5812
#### A masked pattern was here ####
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    properties:
                      bucket_count -1
                      columns key,value
                      columns.comments 'default','default'
                      columns.types string:string
#### A masked pattern was here ####
                      name default.srcpart
                      partition_columns ds/hr
                      partition_columns.types string:string
                      serialization.ddl struct srcpart { string key, string value}
                      serialization.format 1
                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.srcpart
                  name: default.srcpart
            Truncated Path -> Alias:
              /srcpart/ds=2008-04-08/hr=11 [src]
              /srcpart/ds=2008-04-08/hr=12 [src]
        Reducer 2
            Execution mode: uber
            Needs Tagging: false
            Reduce Operator Tree:
              Select Operator
                expressions: VALUE._col1 (type: string), VALUE._col2 (type: string)
                outputColumnNames: _col0, _col1
                Statistics: Num rows: 333 Data size: 3537 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  GlobalTableId: 0
#### A masked pattern was here ####
                  NumFilesPerFileSink: 1
                  Statistics: Num rows: 333 Data size: 3537 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      properties:
                        columns _col0,_col1
                        columns.types string:string
                        escape.delim \
                        hive.serialization.extend.additional.nesting.levels true
                        serialization.format 1
                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  TotalFiles: 1
                  GatherStats: false
                  MultiFileSpray: false

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink
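Reading the plan: the outer WHERE tmap.tkey < 100 is pushed into Map 1 as a Filter Operator applied directly to the transform's string output (_col1), and CLUSTER BY tkey becomes the SIMPLE_EDGE shuffle, with the Reduce Output Operator sorting and partitioning on _col1. A rough Python sketch of the per-row work in Map 1 (assumptions: the script is 'cat', and the string-vs-int comparison follows Hive's usual coercion to double, under which a non-numeric key yields NULL and is filtered out):

    # Sketch of the Map 1 operator tree above; an emulation, not Hive code.
    def map_side(rows):
        for key, value in rows:                          # TableScan over srcpart, ds=2008-04-08
            ds, tkey, tvalue = '2008-04-08', key, value  # Select + identity transform ('cat')
            try:
                k = float(tkey)                          # string vs. int compares as double
            except ValueError:
                continue                                 # NULL comparison: row is filtered
            if k < 100:                                  # Filter Operator: (_col1 < 100)
                yield tkey, (ds, tkey, tvalue)           # Reduce Output: shuffle key _col1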
PREHOOK: query: FROM (
FROM srcpart src
SELECT TRANSFORM(src.ds, src.key, src.value)
USING 'cat' AS (ds, tkey, tvalue)
WHERE src.ds = '2008-04-08'
CLUSTER BY tkey
) tmap
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
PREHOOK: type: QUERY
PREHOOK: Input: cat
PREHOOK: Input: default@srcpart
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
#### A masked pattern was here ####
POSTHOOK: query: FROM (
FROM srcpart src
SELECT TRANSFORM(src.ds, src.key, src.value)
USING 'cat' AS (ds, tkey, tvalue)
WHERE src.ds = '2008-04-08'
CLUSTER BY tkey
) tmap
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: cat
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
#### A masked pattern was here ####
0 val_0
0 val_0
0 val_0
0 val_0
0 val_0
0 val_0
10 val_10
10 val_10
11 val_11
11 val_11
12 val_12
12 val_12
12 val_12
12 val_12
15 val_15
15 val_15
15 val_15
15 val_15
17 val_17
17 val_17
18 val_18
18 val_18
18 val_18
18 val_18
19 val_19
19 val_19
2 val_2
2 val_2
20 val_20
20 val_20
24 val_24
24 val_24
24 val_24
24 val_24
26 val_26
26 val_26
26 val_26
26 val_26
27 val_27
27 val_27
28 val_28
28 val_28
30 val_30
30 val_30
33 val_33
33 val_33
34 val_34
34 val_34
35 val_35
35 val_35
35 val_35
35 val_35
35 val_35
35 val_35
37 val_37
37 val_37
37 val_37
37 val_37
4 val_4
4 val_4
41 val_41
41 val_41
42 val_42
42 val_42
42 val_42
42 val_42
43 val_43
43 val_43
44 val_44
44 val_44
47 val_47
47 val_47
5 val_5
5 val_5
5 val_5
5 val_5
5 val_5
5 val_5
51 val_51
51 val_51
51 val_51
51 val_51
53 val_53
53 val_53
54 val_54
54 val_54
57 val_57
57 val_57
58 val_58
58 val_58
58 val_58
58 val_58
64 val_64
64 val_64
65 val_65
65 val_65
66 val_66
66 val_66
67 val_67
67 val_67
67 val_67
67 val_67
69 val_69
69 val_69
70 val_70
70 val_70
70 val_70
70 val_70
70 val_70
70 val_70
72 val_72
72 val_72
72 val_72
72 val_72
74 val_74
74 val_74
76 val_76
76 val_76
76 val_76
76 val_76
77 val_77
77 val_77
78 val_78
78 val_78
8 val_8
8 val_8
80 val_80
80 val_80
82 val_82
82 val_82
83 val_83
83 val_83
83 val_83
83 val_83
84 val_84
84 val_84
84 val_84
84 val_84
85 val_85
85 val_85
86 val_86
86 val_86
87 val_87
87 val_87
9 val_9
9 val_9
90 val_90
90 val_90
90 val_90
90 val_90
90 val_90
90 val_90
92 val_92
92 val_92
95 val_95
95 val_95
95 val_95
95 val_95
96 val_96
96 val_96
97 val_97
97 val_97
97 val_97
97 val_97
98 val_98
98 val_98
98 val_98
98 val_98
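A note on the output above: the -- SORT_QUERY_RESULTS directive makes the test harness sort result lines as plain strings, which is why 2 val_2 appears after 19 val_19; and every key shows up an even number of times because the hr=11 and hr=12 partitions of ds=2008-04-08 each contribute an identical 500-row copy of src. A quick Python illustration of that lexicographic ordering, using keys taken from the results:

    # String sort (the effect of SORT_QUERY_RESULTS): '2' sorts after '19'.
    rows = ['2\tval_2', '19\tval_19', '98\tval_98']
    print(sorted(rows))                                       # ['19\tval_19', '2\tval_2', '98\tval_98']
    print(sorted(rows, key=lambda r: int(r.split('\t')[0])))  # ['2\tval_2', '19\tval_19', '98\tval_98']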