<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.mapreduce;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import static org.apache.hadoop.hbase.regionserver.HStoreFile.BULKLOAD_TASK_KEY;</span>
<span class="source-line-no">021</span><span id="line-21">import static org.apache.hadoop.hbase.regionserver.HStoreFile.BULKLOAD_TIME_KEY;</span>
<span class="source-line-no">022</span><span id="line-22">import static org.apache.hadoop.hbase.regionserver.HStoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY;</span>
<span class="source-line-no">023</span><span id="line-23">import static org.apache.hadoop.hbase.regionserver.HStoreFile.MAJOR_COMPACTION_KEY;</span>
<span class="source-line-no">024</span><span id="line-24"></span>
<span class="source-line-no">025</span><span id="line-25">import java.io.IOException;</span>
<span class="source-line-no">026</span><span id="line-26">import java.io.UnsupportedEncodingException;</span>
<span class="source-line-no">027</span><span id="line-27">import java.net.InetSocketAddress;</span>
<span class="source-line-no">028</span><span id="line-28">import java.net.URLDecoder;</span>
<span class="source-line-no">029</span><span id="line-29">import java.net.URLEncoder;</span>
<span class="source-line-no">030</span><span id="line-30">import java.nio.charset.Charset;</span>
<span class="source-line-no">031</span><span id="line-31">import java.util.ArrayList;</span>
<span class="source-line-no">032</span><span id="line-32">import java.util.Arrays;</span>
<span class="source-line-no">033</span><span id="line-33">import java.util.Collections;</span>
<span class="source-line-no">034</span><span id="line-34">import java.util.List;</span>
<span class="source-line-no">035</span><span id="line-35">import java.util.Map;</span>
<span class="source-line-no">036</span><span id="line-36">import java.util.Map.Entry;</span>
<span class="source-line-no">037</span><span id="line-37">import java.util.Set;</span>
<span class="source-line-no">038</span><span id="line-38">import java.util.TreeMap;</span>
<span class="source-line-no">039</span><span id="line-39">import java.util.TreeSet;</span>
<span class="source-line-no">040</span><span id="line-40">import java.util.UUID;</span>
<span class="source-line-no">041</span><span id="line-41">import java.util.function.Function;</span>
<span class="source-line-no">042</span><span id="line-42">import java.util.stream.Collectors;</span>
<span class="source-line-no">043</span><span id="line-43">import org.apache.commons.lang3.StringUtils;</span>
<span class="source-line-no">044</span><span id="line-44">import org.apache.hadoop.conf.Configuration;</span>
<span class="source-line-no">045</span><span id="line-45">import org.apache.hadoop.fs.FileSystem;</span>
<span class="source-line-no">046</span><span id="line-46">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">047</span><span id="line-47">import org.apache.hadoop.hbase.Cell;</span>
<span class="source-line-no">048</span><span id="line-48">import org.apache.hadoop.hbase.CellUtil;</span>
<span class="source-line-no">049</span><span id="line-49">import org.apache.hadoop.hbase.ExtendedCell;</span>
<span class="source-line-no">050</span><span id="line-50">import org.apache.hadoop.hbase.HConstants;</span>
<span class="source-line-no">051</span><span id="line-51">import org.apache.hadoop.hbase.HRegionLocation;</span>
<span class="source-line-no">052</span><span id="line-52">import org.apache.hadoop.hbase.KeyValue;</span>
<span class="source-line-no">053</span><span id="line-53">import org.apache.hadoop.hbase.PrivateCellUtil;</span>
<span class="source-line-no">054</span><span id="line-54">import org.apache.hadoop.hbase.TableName;</span>
<span class="source-line-no">055</span><span id="line-55">import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;</span>
<span class="source-line-no">056</span><span id="line-56">import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;</span>
<span class="source-line-no">057</span><span id="line-57">import org.apache.hadoop.hbase.client.Connection;</span>
<span class="source-line-no">058</span><span id="line-58">import org.apache.hadoop.hbase.client.ConnectionFactory;</span>
<span class="source-line-no">059</span><span id="line-59">import org.apache.hadoop.hbase.client.Put;</span>
<span class="source-line-no">060</span><span id="line-60">import org.apache.hadoop.hbase.client.RegionLocator;</span>
<span class="source-line-no">061</span><span id="line-61">import org.apache.hadoop.hbase.client.Table;</span>
<span class="source-line-no">062</span><span id="line-62">import org.apache.hadoop.hbase.client.TableDescriptor;</span>
<span class="source-line-no">063</span><span id="line-63">import org.apache.hadoop.hbase.fs.HFileSystem;</span>
<span class="source-line-no">064</span><span id="line-64">import org.apache.hadoop.hbase.io.ImmutableBytesWritable;</span>
<span class="source-line-no">065</span><span id="line-65">import org.apache.hadoop.hbase.io.compress.Compression;</span>
<span class="source-line-no">066</span><span id="line-66">import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;</span>
<span class="source-line-no">067</span><span id="line-67">import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;</span>
<span class="source-line-no">068</span><span id="line-68">import org.apache.hadoop.hbase.io.hfile.CacheConfig;</span>
<span class="source-line-no">069</span><span id="line-69">import org.apache.hadoop.hbase.io.hfile.HFile;</span>
<span class="source-line-no">070</span><span id="line-70">import org.apache.hadoop.hbase.io.hfile.HFileContext;</span>
<span class="source-line-no">071</span><span id="line-71">import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;</span>
<span class="source-line-no">072</span><span id="line-72">import org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;</span>
<span class="source-line-no">073</span><span id="line-73">import org.apache.hadoop.hbase.regionserver.BloomType;</span>
<span class="source-line-no">074</span><span id="line-74">import org.apache.hadoop.hbase.regionserver.HStore;</span>
<span class="source-line-no">075</span><span id="line-75">import org.apache.hadoop.hbase.regionserver.StoreFileWriter;</span>
<span class="source-line-no">076</span><span id="line-76">import org.apache.hadoop.hbase.regionserver.StoreUtils;</span>
<span class="source-line-no">077</span><span id="line-77">import org.apache.hadoop.hbase.util.BloomFilterUtil;</span>
<span class="source-line-no">078</span><span id="line-78">import org.apache.hadoop.hbase.util.Bytes;</span>
<span class="source-line-no">079</span><span id="line-79">import org.apache.hadoop.hbase.util.CommonFSUtils;</span>
<span class="source-line-no">080</span><span id="line-80">import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;</span>
<span class="source-line-no">081</span><span id="line-81">import org.apache.hadoop.hbase.util.MapReduceExtendedCell;</span>
<span class="source-line-no">082</span><span id="line-82">import org.apache.hadoop.io.NullWritable;</span>
<span class="source-line-no">083</span><span id="line-83">import org.apache.hadoop.io.SequenceFile;</span>
<span class="source-line-no">084</span><span id="line-84">import org.apache.hadoop.io.Text;</span>
<span class="source-line-no">085</span><span id="line-85">import org.apache.hadoop.mapreduce.Job;</span>
<span class="source-line-no">086</span><span id="line-86">import org.apache.hadoop.mapreduce.OutputCommitter;</span>
<span class="source-line-no">087</span><span id="line-87">import org.apache.hadoop.mapreduce.OutputFormat;</span>
<span class="source-line-no">088</span><span id="line-88">import org.apache.hadoop.mapreduce.RecordWriter;</span>
<span class="source-line-no">089</span><span id="line-89">import org.apache.hadoop.mapreduce.TaskAttemptContext;</span>
<span class="source-line-no">090</span><span id="line-90">import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;</span>
<span class="source-line-no">091</span><span id="line-91">import org.apache.hadoop.mapreduce.lib.output.PathOutputCommitter;</span>
<span class="source-line-no">092</span><span id="line-92">import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;</span>
<span class="source-line-no">093</span><span id="line-93">import org.apache.yetus.audience.InterfaceAudience;</span>
<span class="source-line-no">094</span><span id="line-94">import org.slf4j.Logger;</span>
<span class="source-line-no">095</span><span id="line-95">import org.slf4j.LoggerFactory;</span>
<span class="source-line-no">096</span><span id="line-96"></span>
<span class="source-line-no">097</span><span id="line-97">/**</span>
<span class="source-line-no">098</span><span id="line-98"> * Writes HFiles. Passed Cells must arrive in order. Writes current time as the sequence id for the</span>
<span class="source-line-no">099</span><span id="line-99"> * file. Sets the major compacted attribute on created {@link HFile}s. Calling write(null,null) will</span>
<span class="source-line-no">100</span><span id="line-100"> * forcibly roll all HFiles being written.</span>
<span class="source-line-no">101</span><span id="line-101"> * &lt;p&gt;</span>
<span class="source-line-no">102</span><span id="line-102"> * Using this class as part of a MapReduce job is best done using</span>
<span class="source-line-no">103</span><span id="line-103"> * {@link #configureIncrementalLoad(Job, TableDescriptor, RegionLocator)}.</span>
<span class="source-line-no">104</span><span id="line-104"> */</span>
<span class="source-line-no">105</span><span id="line-105">@InterfaceAudience.Public</span>
<span class="source-line-no">106</span><span id="line-106">public class HFileOutputFormat2 extends FileOutputFormat&lt;ImmutableBytesWritable, Cell&gt; {</span>
<span class="source-line-no">107</span><span id="line-107"> private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormat2.class);</span>
<span class="source-line-no">108</span><span id="line-108"></span>
<span class="source-line-no">109</span><span id="line-109"> static class TableInfo {</span>
<span class="source-line-no">110</span><span id="line-110"> private TableDescriptor tableDesctiptor;</span>
<span class="source-line-no">111</span><span id="line-111"> private RegionLocator regionLocator;</span>
<span class="source-line-no">112</span><span id="line-112"></span>
<span class="source-line-no">113</span><span id="line-113"> public TableInfo(TableDescriptor tableDesctiptor, RegionLocator regionLocator) {</span>
<span class="source-line-no">114</span><span id="line-114"> this.tableDesctiptor = tableDesctiptor;</span>
<span class="source-line-no">115</span><span id="line-115"> this.regionLocator = regionLocator;</span>
<span class="source-line-no">116</span><span id="line-116"> }</span>
<span class="source-line-no">117</span><span id="line-117"></span>
<span class="source-line-no">118</span><span id="line-118"> public TableDescriptor getTableDescriptor() {</span>
<span class="source-line-no">119</span><span id="line-119"> return tableDesctiptor;</span>
<span class="source-line-no">120</span><span id="line-120"> }</span>
<span class="source-line-no">121</span><span id="line-121"></span>
<span class="source-line-no">122</span><span id="line-122"> public RegionLocator getRegionLocator() {</span>
<span class="source-line-no">123</span><span id="line-123"> return regionLocator;</span>
<span class="source-line-no">124</span><span id="line-124"> }</span>
<span class="source-line-no">125</span><span id="line-125"> }</span>
<span class="source-line-no">126</span><span id="line-126"></span>
<span class="source-line-no">127</span><span id="line-127"> protected static final byte[] tableSeparator = Bytes.toBytes(";");</span>
<span class="source-line-no">128</span><span id="line-128"></span>
<span class="source-line-no">129</span><span id="line-129"> protected static byte[] combineTableNameSuffix(byte[] tableName, byte[] suffix) {</span>
<span class="source-line-no">130</span><span id="line-130"> return Bytes.add(tableName, tableSeparator, suffix);</span>
<span class="source-line-no">131</span><span id="line-131"> }</span>
<span class="source-line-no">132</span><span id="line-132"></span>
<span class="source-line-no">133</span><span id="line-133"> // The following constants are private since these are used by</span>
<span class="source-line-no">134</span><span id="line-134"> // HFileOutputFormat2 to internally transfer data between job setup and</span>
<span class="source-line-no">135</span><span id="line-135"> // reducer run using conf.</span>
<span class="source-line-no">136</span><span id="line-136"> // These should not be changed by the client.</span>
<span class="source-line-no">137</span><span id="line-137"> static final String COMPRESSION_FAMILIES_CONF_KEY =</span>
<span class="source-line-no">138</span><span id="line-138"> "hbase.hfileoutputformat.families.compression";</span>
<span class="source-line-no">139</span><span id="line-139"> static final String BLOOM_TYPE_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.bloomtype";</span>
<span class="source-line-no">140</span><span id="line-140"> static final String BLOOM_PARAM_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.bloomparam";</span>
<span class="source-line-no">141</span><span id="line-141"> static final String BLOCK_SIZE_FAMILIES_CONF_KEY = "hbase.mapreduce.hfileoutputformat.blocksize";</span>
<span class="source-line-no">142</span><span id="line-142"> static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY =</span>
<span class="source-line-no">143</span><span id="line-143"> "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";</span>
<span class="source-line-no">144</span><span id="line-144"></span>
<span class="source-line-no">145</span><span id="line-145"> // This constant is public since the client can modify this when setting</span>
<span class="source-line-no">146</span><span id="line-146"> // up their conf object and thus refer to this symbol.</span>
<span class="source-line-no">147</span><span id="line-147"> // It is present for backwards compatibility reasons. Use it only to</span>
<span class="source-line-no">148</span><span id="line-148"> // override the auto-detection of datablock encoding and compression.</span>
<span class="source-line-no">149</span><span id="line-149"> public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY =</span>
<span class="source-line-no">150</span><span id="line-150"> "hbase.mapreduce.hfileoutputformat.datablock.encoding";</span>
<span class="source-line-no">151</span><span id="line-151"> public static final String COMPRESSION_OVERRIDE_CONF_KEY =</span>
<span class="source-line-no">152</span><span id="line-152"> "hbase.mapreduce.hfileoutputformat.compression";</span>
<span class="source-line-no">153</span><span id="line-153"></span>
<span class="source-line-no">154</span><span id="line-154"> /**</span>
<span class="source-line-no">155</span><span id="line-155"> * Keep locality while generating HFiles for bulkload. See HBASE-12596</span>
<span class="source-line-no">156</span><span id="line-156"> */</span>
<span class="source-line-no">157</span><span id="line-157"> public static final String LOCALITY_SENSITIVE_CONF_KEY =</span>
<span class="source-line-no">158</span><span id="line-158"> "hbase.bulkload.locality.sensitive.enabled";</span>
<span class="source-line-no">159</span><span id="line-159"> private static final boolean DEFAULT_LOCALITY_SENSITIVE = true;</span>
<span class="source-line-no">160</span><span id="line-160"> static final String OUTPUT_TABLE_NAME_CONF_KEY = "hbase.mapreduce.hfileoutputformat.table.name";</span>
<span class="source-line-no">161</span><span id="line-161"> static final String MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY =</span>
<span class="source-line-no">162</span><span id="line-162"> "hbase.mapreduce.use.multi.table.hfileoutputformat";</span>
<span class="source-line-no">163</span><span id="line-163"></span>
<span class="source-line-no">164</span><span id="line-164"> /**</span>
<span class="source-line-no">165</span><span id="line-165"> * ExtendedCell and ExtendedCellSerialization are InterfaceAudience.Private. We expose this config</span>
<span class="source-line-no">166</span><span id="line-166"> * for internal usage in jobs like WALPlayer which need to use features of ExtendedCell.</span>
<span class="source-line-no">167</span><span id="line-167"> */</span>
<span class="source-line-no">168</span><span id="line-168"> @InterfaceAudience.Private</span>
<span class="source-line-no">169</span><span id="line-169"> public static final String EXTENDED_CELL_SERIALIZATION_ENABLED_KEY =</span>
<span class="source-line-no">170</span><span id="line-170"> "hbase.mapreduce.hfileoutputformat.extendedcell.enabled";</span>
<span class="source-line-no">171</span><span id="line-171"> static final boolean EXTENDED_CELL_SERIALIZATION_ENABLED_DEFULT = false;</span>
<span class="source-line-no">172</span><span id="line-172"></span>
<span class="source-line-no">173</span><span id="line-173"> public static final String REMOTE_CLUSTER_CONF_PREFIX = "hbase.hfileoutputformat.remote.cluster.";</span>
<span class="source-line-no">174</span><span id="line-174"> public static final String REMOTE_CLUSTER_ZOOKEEPER_QUORUM_CONF_KEY =</span>
<span class="source-line-no">175</span><span id="line-175"> REMOTE_CLUSTER_CONF_PREFIX + "zookeeper.quorum";</span>
<span class="source-line-no">176</span><span id="line-176"> public static final String REMOTE_CLUSTER_ZOOKEEPER_CLIENT_PORT_CONF_KEY =</span>
<span class="source-line-no">177</span><span id="line-177"> REMOTE_CLUSTER_CONF_PREFIX + "zookeeper." + HConstants.CLIENT_PORT_STR;</span>
<span class="source-line-no">178</span><span id="line-178"> public static final String REMOTE_CLUSTER_ZOOKEEPER_ZNODE_PARENT_CONF_KEY =</span>
<span class="source-line-no">179</span><span id="line-179"> REMOTE_CLUSTER_CONF_PREFIX + HConstants.ZOOKEEPER_ZNODE_PARENT;</span>
<span class="source-line-no">180</span><span id="line-180"></span>
<span class="source-line-no">181</span><span id="line-181"> public static final String STORAGE_POLICY_PROPERTY = HStore.BLOCK_STORAGE_POLICY_KEY;</span>
<span class="source-line-no">182</span><span id="line-182"> public static final String STORAGE_POLICY_PROPERTY_CF_PREFIX = STORAGE_POLICY_PROPERTY + ".";</span>
<span class="source-line-no">183</span><span id="line-183"></span>
<span class="source-line-no">184</span><span id="line-184"> @Override</span>
<span class="source-line-no">185</span><span id="line-185"> public RecordWriter&lt;ImmutableBytesWritable, Cell&gt;</span>
<span class="source-line-no">186</span><span id="line-186"> getRecordWriter(final TaskAttemptContext context) throws IOException, InterruptedException {</span>
<span class="source-line-no">187</span><span id="line-187"> return createRecordWriter(context, this.getOutputCommitter(context));</span>
<span class="source-line-no">188</span><span id="line-188"> }</span>
<span class="source-line-no">189</span><span id="line-189"></span>
<span class="source-line-no">190</span><span id="line-190"> protected static byte[] getTableNameSuffixedWithFamily(byte[] tableName, byte[] family) {</span>
<span class="source-line-no">191</span><span id="line-191"> return combineTableNameSuffix(tableName, family);</span>
<span class="source-line-no">192</span><span id="line-192"> }</span>
<span class="source-line-no">193</span><span id="line-193"></span>
<span class="source-line-no">194</span><span id="line-194"> static &lt;V extends Cell&gt; RecordWriter&lt;ImmutableBytesWritable, V&gt; createRecordWriter(</span>
<span class="source-line-no">195</span><span id="line-195"> final TaskAttemptContext context, final OutputCommitter committer) throws IOException {</span>
<span class="source-line-no">196</span><span id="line-196"></span>
<span class="source-line-no">197</span><span id="line-197"> // Get the path of the temporary output file</span>
<span class="source-line-no">198</span><span id="line-198"> final Path outputDir = ((PathOutputCommitter) committer).getWorkPath();</span>
<span class="source-line-no">199</span><span id="line-199"> final Configuration conf = context.getConfiguration();</span>
<span class="source-line-no">200</span><span id="line-200"> final boolean writeMultipleTables =</span>
<span class="source-line-no">201</span><span id="line-201"> conf.getBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, false);</span>
<span class="source-line-no">202</span><span id="line-202"> final String writeTableNames = conf.get(OUTPUT_TABLE_NAME_CONF_KEY);</span>
<span class="source-line-no">203</span><span id="line-203"> if (writeTableNames == null || writeTableNames.isEmpty()) {</span>
<span class="source-line-no">204</span><span id="line-204"> throw new IllegalArgumentException("" + OUTPUT_TABLE_NAME_CONF_KEY + " cannot be empty");</span>
<span class="source-line-no">205</span><span id="line-205"> }</span>
<span class="source-line-no">206</span><span id="line-206"> final FileSystem fs = outputDir.getFileSystem(conf);</span>
<span class="source-line-no">207</span><span id="line-207"> // These configs. are from hbase-*.xml</span>
<span class="source-line-no">208</span><span id="line-208"> final long maxsize =</span>
<span class="source-line-no">209</span><span id="line-209"> conf.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);</span>
<span class="source-line-no">210</span><span id="line-210"> // Invented config. Add to hbase-*.xml if other than default compression.</span>
<span class="source-line-no">211</span><span id="line-211"> final String defaultCompressionStr =</span>
<span class="source-line-no">212</span><span id="line-212"> conf.get("hfile.compression", Compression.Algorithm.NONE.getName());</span>
<span class="source-line-no">213</span><span id="line-213"> final Algorithm defaultCompression = HFileWriterImpl.compressionByName(defaultCompressionStr);</span>
<span class="source-line-no">214</span><span id="line-214"> String compressionStr = conf.get(COMPRESSION_OVERRIDE_CONF_KEY);</span>
<span class="source-line-no">215</span><span id="line-215"> final Algorithm overriddenCompression =</span>
<span class="source-line-no">216</span><span id="line-216"> compressionStr != null ? Compression.getCompressionAlgorithmByName(compressionStr) : null;</span>
<span class="source-line-no">217</span><span id="line-217"> final boolean compactionExclude =</span>
<span class="source-line-no">218</span><span id="line-218"> conf.getBoolean("hbase.mapreduce.hfileoutputformat.compaction.exclude", false);</span>
<span class="source-line-no">219</span><span id="line-219"> final Set&lt;String&gt; allTableNames = Arrays</span>
<span class="source-line-no">220</span><span id="line-220"> .stream(writeTableNames.split(Bytes.toString(tableSeparator))).collect(Collectors.toSet());</span>
<span class="source-line-no">221</span><span id="line-221"></span>
<span class="source-line-no">222</span><span id="line-222"> // create a map from column family to the compression algorithm</span>
<span class="source-line-no">223</span><span id="line-223"> final Map&lt;byte[], Algorithm&gt; compressionMap = createFamilyCompressionMap(conf);</span>
<span class="source-line-no">224</span><span id="line-224"> final Map&lt;byte[], BloomType&gt; bloomTypeMap = createFamilyBloomTypeMap(conf);</span>
<span class="source-line-no">225</span><span id="line-225"> final Map&lt;byte[], String&gt; bloomParamMap = createFamilyBloomParamMap(conf);</span>
<span class="source-line-no">226</span><span id="line-226"> final Map&lt;byte[], Integer&gt; blockSizeMap = createFamilyBlockSizeMap(conf);</span>
<span class="source-line-no">227</span><span id="line-227"></span>
<span class="source-line-no">228</span><span id="line-228"> String dataBlockEncodingStr = conf.get(DATABLOCK_ENCODING_OVERRIDE_CONF_KEY);</span>
<span class="source-line-no">229</span><span id="line-229"> final Map&lt;byte[], DataBlockEncoding&gt; datablockEncodingMap =</span>
<span class="source-line-no">230</span><span id="line-230"> createFamilyDataBlockEncodingMap(conf);</span>
<span class="source-line-no">231</span><span id="line-231"> final DataBlockEncoding overriddenEncoding =</span>
<span class="source-line-no">232</span><span id="line-232"> dataBlockEncodingStr != null ? DataBlockEncoding.valueOf(dataBlockEncodingStr) : null;</span>
<span class="source-line-no">233</span><span id="line-233"></span>
<span class="source-line-no">234</span><span id="line-234"> return new RecordWriter&lt;ImmutableBytesWritable, V&gt;() {</span>
<span class="source-line-no">235</span><span id="line-235"> // Map of families to writers and how much has been output on the writer.</span>
<span class="source-line-no">236</span><span id="line-236"> private final Map&lt;byte[], WriterLength&gt; writers = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">237</span><span id="line-237"> private final Map&lt;byte[], byte[]&gt; previousRows = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">238</span><span id="line-238"> private final long now = EnvironmentEdgeManager.currentTime();</span>
<span class="source-line-no">239</span><span id="line-239"> private byte[] tableNameBytes = writeMultipleTables ? null : Bytes.toBytes(writeTableNames);</span>
<span class="source-line-no">240</span><span id="line-240"></span>
<span class="source-line-no">241</span><span id="line-241"> @Override</span>
<span class="source-line-no">242</span><span id="line-242"> public void write(ImmutableBytesWritable row, V cell) throws IOException {</span>
<span class="source-line-no">243</span><span id="line-243"> // null input == user explicitly wants to flush</span>
<span class="source-line-no">244</span><span id="line-244"> if (row == null &amp;&amp; cell == null) {</span>
<span class="source-line-no">245</span><span id="line-245"> rollWriters(null);</span>
<span class="source-line-no">246</span><span id="line-246"> return;</span>
<span class="source-line-no">247</span><span id="line-247"> }</span>
<span class="source-line-no">248</span><span id="line-248"></span>
<span class="source-line-no">249</span><span id="line-249"> ExtendedCell kv = PrivateCellUtil.ensureExtendedCell(cell);</span>
<span class="source-line-no">250</span><span id="line-250"> byte[] rowKey = CellUtil.cloneRow(kv);</span>
<span class="source-line-no">251</span><span id="line-251"> int length = (PrivateCellUtil.estimatedSerializedSizeOf(kv)) - Bytes.SIZEOF_INT;</span>
<span class="source-line-no">252</span><span id="line-252"> byte[] family = CellUtil.cloneFamily(kv);</span>
<span class="source-line-no">253</span><span id="line-253"> if (writeMultipleTables) {</span>
<span class="source-line-no">254</span><span id="line-254"> tableNameBytes = MultiTableHFileOutputFormat.getTableName(row.get());</span>
<span class="source-line-no">255</span><span id="line-255"> tableNameBytes = TableName.valueOf(tableNameBytes).getNameWithNamespaceInclAsString()</span>
<span class="source-line-no">256</span><span id="line-256"> .getBytes(Charset.defaultCharset());</span>
<span class="source-line-no">257</span><span id="line-257"> if (!allTableNames.contains(Bytes.toString(tableNameBytes))) {</span>
<span class="source-line-no">258</span><span id="line-258"> throw new IllegalArgumentException(</span>
<span class="source-line-no">259</span><span id="line-259"> "TableName " + Bytes.toString(tableNameBytes) + " not expected");</span>
<span class="source-line-no">260</span><span id="line-260"> }</span>
<span class="source-line-no">261</span><span id="line-261"> }</span>
<span class="source-line-no">262</span><span id="line-262"> byte[] tableAndFamily = getTableNameSuffixedWithFamily(tableNameBytes, family);</span>
<span class="source-line-no">263</span><span id="line-263"></span>
<span class="source-line-no">264</span><span id="line-264"> WriterLength wl = this.writers.get(tableAndFamily);</span>
<span class="source-line-no">265</span><span id="line-265"></span>
<span class="source-line-no">266</span><span id="line-266"> // If this is a new column family, verify that the directory exists</span>
<span class="source-line-no">267</span><span id="line-267"> if (wl == null) {</span>
<span class="source-line-no">268</span><span id="line-268"> Path writerPath = null;</span>
<span class="source-line-no">269</span><span id="line-269"> if (writeMultipleTables) {</span>
<span class="source-line-no">270</span><span id="line-270"> Path tableRelPath = getTableRelativePath(tableNameBytes);</span>
<span class="source-line-no">271</span><span id="line-271"> writerPath = new Path(outputDir, new Path(tableRelPath, Bytes.toString(family)));</span>
<span class="source-line-no">272</span><span id="line-272"> } else {</span>
<span class="source-line-no">273</span><span id="line-273"> writerPath = new Path(outputDir, Bytes.toString(family));</span>
<span class="source-line-no">274</span><span id="line-274"> }</span>
<span class="source-line-no">275</span><span id="line-275"> fs.mkdirs(writerPath);</span>
<span class="source-line-no">276</span><span id="line-276"> configureStoragePolicy(conf, fs, tableAndFamily, writerPath);</span>
<span class="source-line-no">277</span><span id="line-277"> }</span>
<span class="source-line-no">278</span><span id="line-278"></span>
<span class="source-line-no">279</span><span id="line-279"> // This can only happen once a row is finished though</span>
<span class="source-line-no">280</span><span id="line-280"> if (</span>
<span class="source-line-no">281</span><span id="line-281"> wl != null &amp;&amp; wl.written + length &gt;= maxsize</span>
<span class="source-line-no">282</span><span id="line-282"> &amp;&amp; Bytes.compareTo(this.previousRows.get(family), rowKey) != 0</span>
<span class="source-line-no">283</span><span id="line-283"> ) {</span>
<span class="source-line-no">284</span><span id="line-284"> rollWriters(wl);</span>
<span class="source-line-no">285</span><span id="line-285"> }</span>
<span class="source-line-no">286</span><span id="line-286"></span>
<span class="source-line-no">287</span><span id="line-287"> // create a new WAL writer, if necessary</span>
<span class="source-line-no">288</span><span id="line-288"> if (wl == null || wl.writer == null) {</span>
<span class="source-line-no">289</span><span id="line-289"> InetSocketAddress[] favoredNodes = null;</span>
<span class="source-line-no">290</span><span id="line-290"> if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {</span>
<span class="source-line-no">291</span><span id="line-291"> HRegionLocation loc = null;</span>
<span class="source-line-no">292</span><span id="line-292"> String tableName = Bytes.toString(tableNameBytes);</span>
<span class="source-line-no">293</span><span id="line-293"> if (tableName != null) {</span>
<span class="source-line-no">294</span><span id="line-294"> try (</span>
<span class="source-line-no">295</span><span id="line-295"> Connection connection =</span>
<span class="source-line-no">296</span><span id="line-296"> ConnectionFactory.createConnection(createRemoteClusterConf(conf));</span>
<span class="source-line-no">297</span><span id="line-297"> RegionLocator locator = connection.getRegionLocator(TableName.valueOf(tableName))) {</span>
<span class="source-line-no">298</span><span id="line-298"> loc = locator.getRegionLocation(rowKey);</span>
<span class="source-line-no">299</span><span id="line-299"> } catch (Throwable e) {</span>
<span class="source-line-no">300</span><span id="line-300"> LOG.warn("Something wrong locating rowkey {} in {}", Bytes.toString(rowKey),</span>
<span class="source-line-no">301</span><span id="line-301"> tableName, e);</span>
<span class="source-line-no">302</span><span id="line-302"> loc = null;</span>
<span class="source-line-no">303</span><span id="line-303"> }</span>
<span class="source-line-no">304</span><span id="line-304"> }</span>
<span class="source-line-no">305</span><span id="line-305"> if (null == loc) {</span>
<span class="source-line-no">306</span><span id="line-306"> LOG.trace("Failed get of location, use default writer {}", Bytes.toString(rowKey));</span>
<span class="source-line-no">307</span><span id="line-307"> } else {</span>
<span class="source-line-no">308</span><span id="line-308"> LOG.debug("First rowkey: [{}]", Bytes.toString(rowKey));</span>
<span class="source-line-no">309</span><span id="line-309"> InetSocketAddress initialIsa =</span>
<span class="source-line-no">310</span><span id="line-310"> new InetSocketAddress(loc.getHostname(), loc.getPort());</span>
<span class="source-line-no">311</span><span id="line-311"> if (initialIsa.isUnresolved()) {</span>
<span class="source-line-no">312</span><span id="line-312"> LOG.trace("Failed resolve address {}, use default writer", loc.getHostnamePort());</span>
<span class="source-line-no">313</span><span id="line-313"> } else {</span>
<span class="source-line-no">314</span><span id="line-314"> LOG.debug("Use favored nodes writer: {}", initialIsa.getHostString());</span>
<span class="source-line-no">315</span><span id="line-315"> favoredNodes = new InetSocketAddress[] { initialIsa };</span>
<span class="source-line-no">316</span><span id="line-316"> }</span>
<span class="source-line-no">317</span><span id="line-317"> }</span>
<span class="source-line-no">318</span><span id="line-318"> }</span>
<span class="source-line-no">319</span><span id="line-319"> wl = getNewWriter(tableNameBytes, family, conf, favoredNodes);</span>
<span class="source-line-no">320</span><span id="line-320"></span>
<span class="source-line-no">321</span><span id="line-321"> }</span>
<span class="source-line-no">322</span><span id="line-322"></span>
<span class="source-line-no">323</span><span id="line-323"> // we now have the proper WAL writer. full steam ahead</span>
<span class="source-line-no">324</span><span id="line-324"> PrivateCellUtil.updateLatestStamp(kv, this.now);</span>
<span class="source-line-no">325</span><span id="line-325"> wl.writer.append((ExtendedCell) kv);</span>
<span class="source-line-no">326</span><span id="line-326"> wl.written += length;</span>
<span class="source-line-no">327</span><span id="line-327"></span>
<span class="source-line-no">328</span><span id="line-328"> // Copy the row so we know when a row transition.</span>
<span class="source-line-no">329</span><span id="line-329"> this.previousRows.put(family, rowKey);</span>
<span class="source-line-no">330</span><span id="line-330"> }</span>
<span class="source-line-no">331</span><span id="line-331"></span>
<span class="source-line-no">332</span><span id="line-332"> private Path getTableRelativePath(byte[] tableNameBytes) {</span>
<span class="source-line-no">333</span><span id="line-333"> String tableName = Bytes.toString(tableNameBytes);</span>
<span class="source-line-no">334</span><span id="line-334"> String[] tableNameParts = tableName.split(":");</span>
<span class="source-line-no">335</span><span id="line-335"> Path tableRelPath = new Path(tableNameParts[0]);</span>
<span class="source-line-no">336</span><span id="line-336"> if (tableNameParts.length &gt; 1) {</span>
<span class="source-line-no">337</span><span id="line-337"> tableRelPath = new Path(tableRelPath, tableNameParts[1]);</span>
<span class="source-line-no">338</span><span id="line-338"> }</span>
<span class="source-line-no">339</span><span id="line-339"> return tableRelPath;</span>
<span class="source-line-no">340</span><span id="line-340"> }</span>
<span class="source-line-no">341</span><span id="line-341"></span>
<span class="source-line-no">342</span><span id="line-342"> private void rollWriters(WriterLength writerLength) throws IOException {</span>
<span class="source-line-no">343</span><span id="line-343"> if (writerLength != null) {</span>
<span class="source-line-no">344</span><span id="line-344"> closeWriter(writerLength);</span>
<span class="source-line-no">345</span><span id="line-345"> } else {</span>
<span class="source-line-no">346</span><span id="line-346"> for (WriterLength wl : this.writers.values()) {</span>
<span class="source-line-no">347</span><span id="line-347"> closeWriter(wl);</span>
<span class="source-line-no">348</span><span id="line-348"> }</span>
<span class="source-line-no">349</span><span id="line-349"> }</span>
<span class="source-line-no">350</span><span id="line-350"> }</span>
<span class="source-line-no">351</span><span id="line-351"></span>
<span class="source-line-no">352</span><span id="line-352"> private void closeWriter(WriterLength wl) throws IOException {</span>
<span class="source-line-no">353</span><span id="line-353"> if (wl.writer != null) {</span>
<span class="source-line-no">354</span><span id="line-354"> LOG.info(</span>
<span class="source-line-no">355</span><span id="line-355"> "Writer=" + wl.writer.getPath() + ((wl.written == 0) ? "" : ", wrote=" + wl.written));</span>
<span class="source-line-no">356</span><span id="line-356"> close(wl.writer);</span>
<span class="source-line-no">357</span><span id="line-357"> wl.writer = null;</span>
<span class="source-line-no">358</span><span id="line-358"> }</span>
<span class="source-line-no">359</span><span id="line-359"> wl.written = 0;</span>
<span class="source-line-no">360</span><span id="line-360"> }</span>
<span class="source-line-no">361</span><span id="line-361"></span>
<span class="source-line-no">362</span><span id="line-362"> private Configuration createRemoteClusterConf(Configuration conf) {</span>
<span class="source-line-no">363</span><span id="line-363"> final Configuration newConf = new Configuration(conf);</span>
<span class="source-line-no">364</span><span id="line-364"></span>
<span class="source-line-no">365</span><span id="line-365"> final String quorum = conf.get(REMOTE_CLUSTER_ZOOKEEPER_QUORUM_CONF_KEY);</span>
<span class="source-line-no">366</span><span id="line-366"> final String clientPort = conf.get(REMOTE_CLUSTER_ZOOKEEPER_CLIENT_PORT_CONF_KEY);</span>
<span class="source-line-no">367</span><span id="line-367"> final String parent = conf.get(REMOTE_CLUSTER_ZOOKEEPER_ZNODE_PARENT_CONF_KEY);</span>
<span class="source-line-no">368</span><span id="line-368"></span>
<span class="source-line-no">369</span><span id="line-369"> if (quorum != null &amp;&amp; clientPort != null &amp;&amp; parent != null) {</span>
<span class="source-line-no">370</span><span id="line-370"> newConf.set(HConstants.ZOOKEEPER_QUORUM, quorum);</span>
<span class="source-line-no">371</span><span id="line-371"> newConf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, Integer.parseInt(clientPort));</span>
<span class="source-line-no">372</span><span id="line-372"> newConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, parent);</span>
<span class="source-line-no">373</span><span id="line-373"> }</span>
<span class="source-line-no">374</span><span id="line-374"></span>
<span class="source-line-no">375</span><span id="line-375"> for (Entry&lt;String, String&gt; entry : conf) {</span>
<span class="source-line-no">376</span><span id="line-376"> String key = entry.getKey();</span>
<span class="source-line-no">377</span><span id="line-377"> if (</span>
<span class="source-line-no">378</span><span id="line-378"> REMOTE_CLUSTER_ZOOKEEPER_QUORUM_CONF_KEY.equals(key)</span>
<span class="source-line-no">379</span><span id="line-379"> || REMOTE_CLUSTER_ZOOKEEPER_CLIENT_PORT_CONF_KEY.equals(key)</span>
<span class="source-line-no">380</span><span id="line-380"> || REMOTE_CLUSTER_ZOOKEEPER_ZNODE_PARENT_CONF_KEY.equals(key)</span>
<span class="source-line-no">381</span><span id="line-381"> ) {</span>
<span class="source-line-no">382</span><span id="line-382"> // Handled them above</span>
<span class="source-line-no">383</span><span id="line-383"> continue;</span>
<span class="source-line-no">384</span><span id="line-384"> }</span>
<span class="source-line-no">385</span><span id="line-385"></span>
<span class="source-line-no">386</span><span id="line-386"> if (entry.getKey().startsWith(REMOTE_CLUSTER_CONF_PREFIX)) {</span>
<span class="source-line-no">387</span><span id="line-387"> String originalKey = entry.getKey().substring(REMOTE_CLUSTER_CONF_PREFIX.length());</span>
<span class="source-line-no">388</span><span id="line-388"> if (!originalKey.isEmpty()) {</span>
<span class="source-line-no">389</span><span id="line-389"> newConf.set(originalKey, entry.getValue());</span>
<span class="source-line-no">390</span><span id="line-390"> }</span>
<span class="source-line-no">391</span><span id="line-391"> }</span>
<span class="source-line-no">392</span><span id="line-392"> }</span>
<span class="source-line-no">393</span><span id="line-393"></span>
<span class="source-line-no">394</span><span id="line-394"> return newConf;</span>
<span class="source-line-no">395</span><span id="line-395"> }</span>
<span class="source-line-no">396</span><span id="line-396"></span>
<span class="source-line-no">397</span><span id="line-397"> /*</span>
<span class="source-line-no">398</span><span id="line-398"> * Create a new StoreFile.Writer.</span>
<span class="source-line-no">399</span><span id="line-399"> * @return A WriterLength, containing a new StoreFile.Writer.</span>
<span class="source-line-no">400</span><span id="line-400"> */</span>
<span class="source-line-no">401</span><span id="line-401"> @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "BX_UNBOXING_IMMEDIATELY_REBOXED",</span>
<span class="source-line-no">402</span><span id="line-402"> justification = "Not important")</span>
<span class="source-line-no">403</span><span id="line-403"> private WriterLength getNewWriter(byte[] tableName, byte[] family, Configuration conf,</span>
<span class="source-line-no">404</span><span id="line-404"> InetSocketAddress[] favoredNodes) throws IOException {</span>
<span class="source-line-no">405</span><span id="line-405"> byte[] tableAndFamily = getTableNameSuffixedWithFamily(tableName, family);</span>
<span class="source-line-no">406</span><span id="line-406"> Path familydir = new Path(outputDir, Bytes.toString(family));</span>
<span class="source-line-no">407</span><span id="line-407"> if (writeMultipleTables) {</span>
<span class="source-line-no">408</span><span id="line-408"> familydir =</span>
<span class="source-line-no">409</span><span id="line-409"> new Path(outputDir, new Path(getTableRelativePath(tableName), Bytes.toString(family)));</span>
<span class="source-line-no">410</span><span id="line-410"> }</span>
<span class="source-line-no">411</span><span id="line-411"> WriterLength wl = new WriterLength();</span>
<span class="source-line-no">412</span><span id="line-412"> Algorithm compression = overriddenCompression;</span>
<span class="source-line-no">413</span><span id="line-413"> compression = compression == null ? compressionMap.get(tableAndFamily) : compression;</span>
<span class="source-line-no">414</span><span id="line-414"> compression = compression == null ? defaultCompression : compression;</span>
<span class="source-line-no">415</span><span id="line-415"> BloomType bloomType = bloomTypeMap.get(tableAndFamily);</span>
<span class="source-line-no">416</span><span id="line-416"> bloomType = bloomType == null ? BloomType.NONE : bloomType;</span>
<span class="source-line-no">417</span><span id="line-417"> String bloomParam = bloomParamMap.get(tableAndFamily);</span>
<span class="source-line-no">418</span><span id="line-418"> if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {</span>
<span class="source-line-no">419</span><span id="line-419"> conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY, bloomParam);</span>
<span class="source-line-no">420</span><span id="line-420"> }</span>
<span class="source-line-no">421</span><span id="line-421"> Integer blockSize = blockSizeMap.get(tableAndFamily);</span>
<span class="source-line-no">422</span><span id="line-422"> blockSize = blockSize == null ? HConstants.DEFAULT_BLOCKSIZE : blockSize;</span>
<span class="source-line-no">423</span><span id="line-423"> DataBlockEncoding encoding = overriddenEncoding;</span>
<span class="source-line-no">424</span><span id="line-424"> encoding = encoding == null ? datablockEncodingMap.get(tableAndFamily) : encoding;</span>
<span class="source-line-no">425</span><span id="line-425"> encoding = encoding == null ? DataBlockEncoding.NONE : encoding;</span>
<span class="source-line-no">426</span><span id="line-426"> HFileContextBuilder contextBuilder = new HFileContextBuilder().withCompression(compression)</span>
<span class="source-line-no">427</span><span id="line-427"> .withDataBlockEncoding(encoding).withChecksumType(StoreUtils.getChecksumType(conf))</span>
<span class="source-line-no">428</span><span id="line-428"> .withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(conf)).withBlockSize(blockSize)</span>
<span class="source-line-no">429</span><span id="line-429"> .withColumnFamily(family).withTableName(tableName)</span>
<span class="source-line-no">430</span><span id="line-430"> .withCreateTime(EnvironmentEdgeManager.currentTime());</span>
<span class="source-line-no">431</span><span id="line-431"></span>
<span class="source-line-no">432</span><span id="line-432"> if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {</span>
<span class="source-line-no">433</span><span id="line-433"> contextBuilder.withIncludesTags(true);</span>
<span class="source-line-no">434</span><span id="line-434"> }</span>
<span class="source-line-no">435</span><span id="line-435"></span>
<span class="source-line-no">436</span><span id="line-436"> HFileContext hFileContext = contextBuilder.build();</span>
<span class="source-line-no">437</span><span id="line-437"> if (null == favoredNodes) {</span>
<span class="source-line-no">438</span><span id="line-438"> wl.writer =</span>
<span class="source-line-no">439</span><span id="line-439"> new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, fs).withOutputDir(familydir)</span>
<span class="source-line-no">440</span><span id="line-440"> .withBloomType(bloomType).withFileContext(hFileContext).build();</span>
<span class="source-line-no">441</span><span id="line-441"> } else {</span>
<span class="source-line-no">442</span><span id="line-442"> wl.writer = new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, new HFileSystem(fs))</span>
<span class="source-line-no">443</span><span id="line-443"> .withOutputDir(familydir).withBloomType(bloomType).withFileContext(hFileContext)</span>
<span class="source-line-no">444</span><span id="line-444"> .withFavoredNodes(favoredNodes).build();</span>
<span class="source-line-no">445</span><span id="line-445"> }</span>
<span class="source-line-no">446</span><span id="line-446"></span>
<span class="source-line-no">447</span><span id="line-447"> this.writers.put(tableAndFamily, wl);</span>
<span class="source-line-no">448</span><span id="line-448"> return wl;</span>
<span class="source-line-no">449</span><span id="line-449"> }</span>
<span class="source-line-no">450</span><span id="line-450"></span>
<span class="source-line-no">451</span><span id="line-451"> private void close(final StoreFileWriter w) throws IOException {</span>
<span class="source-line-no">452</span><span id="line-452"> if (w != null) {</span>
<span class="source-line-no">453</span><span id="line-453"> w.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(EnvironmentEdgeManager.currentTime()));</span>
<span class="source-line-no">454</span><span id="line-454"> w.appendFileInfo(BULKLOAD_TASK_KEY, Bytes.toBytes(context.getTaskAttemptID().toString()));</span>
<span class="source-line-no">455</span><span id="line-455"> w.appendFileInfo(MAJOR_COMPACTION_KEY, Bytes.toBytes(true));</span>
<span class="source-line-no">456</span><span id="line-456"> w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY, Bytes.toBytes(compactionExclude));</span>
<span class="source-line-no">457</span><span id="line-457"> w.appendTrackedTimestampsToMetadata();</span>
<span class="source-line-no">458</span><span id="line-458"> w.close();</span>
<span class="source-line-no">459</span><span id="line-459"> }</span>
<span class="source-line-no">460</span><span id="line-460"> }</span>
<span class="source-line-no">461</span><span id="line-461"></span>
<span class="source-line-no">462</span><span id="line-462"> @Override</span>
<span class="source-line-no">463</span><span id="line-463"> public void close(TaskAttemptContext c) throws IOException, InterruptedException {</span>
<span class="source-line-no">464</span><span id="line-464"> for (WriterLength wl : this.writers.values()) {</span>
<span class="source-line-no">465</span><span id="line-465"> close(wl.writer);</span>
<span class="source-line-no">466</span><span id="line-466"> }</span>
<span class="source-line-no">467</span><span id="line-467"> }</span>
<span class="source-line-no">468</span><span id="line-468"> };</span>
<span class="source-line-no">469</span><span id="line-469"> }</span>
<span class="source-line-no">470</span><span id="line-470"></span>
<span class="source-line-no">471</span><span id="line-471"> /**</span>
<span class="source-line-no">472</span><span id="line-472"> * Configure block storage policy for CF after the directory is created.</span>
<span class="source-line-no">473</span><span id="line-473"> */</span>
<span class="source-line-no">474</span><span id="line-474"> static void configureStoragePolicy(final Configuration conf, final FileSystem fs,</span>
<span class="source-line-no">475</span><span id="line-475"> byte[] tableAndFamily, Path cfPath) {</span>
<span class="source-line-no">476</span><span id="line-476"> if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {</span>
<span class="source-line-no">477</span><span id="line-477"> return;</span>
<span class="source-line-no">478</span><span id="line-478"> }</span>
<span class="source-line-no">479</span><span id="line-479"></span>
<span class="source-line-no">480</span><span id="line-480"> String policy = conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),</span>
<span class="source-line-no">481</span><span id="line-481"> conf.get(STORAGE_POLICY_PROPERTY));</span>
<span class="source-line-no">482</span><span id="line-482"> CommonFSUtils.setStoragePolicy(fs, cfPath, policy);</span>
<span class="source-line-no">483</span><span id="line-483"> }</span>
<span class="source-line-no">484</span><span id="line-484"></span>
<span class="source-line-no">485</span><span id="line-485"> /*</span>
<span class="source-line-no">486</span><span id="line-486"> * Data structure to hold a Writer and amount of data written on it.</span>
<span class="source-line-no">487</span><span id="line-487"> */</span>
<span class="source-line-no">488</span><span id="line-488"> static class WriterLength {</span>
<span class="source-line-no">489</span><span id="line-489"> long written = 0;</span>
<span class="source-line-no">490</span><span id="line-490"> StoreFileWriter writer = null;</span>
<span class="source-line-no">491</span><span id="line-491"> }</span>
<span class="source-line-no">492</span><span id="line-492"></span>
<span class="source-line-no">493</span><span id="line-493"> /**</span>
<span class="source-line-no">494</span><span id="line-494"> * Return the start keys of all of the regions in this table, as a list of ImmutableBytesWritable.</span>
<span class="source-line-no">495</span><span id="line-495"> */</span>
<span class="source-line-no">496</span><span id="line-496"> private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,</span>
<span class="source-line-no">497</span><span id="line-497"> boolean writeMultipleTables) throws IOException {</span>
<span class="source-line-no">498</span><span id="line-498"></span>
<span class="source-line-no">499</span><span id="line-499"> ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">500</span><span id="line-500"> for (RegionLocator regionLocator : regionLocators) {</span>
<span class="source-line-no">501</span><span id="line-501"> TableName tableName = regionLocator.getName();</span>
<span class="source-line-no">502</span><span id="line-502"> LOG.info("Looking up current regions for table " + tableName);</span>
<span class="source-line-no">503</span><span id="line-503"> byte[][] byteKeys = regionLocator.getStartKeys();</span>
<span class="source-line-no">504</span><span id="line-504"> for (byte[] byteKey : byteKeys) {</span>
<span class="source-line-no">505</span><span id="line-505"> byte[] fullKey = byteKey; // HFileOutputFormat2 use case</span>
<span class="source-line-no">506</span><span id="line-506"> if (writeMultipleTables) {</span>
<span class="source-line-no">507</span><span id="line-507"> // MultiTableHFileOutputFormat use case</span>
<span class="source-line-no">508</span><span id="line-508"> fullKey = combineTableNameSuffix(tableName.getName(), byteKey);</span>
<span class="source-line-no">509</span><span id="line-509"> }</span>
<span class="source-line-no">510</span><span id="line-510"> if (LOG.isDebugEnabled()) {</span>
<span class="source-line-no">511</span><span id="line-511"> LOG.debug("SplitPoint startkey for " + tableName + ": " + Bytes.toStringBinary(fullKey));</span>
<span class="source-line-no">512</span><span id="line-512"> }</span>
<span class="source-line-no">513</span><span id="line-513"> ret.add(new ImmutableBytesWritable(fullKey));</span>
<span class="source-line-no">514</span><span id="line-514"> }</span>
<span class="source-line-no">515</span><span id="line-515"> }</span>
<span class="source-line-no">516</span><span id="line-516"> return ret;</span>
<span class="source-line-no">517</span><span id="line-517"> }</span>
<span class="source-line-no">518</span><span id="line-518"></span>
<span class="source-line-no">519</span><span id="line-519"> /**</span>
<span class="source-line-no">520</span><span id="line-520"> * Write out a {@link SequenceFile} that can be read by {@link TotalOrderPartitioner} that</span>
<span class="source-line-no">521</span><span id="line-521"> * contains the split points in startKeys.</span>
<span class="source-line-no">522</span><span id="line-522"> */</span>
<span class="source-line-no">523</span><span id="line-523"> @SuppressWarnings("deprecation")</span>
<span class="source-line-no">524</span><span id="line-524"> private static void writePartitions(Configuration conf, Path partitionsPath,</span>
<span class="source-line-no">525</span><span id="line-525"> List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {</span>
<span class="source-line-no">526</span><span id="line-526"> LOG.info("Writing partition information to " + partitionsPath);</span>
<span class="source-line-no">527</span><span id="line-527"> if (startKeys.isEmpty()) {</span>
<span class="source-line-no">528</span><span id="line-528"> throw new IllegalArgumentException("No regions passed");</span>
<span class="source-line-no">529</span><span id="line-529"> }</span>
<span class="source-line-no">530</span><span id="line-530"></span>
<span class="source-line-no">531</span><span id="line-531"> // We're generating a list of split points, and we don't ever</span>
<span class="source-line-no">532</span><span id="line-532"> // have keys &lt; the first region (which has an empty start key)</span>
<span class="source-line-no">533</span><span id="line-533"> // so we need to remove it. Otherwise we would end up with an</span>
<span class="source-line-no">534</span><span id="line-534"> // empty reducer with index 0</span>
<span class="source-line-no">535</span><span id="line-535"> TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);</span>
<span class="source-line-no">536</span><span id="line-536"> ImmutableBytesWritable first = sorted.first();</span>
<span class="source-line-no">537</span><span id="line-537"> if (writeMultipleTables) {</span>
<span class="source-line-no">538</span><span id="line-538"> first =</span>
<span class="source-line-no">539</span><span id="line-539"> new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first().get()));</span>
<span class="source-line-no">540</span><span id="line-540"> }</span>
<span class="source-line-no">541</span><span id="line-541"> if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {</span>
<span class="source-line-no">542</span><span id="line-542"> throw new IllegalArgumentException(</span>
<span class="source-line-no">543</span><span id="line-543"> "First region of table should have empty start key. Instead has: "</span>
<span class="source-line-no">544</span><span id="line-544"> + Bytes.toStringBinary(first.get()));</span>
<span class="source-line-no">545</span><span id="line-545"> }</span>
<span class="source-line-no">546</span><span id="line-546"> sorted.remove(sorted.first());</span>
<span class="source-line-no">547</span><span id="line-547"></span>
<span class="source-line-no">548</span><span id="line-548"> // Write the actual file</span>
<span class="source-line-no">549</span><span id="line-549"> FileSystem fs = partitionsPath.getFileSystem(conf);</span>
<span class="source-line-no">550</span><span id="line-550"> SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, partitionsPath,</span>
<span class="source-line-no">551</span><span id="line-551"> ImmutableBytesWritable.class, NullWritable.class);</span>
<span class="source-line-no">552</span><span id="line-552"></span>
<span class="source-line-no">553</span><span id="line-553"> try {</span>
<span class="source-line-no">554</span><span id="line-554"> for (ImmutableBytesWritable startKey : sorted) {</span>
<span class="source-line-no">555</span><span id="line-555"> writer.append(startKey, NullWritable.get());</span>
<span class="source-line-no">556</span><span id="line-556"> }</span>
<span class="source-line-no">557</span><span id="line-557"> } finally {</span>
<span class="source-line-no">558</span><span id="line-558"> writer.close();</span>
<span class="source-line-no">559</span><span id="line-559"> }</span>
<span class="source-line-no">560</span><span id="line-560"> }</span>
<span class="source-line-no">561</span><span id="line-561"></span>
<span class="source-line-no">562</span><span id="line-562"> /**</span>
<span class="source-line-no">563</span><span id="line-563"> * Configure a MapReduce Job to perform an incremental load into the given table. This</span>
<span class="source-line-no">564</span><span id="line-564"> * &lt;ul&gt;</span>
<span class="source-line-no">565</span><span id="line-565"> * &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;</span>
<span class="source-line-no">566</span><span id="line-566"> * &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;</span>
<span class="source-line-no">567</span><span id="line-567"> * &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;</span>
<span class="source-line-no">568</span><span id="line-568"> * &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;</span>
<span class="source-line-no">569</span><span id="line-569"> * &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or</span>
<span class="source-line-no">570</span><span id="line-570"> * PutSortReducer)&lt;/li&gt;</span>
<span class="source-line-no">571</span><span id="line-571"> * &lt;li&gt;Sets the HBase cluster key to load region locations for locality-sensitive&lt;/li&gt;</span>
<span class="source-line-no">572</span><span id="line-572"> * &lt;/ul&gt;</span>
<span class="source-line-no">573</span><span id="line-573"> * The user should be sure to set the map output value class to either KeyValue or Put before</span>
<span class="source-line-no">574</span><span id="line-574"> * running this function.</span>
<span class="source-line-no">575</span><span id="line-575"> */</span>
<span class="source-line-no">576</span><span id="line-576"> public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)</span>
<span class="source-line-no">577</span><span id="line-577"> throws IOException {</span>
<span class="source-line-no">578</span><span id="line-578"> configureIncrementalLoad(job, table.getDescriptor(), regionLocator);</span>
<span class="source-line-no">579</span><span id="line-579"> configureRemoteCluster(job, table.getConfiguration());</span>
<span class="source-line-no">580</span><span id="line-580"> }</span>
<span class="source-line-no">581</span><span id="line-581"></span>
<span class="source-line-no">582</span><span id="line-582"> /**</span>
<span class="source-line-no">583</span><span id="line-583"> * Configure a MapReduce Job to perform an incremental load into the given table. This</span>
<span class="source-line-no">584</span><span id="line-584"> * &lt;ul&gt;</span>
<span class="source-line-no">585</span><span id="line-585"> * &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;</span>
<span class="source-line-no">586</span><span id="line-586"> * &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;</span>
<span class="source-line-no">587</span><span id="line-587"> * &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;</span>
<span class="source-line-no">588</span><span id="line-588"> * &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;</span>
<span class="source-line-no">589</span><span id="line-589"> * &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or</span>
<span class="source-line-no">590</span><span id="line-590"> * PutSortReducer)&lt;/li&gt;</span>
<span class="source-line-no">591</span><span id="line-591"> * &lt;/ul&gt;</span>
<span class="source-line-no">592</span><span id="line-592"> * The user should be sure to set the map output value class to either KeyValue or Put before</span>
<span class="source-line-no">593</span><span id="line-593"> * running this function.</span>
<span class="source-line-no">594</span><span id="line-594"> */</span>
<span class="source-line-no">595</span><span id="line-595"> public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,</span>
<span class="source-line-no">596</span><span id="line-596"> RegionLocator regionLocator) throws IOException {</span>
<span class="source-line-no">597</span><span id="line-597"> ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">598</span><span id="line-598"> singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));</span>
<span class="source-line-no">599</span><span id="line-599"> configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);</span>
<span class="source-line-no">600</span><span id="line-600"> }</span>
<span class="source-line-no">601</span><span id="line-601"></span>
<span class="source-line-no">602</span><span id="line-602"> static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,</span>
<span class="source-line-no">603</span><span id="line-603"> Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {</span>
<span class="source-line-no">604</span><span id="line-604"> Configuration conf = job.getConfiguration();</span>
<span class="source-line-no">605</span><span id="line-605"> job.setOutputKeyClass(ImmutableBytesWritable.class);</span>
<span class="source-line-no">606</span><span id="line-606"> job.setOutputValueClass(MapReduceExtendedCell.class);</span>
<span class="source-line-no">607</span><span id="line-607"> job.setOutputFormatClass(cls);</span>
<span class="source-line-no">608</span><span id="line-608"></span>
<span class="source-line-no">609</span><span id="line-609"> if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {</span>
<span class="source-line-no">610</span><span id="line-610"> throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");</span>
<span class="source-line-no">611</span><span id="line-611"> }</span>
<span class="source-line-no">612</span><span id="line-612"> boolean writeMultipleTables = false;</span>
<span class="source-line-no">613</span><span id="line-613"> if (MultiTableHFileOutputFormat.class.equals(cls)) {</span>
<span class="source-line-no">614</span><span id="line-614"> writeMultipleTables = true;</span>
<span class="source-line-no">615</span><span id="line-615"> conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);</span>
<span class="source-line-no">616</span><span id="line-616"> }</span>
<span class="source-line-no">617</span><span id="line-617"> // Based on the configured map output class, set the correct reducer to properly</span>
<span class="source-line-no">618</span><span id="line-618"> // sort the incoming values.</span>
<span class="source-line-no">619</span><span id="line-619"> // TODO it would be nice to pick one or the other of these formats.</span>
<span class="source-line-no">620</span><span id="line-620"> if (</span>
<span class="source-line-no">621</span><span id="line-621"> KeyValue.class.equals(job.getMapOutputValueClass())</span>
<span class="source-line-no">622</span><span id="line-622"> || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())</span>
<span class="source-line-no">623</span><span id="line-623"> ) {</span>
<span class="source-line-no">624</span><span id="line-624"> job.setReducerClass(CellSortReducer.class);</span>
<span class="source-line-no">625</span><span id="line-625"> } else if (Put.class.equals(job.getMapOutputValueClass())) {</span>
<span class="source-line-no">626</span><span id="line-626"> job.setReducerClass(PutSortReducer.class);</span>
<span class="source-line-no">627</span><span id="line-627"> } else if (Text.class.equals(job.getMapOutputValueClass())) {</span>
<span class="source-line-no">628</span><span id="line-628"> job.setReducerClass(TextSortReducer.class);</span>
<span class="source-line-no">629</span><span id="line-629"> } else {</span>
<span class="source-line-no">630</span><span id="line-630"> LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());</span>
<span class="source-line-no">631</span><span id="line-631"> }</span>
<span class="source-line-no">632</span><span id="line-632"></span>
<span class="source-line-no">633</span><span id="line-633"> mergeSerializations(conf);</span>
<span class="source-line-no">634</span><span id="line-634"></span>
<span class="source-line-no">635</span><span id="line-635"> if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {</span>
<span class="source-line-no">636</span><span id="line-636"> LOG.info("bulkload locality sensitive enabled");</span>
<span class="source-line-no">637</span><span id="line-637"> }</span>
<span class="source-line-no">638</span><span id="line-638"></span>
<span class="source-line-no">639</span><span id="line-639"> /* Now get the region start keys for every table required */</span>
<span class="source-line-no">640</span><span id="line-640"> List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());</span>
<span class="source-line-no">641</span><span id="line-641"> List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;(multiTableInfo.size());</span>
<span class="source-line-no">642</span><span id="line-642"> List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;(multiTableInfo.size());</span>
<span class="source-line-no">643</span><span id="line-643"></span>
<span class="source-line-no">644</span><span id="line-644"> for (TableInfo tableInfo : multiTableInfo) {</span>
<span class="source-line-no">645</span><span id="line-645"> regionLocators.add(tableInfo.getRegionLocator());</span>
<span class="source-line-no">646</span><span id="line-646"> String tn = writeMultipleTables</span>
<span class="source-line-no">647</span><span id="line-647"> ? tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString()</span>
<span class="source-line-no">648</span><span id="line-648"> : tableInfo.getRegionLocator().getName().getNameAsString();</span>
<span class="source-line-no">649</span><span id="line-649"> allTableNames.add(tn);</span>
<span class="source-line-no">650</span><span id="line-650"> tableDescriptors.add(tableInfo.getTableDescriptor());</span>
<span class="source-line-no">651</span><span id="line-651"> }</span>
<span class="source-line-no">652</span><span id="line-652"> // Record tablenames for creating writer by favored nodes, and decoding compression,</span>
<span class="source-line-no">653</span><span id="line-653"> // block size and other attributes of columnfamily per table</span>
<span class="source-line-no">654</span><span id="line-654"> conf.set(OUTPUT_TABLE_NAME_CONF_KEY,</span>
<span class="source-line-no">655</span><span id="line-655"> StringUtils.join(allTableNames, Bytes.toString(tableSeparator)));</span>
<span class="source-line-no">656</span><span id="line-656"> List&lt;ImmutableBytesWritable&gt; startKeys =</span>
<span class="source-line-no">657</span><span id="line-657"> getRegionStartKeys(regionLocators, writeMultipleTables);</span>
<span class="source-line-no">658</span><span id="line-658"> // Use table's region boundaries for TOP split points.</span>
<span class="source-line-no">659</span><span id="line-659"> LOG.info("Configuring " + startKeys.size() + " reduce partitions "</span>
<span class="source-line-no">660</span><span id="line-660"> + "to match current region count for all tables");</span>
<span class="source-line-no">661</span><span id="line-661"> job.setNumReduceTasks(startKeys.size());</span>
<span class="source-line-no">662</span><span id="line-662"></span>
<span class="source-line-no">663</span><span id="line-663"> configurePartitioner(job, startKeys, writeMultipleTables);</span>
<span class="source-line-no">664</span><span id="line-664"> // Set compression algorithms based on column families</span>
<span class="source-line-no">665</span><span id="line-665"></span>
<span class="source-line-no">666</span><span id="line-666"> conf.set(COMPRESSION_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">667</span><span id="line-667"> serializeColumnFamilyAttribute(compressionDetails, tableDescriptors));</span>
<span class="source-line-no">668</span><span id="line-668"> conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">669</span><span id="line-669"> serializeColumnFamilyAttribute(blockSizeDetails, tableDescriptors));</span>
<span class="source-line-no">670</span><span id="line-670"> conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">671</span><span id="line-671"> serializeColumnFamilyAttribute(bloomTypeDetails, tableDescriptors));</span>
<span class="source-line-no">672</span><span id="line-672"> conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">673</span><span id="line-673"> serializeColumnFamilyAttribute(bloomParamDetails, tableDescriptors));</span>
<span class="source-line-no">674</span><span id="line-674"> conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">675</span><span id="line-675"> serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));</span>
<span class="source-line-no">676</span><span id="line-676"></span>
<span class="source-line-no">677</span><span id="line-677"> TableMapReduceUtil.addDependencyJars(job);</span>
<span class="source-line-no">678</span><span id="line-678"> TableMapReduceUtil.initCredentials(job);</span>
<span class="source-line-no">679</span><span id="line-679"> LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));</span>
<span class="source-line-no">680</span><span id="line-680"> }</span>
<span class="source-line-no">681</span><span id="line-681"></span>
<span class="source-line-no">682</span><span id="line-682"> private static void mergeSerializations(Configuration conf) {</span>
<span class="source-line-no">683</span><span id="line-683"> List&lt;String&gt; serializations = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">684</span><span id="line-684"></span>
<span class="source-line-no">685</span><span id="line-685"> // add any existing values that have been set</span>
<span class="source-line-no">686</span><span id="line-686"> String[] existing = conf.getStrings("io.serializations");</span>
<span class="source-line-no">687</span><span id="line-687"> if (existing != null) {</span>
<span class="source-line-no">688</span><span id="line-688"> Collections.addAll(serializations, existing);</span>
<span class="source-line-no">689</span><span id="line-689"> }</span>
<span class="source-line-no">690</span><span id="line-690"></span>
<span class="source-line-no">691</span><span id="line-691"> serializations.add(MutationSerialization.class.getName());</span>
<span class="source-line-no">692</span><span id="line-692"> serializations.add(ResultSerialization.class.getName());</span>
<span class="source-line-no">693</span><span id="line-693"></span>
<span class="source-line-no">694</span><span id="line-694"> // Add ExtendedCellSerialization, if configured. Order matters here. Hadoop's</span>
<span class="source-line-no">695</span><span id="line-695"> // SerializationFactory runs through serializations in the order they are registered.</span>
<span class="source-line-no">696</span><span id="line-696"> // We want to register ExtendedCellSerialization before CellSerialization because both</span>
<span class="source-line-no">697</span><span id="line-697"> // work for ExtendedCells but only ExtendedCellSerialization handles them properly.</span>
<span class="source-line-no">698</span><span id="line-698"> if (</span>
<span class="source-line-no">699</span><span id="line-699"> conf.getBoolean(EXTENDED_CELL_SERIALIZATION_ENABLED_KEY,</span>
<span class="source-line-no">700</span><span id="line-700"> EXTENDED_CELL_SERIALIZATION_ENABLED_DEFULT)</span>
<span class="source-line-no">701</span><span id="line-701"> ) {</span>
<span class="source-line-no">702</span><span id="line-702"> serializations.add(ExtendedCellSerialization.class.getName());</span>
<span class="source-line-no">703</span><span id="line-703"> }</span>
<span class="source-line-no">704</span><span id="line-704"> serializations.add(CellSerialization.class.getName());</span>
<span class="source-line-no">705</span><span id="line-705"></span>
<span class="source-line-no">706</span><span id="line-706"> conf.setStrings("io.serializations", serializations.toArray(new String[0]));</span>
<span class="source-line-no">707</span><span id="line-707"> }</span>
<span class="source-line-no">708</span><span id="line-708"></span>
<span class="source-line-no">709</span><span id="line-709"> public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor)</span>
<span class="source-line-no">710</span><span id="line-710"> throws IOException {</span>
<span class="source-line-no">711</span><span id="line-711"> Configuration conf = job.getConfiguration();</span>
<span class="source-line-no">712</span><span id="line-712"></span>
<span class="source-line-no">713</span><span id="line-713"> job.setOutputKeyClass(ImmutableBytesWritable.class);</span>
<span class="source-line-no">714</span><span id="line-714"> job.setOutputValueClass(MapReduceExtendedCell.class);</span>
<span class="source-line-no">715</span><span id="line-715"> job.setOutputFormatClass(HFileOutputFormat2.class);</span>
<span class="source-line-no">716</span><span id="line-716"></span>
<span class="source-line-no">717</span><span id="line-717"> ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);</span>
<span class="source-line-no">718</span><span id="line-718"> singleTableDescriptor.add(tableDescriptor);</span>
<span class="source-line-no">719</span><span id="line-719"></span>
<span class="source-line-no">720</span><span id="line-720"> conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());</span>
<span class="source-line-no">721</span><span id="line-721"> // Set compression algorithms based on column families</span>
<span class="source-line-no">722</span><span id="line-722"> conf.set(COMPRESSION_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">723</span><span id="line-723"> serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));</span>
<span class="source-line-no">724</span><span id="line-724"> conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">725</span><span id="line-725"> serializeColumnFamilyAttribute(blockSizeDetails, singleTableDescriptor));</span>
<span class="source-line-no">726</span><span id="line-726"> conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">727</span><span id="line-727"> serializeColumnFamilyAttribute(bloomTypeDetails, singleTableDescriptor));</span>
<span class="source-line-no">728</span><span id="line-728"> conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">729</span><span id="line-729"> serializeColumnFamilyAttribute(bloomParamDetails, singleTableDescriptor));</span>
<span class="source-line-no">730</span><span id="line-730"> conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,</span>
<span class="source-line-no">731</span><span id="line-731"> serializeColumnFamilyAttribute(dataBlockEncodingDetails, singleTableDescriptor));</span>
<span class="source-line-no">732</span><span id="line-732"></span>
<span class="source-line-no">733</span><span id="line-733"> TableMapReduceUtil.addDependencyJars(job);</span>
<span class="source-line-no">734</span><span id="line-734"> TableMapReduceUtil.initCredentials(job);</span>
<span class="source-line-no">735</span><span id="line-735"> LOG.info("Incremental table " + tableDescriptor.getTableName() + " output configured.");</span>
<span class="source-line-no">736</span><span id="line-736"> }</span>
<span class="source-line-no">737</span><span id="line-737"></span>
<span class="source-line-no">738</span><span id="line-738"> /**</span>
<span class="source-line-no">739</span><span id="line-739"> * Configure HBase cluster key for remote cluster to load region location for locality-sensitive</span>
<span class="source-line-no">740</span><span id="line-740"> * if it's enabled. It's not necessary to call this method explicitly when the cluster key for</span>
<span class="source-line-no">741</span><span id="line-741"> * HBase cluster to be used to load region location is configured in the job configuration. Call</span>
<span class="source-line-no">742</span><span id="line-742"> * this method when another HBase cluster key is configured in the job configuration. For example,</span>
<span class="source-line-no">743</span><span id="line-743"> * you should call when you load data from HBase cluster A using {@link TableInputFormat} and</span>
<span class="source-line-no">744</span><span id="line-744"> * generate hfiles for HBase cluster B. Otherwise, HFileOutputFormat2 fetch location from cluster</span>
<span class="source-line-no">745</span><span id="line-745"> * A and locality-sensitive won't working correctly.</span>
<span class="source-line-no">746</span><span id="line-746"> * {@link #configureIncrementalLoad(Job, Table, RegionLocator)} calls this method using</span>
<span class="source-line-no">747</span><span id="line-747"> * {@link Table#getConfiguration} as clusterConf. See HBASE-25608.</span>
<span class="source-line-no">748</span><span id="line-748"> * @param job which has configuration to be updated</span>
<span class="source-line-no">749</span><span id="line-749"> * @param clusterConf which contains cluster key of the HBase cluster to be locality-sensitive</span>
<span class="source-line-no">750</span><span id="line-750"> * @see #configureIncrementalLoad(Job, Table, RegionLocator)</span>
<span class="source-line-no">751</span><span id="line-751"> * @see #LOCALITY_SENSITIVE_CONF_KEY</span>
<span class="source-line-no">752</span><span id="line-752"> * @see #REMOTE_CLUSTER_ZOOKEEPER_QUORUM_CONF_KEY</span>
<span class="source-line-no">753</span><span id="line-753"> * @see #REMOTE_CLUSTER_ZOOKEEPER_CLIENT_PORT_CONF_KEY</span>
<span class="source-line-no">754</span><span id="line-754"> * @see #REMOTE_CLUSTER_ZOOKEEPER_ZNODE_PARENT_CONF_KEY</span>
<span class="source-line-no">755</span><span id="line-755"> */</span>
<span class="source-line-no">756</span><span id="line-756"> public static void configureRemoteCluster(Job job, Configuration clusterConf) {</span>
<span class="source-line-no">757</span><span id="line-757"> Configuration conf = job.getConfiguration();</span>
<span class="source-line-no">758</span><span id="line-758"></span>
<span class="source-line-no">759</span><span id="line-759"> if (!conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {</span>
<span class="source-line-no">760</span><span id="line-760"> return;</span>
<span class="source-line-no">761</span><span id="line-761"> }</span>
<span class="source-line-no">762</span><span id="line-762"></span>
<span class="source-line-no">763</span><span id="line-763"> final String quorum = clusterConf.get(HConstants.ZOOKEEPER_QUORUM);</span>
<span class="source-line-no">764</span><span id="line-764"> final int clientPort = clusterConf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT,</span>
<span class="source-line-no">765</span><span id="line-765"> HConstants.DEFAULT_ZOOKEEPER_CLIENT_PORT);</span>
<span class="source-line-no">766</span><span id="line-766"> final String parent =</span>
<span class="source-line-no">767</span><span id="line-767"> clusterConf.get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);</span>
<span class="source-line-no">768</span><span id="line-768"></span>
<span class="source-line-no">769</span><span id="line-769"> conf.set(REMOTE_CLUSTER_ZOOKEEPER_QUORUM_CONF_KEY, quorum);</span>
<span class="source-line-no">770</span><span id="line-770"> conf.setInt(REMOTE_CLUSTER_ZOOKEEPER_CLIENT_PORT_CONF_KEY, clientPort);</span>
<span class="source-line-no">771</span><span id="line-771"> conf.set(REMOTE_CLUSTER_ZOOKEEPER_ZNODE_PARENT_CONF_KEY, parent);</span>
<span class="source-line-no">772</span><span id="line-772"></span>
<span class="source-line-no">773</span><span id="line-773"> LOG.info("ZK configs for remote cluster of bulkload is configured: " + quorum + ":" + clientPort</span>
<span class="source-line-no">774</span><span id="line-774"> + "/" + parent);</span>
<span class="source-line-no">775</span><span id="line-775"> }</span>
<span class="source-line-no">776</span><span id="line-776"></span>
<span class="source-line-no">777</span><span id="line-777"> /**</span>
<span class="source-line-no">778</span><span id="line-778"> * Runs inside the task to deserialize column family to compression algorithm map from the</span>
<span class="source-line-no">779</span><span id="line-779"> * configuration.</span>
<span class="source-line-no">780</span><span id="line-780"> * @param conf to read the serialized values from</span>
<span class="source-line-no">781</span><span id="line-781"> * @return a map from column family to the configured compression algorithm</span>
<span class="source-line-no">782</span><span id="line-782"> */</span>
<span class="source-line-no">783</span><span id="line-783"> @InterfaceAudience.Private</span>
<span class="source-line-no">784</span><span id="line-784"> static Map&lt;byte[], Algorithm&gt; createFamilyCompressionMap(Configuration conf) {</span>
<span class="source-line-no">785</span><span id="line-785"> Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf, COMPRESSION_FAMILIES_CONF_KEY);</span>
<span class="source-line-no">786</span><span id="line-786"> Map&lt;byte[], Algorithm&gt; compressionMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">787</span><span id="line-787"> for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {</span>
<span class="source-line-no">788</span><span id="line-788"> Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());</span>
<span class="source-line-no">789</span><span id="line-789"> compressionMap.put(e.getKey(), algorithm);</span>
<span class="source-line-no">790</span><span id="line-790"> }</span>
<span class="source-line-no">791</span><span id="line-791"> return compressionMap;</span>
<span class="source-line-no">792</span><span id="line-792"> }</span>
<span class="source-line-no">793</span><span id="line-793"></span>
<span class="source-line-no">794</span><span id="line-794"> /**</span>
<span class="source-line-no">795</span><span id="line-795"> * Runs inside the task to deserialize column family to bloom filter type map from the</span>
<span class="source-line-no">796</span><span id="line-796"> * configuration.</span>
<span class="source-line-no">797</span><span id="line-797"> * @param conf to read the serialized values from</span>
<span class="source-line-no">798</span><span id="line-798"> * @return a map from column family to the the configured bloom filter type</span>
<span class="source-line-no">799</span><span id="line-799"> */</span>
<span class="source-line-no">800</span><span id="line-800"> @InterfaceAudience.Private</span>
<span class="source-line-no">801</span><span id="line-801"> static Map&lt;byte[], BloomType&gt; createFamilyBloomTypeMap(Configuration conf) {</span>
<span class="source-line-no">802</span><span id="line-802"> Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf, BLOOM_TYPE_FAMILIES_CONF_KEY);</span>
<span class="source-line-no">803</span><span id="line-803"> Map&lt;byte[], BloomType&gt; bloomTypeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">804</span><span id="line-804"> for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {</span>
<span class="source-line-no">805</span><span id="line-805"> BloomType bloomType = BloomType.valueOf(e.getValue());</span>
<span class="source-line-no">806</span><span id="line-806"> bloomTypeMap.put(e.getKey(), bloomType);</span>
<span class="source-line-no">807</span><span id="line-807"> }</span>
<span class="source-line-no">808</span><span id="line-808"> return bloomTypeMap;</span>
<span class="source-line-no">809</span><span id="line-809"> }</span>
<span class="source-line-no">810</span><span id="line-810"></span>
<span class="source-line-no">811</span><span id="line-811"> /**</span>
<span class="source-line-no">812</span><span id="line-812"> * Runs inside the task to deserialize column family to bloom filter param map from the</span>
<span class="source-line-no">813</span><span id="line-813"> * configuration.</span>
<span class="source-line-no">814</span><span id="line-814"> * @param conf to read the serialized values from</span>
<span class="source-line-no">815</span><span id="line-815"> * @return a map from column family to the the configured bloom filter param</span>
<span class="source-line-no">816</span><span id="line-816"> */</span>
<span class="source-line-no">817</span><span id="line-817"> @InterfaceAudience.Private</span>
<span class="source-line-no">818</span><span id="line-818"> static Map&lt;byte[], String&gt; createFamilyBloomParamMap(Configuration conf) {</span>
<span class="source-line-no">819</span><span id="line-819"> return createFamilyConfValueMap(conf, BLOOM_PARAM_FAMILIES_CONF_KEY);</span>
<span class="source-line-no">820</span><span id="line-820"> }</span>
<span class="source-line-no">821</span><span id="line-821"></span>
<span class="source-line-no">822</span><span id="line-822"> /**</span>
<span class="source-line-no">823</span><span id="line-823"> * Runs inside the task to deserialize column family to block size map from the configuration.</span>
<span class="source-line-no">824</span><span id="line-824"> * @param conf to read the serialized values from</span>
<span class="source-line-no">825</span><span id="line-825"> * @return a map from column family to the configured block size</span>
<span class="source-line-no">826</span><span id="line-826"> */</span>
<span class="source-line-no">827</span><span id="line-827"> @InterfaceAudience.Private</span>
<span class="source-line-no">828</span><span id="line-828"> static Map&lt;byte[], Integer&gt; createFamilyBlockSizeMap(Configuration conf) {</span>
<span class="source-line-no">829</span><span id="line-829"> Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf, BLOCK_SIZE_FAMILIES_CONF_KEY);</span>
<span class="source-line-no">830</span><span id="line-830"> Map&lt;byte[], Integer&gt; blockSizeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">831</span><span id="line-831"> for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {</span>
<span class="source-line-no">832</span><span id="line-832"> Integer blockSize = Integer.parseInt(e.getValue());</span>
<span class="source-line-no">833</span><span id="line-833"> blockSizeMap.put(e.getKey(), blockSize);</span>
<span class="source-line-no">834</span><span id="line-834"> }</span>
<span class="source-line-no">835</span><span id="line-835"> return blockSizeMap;</span>
<span class="source-line-no">836</span><span id="line-836"> }</span>
<span class="source-line-no">837</span><span id="line-837"></span>
<span class="source-line-no">838</span><span id="line-838"> /**</span>
<span class="source-line-no">839</span><span id="line-839"> * Runs inside the task to deserialize column family to data block encoding type map from the</span>
<span class="source-line-no">840</span><span id="line-840"> * configuration.</span>
<span class="source-line-no">841</span><span id="line-841"> * @param conf to read the serialized values from</span>
<span class="source-line-no">842</span><span id="line-842"> * @return a map from column family to HFileDataBlockEncoder for the configured data block type</span>
<span class="source-line-no">843</span><span id="line-843"> * for the family</span>
<span class="source-line-no">844</span><span id="line-844"> */</span>
<span class="source-line-no">845</span><span id="line-845"> @InterfaceAudience.Private</span>
<span class="source-line-no">846</span><span id="line-846"> static Map&lt;byte[], DataBlockEncoding&gt; createFamilyDataBlockEncodingMap(Configuration conf) {</span>
<span class="source-line-no">847</span><span id="line-847"> Map&lt;byte[], String&gt; stringMap =</span>
<span class="source-line-no">848</span><span id="line-848"> createFamilyConfValueMap(conf, DATABLOCK_ENCODING_FAMILIES_CONF_KEY);</span>
<span class="source-line-no">849</span><span id="line-849"> Map&lt;byte[], DataBlockEncoding&gt; encoderMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">850</span><span id="line-850"> for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {</span>
<span class="source-line-no">851</span><span id="line-851"> encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));</span>
<span class="source-line-no">852</span><span id="line-852"> }</span>
<span class="source-line-no">853</span><span id="line-853"> return encoderMap;</span>
<span class="source-line-no">854</span><span id="line-854"> }</span>
<span class="source-line-no">855</span><span id="line-855"></span>
<span class="source-line-no">856</span><span id="line-856"> /**</span>
<span class="source-line-no">857</span><span id="line-857"> * Run inside the task to deserialize column family to given conf value map.</span>
<span class="source-line-no">858</span><span id="line-858"> * @param conf to read the serialized values from</span>
<span class="source-line-no">859</span><span id="line-859"> * @param confName conf key to read from the configuration</span>
<span class="source-line-no">860</span><span id="line-860"> * @return a map of column family to the given configuration value</span>
<span class="source-line-no">861</span><span id="line-861"> */</span>
<span class="source-line-no">862</span><span id="line-862"> private static Map&lt;byte[], String&gt; createFamilyConfValueMap(Configuration conf, String confName) {</span>
<span class="source-line-no">863</span><span id="line-863"> Map&lt;byte[], String&gt; confValMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);</span>
<span class="source-line-no">864</span><span id="line-864"> String confVal = conf.get(confName, "");</span>
<span class="source-line-no">865</span><span id="line-865"> for (String familyConf : confVal.split("&amp;")) {</span>
<span class="source-line-no">866</span><span id="line-866"> String[] familySplit = familyConf.split("=");</span>
<span class="source-line-no">867</span><span id="line-867"> if (familySplit.length != 2) {</span>
<span class="source-line-no">868</span><span id="line-868"> continue;</span>
<span class="source-line-no">869</span><span id="line-869"> }</span>
<span class="source-line-no">870</span><span id="line-870"> try {</span>
<span class="source-line-no">871</span><span id="line-871"> confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),</span>
<span class="source-line-no">872</span><span id="line-872"> URLDecoder.decode(familySplit[1], "UTF-8"));</span>
<span class="source-line-no">873</span><span id="line-873"> } catch (UnsupportedEncodingException e) {</span>
<span class="source-line-no">874</span><span id="line-874"> // will not happen with UTF-8 encoding</span>
<span class="source-line-no">875</span><span id="line-875"> throw new AssertionError(e);</span>
<span class="source-line-no">876</span><span id="line-876"> }</span>
<span class="source-line-no">877</span><span id="line-877"> }</span>
<span class="source-line-no">878</span><span id="line-878"> return confValMap;</span>
<span class="source-line-no">879</span><span id="line-879"> }</span>
<span class="source-line-no">880</span><span id="line-880"></span>
<span class="source-line-no">881</span><span id="line-881"> /**</span>
<span class="source-line-no">882</span><span id="line-882"> * Configure &lt;code&gt;job&lt;/code&gt; with a TotalOrderPartitioner, partitioning against</span>
<span class="source-line-no">883</span><span id="line-883"> * &lt;code&gt;splitPoints&lt;/code&gt;. Cleans up the partitions file after job exists.</span>
<span class="source-line-no">884</span><span id="line-884"> */</span>
<span class="source-line-no">885</span><span id="line-885"> static void configurePartitioner(Job job, List&lt;ImmutableBytesWritable&gt; splitPoints,</span>
<span class="source-line-no">886</span><span id="line-886"> boolean writeMultipleTables) throws IOException {</span>
<span class="source-line-no">887</span><span id="line-887"> Configuration conf = job.getConfiguration();</span>
<span class="source-line-no">888</span><span id="line-888"> // create the partitions file</span>
<span class="source-line-no">889</span><span id="line-889"> FileSystem fs = FileSystem.get(conf);</span>
<span class="source-line-no">890</span><span id="line-890"> String hbaseTmpFsDir =</span>
<span class="source-line-no">891</span><span id="line-891"> conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, fs.getHomeDirectory() + "/hbase-staging");</span>
<span class="source-line-no">892</span><span id="line-892"> Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());</span>
<span class="source-line-no">893</span><span id="line-893"> fs.makeQualified(partitionsPath);</span>
<span class="source-line-no">894</span><span id="line-894"> writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);</span>
<span class="source-line-no">895</span><span id="line-895"> fs.deleteOnExit(partitionsPath);</span>
<span class="source-line-no">896</span><span id="line-896"></span>
<span class="source-line-no">897</span><span id="line-897"> // configure job to use it</span>
<span class="source-line-no">898</span><span id="line-898"> job.setPartitionerClass(TotalOrderPartitioner.class);</span>
<span class="source-line-no">899</span><span id="line-899"> TotalOrderPartitioner.setPartitionFile(conf, partitionsPath);</span>
<span class="source-line-no">900</span><span id="line-900"> }</span>
<span class="source-line-no">901</span><span id="line-901"></span>
<span class="source-line-no">902</span><span id="line-902"> @edu.umd.cs.findbugs.annotations.SuppressWarnings(</span>
<span class="source-line-no">903</span><span id="line-903"> value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")</span>
<span class="source-line-no">904</span><span id="line-904"> @InterfaceAudience.Private</span>
<span class="source-line-no">905</span><span id="line-905"> static String serializeColumnFamilyAttribute(Function&lt;ColumnFamilyDescriptor, String&gt; fn,</span>
<span class="source-line-no">906</span><span id="line-906"> List&lt;TableDescriptor&gt; allTables) throws UnsupportedEncodingException {</span>
<span class="source-line-no">907</span><span id="line-907"> StringBuilder attributeValue = new StringBuilder();</span>
<span class="source-line-no">908</span><span id="line-908"> int i = 0;</span>
<span class="source-line-no">909</span><span id="line-909"> for (TableDescriptor tableDescriptor : allTables) {</span>
<span class="source-line-no">910</span><span id="line-910"> if (tableDescriptor == null) {</span>
<span class="source-line-no">911</span><span id="line-911"> // could happen with mock table instance</span>
<span class="source-line-no">912</span><span id="line-912"> // CODEREVIEW: Can I set an empty string in conf if mock table instance?</span>
<span class="source-line-no">913</span><span id="line-913"> return "";</span>
<span class="source-line-no">914</span><span id="line-914"> }</span>
<span class="source-line-no">915</span><span id="line-915"> for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {</span>
<span class="source-line-no">916</span><span id="line-916"> if (i++ &gt; 0) {</span>
<span class="source-line-no">917</span><span id="line-917"> attributeValue.append('&amp;');</span>
<span class="source-line-no">918</span><span id="line-918"> }</span>
<span class="source-line-no">919</span><span id="line-919"> attributeValue.append(URLEncoder</span>
<span class="source-line-no">920</span><span id="line-920"> .encode(Bytes.toString(combineTableNameSuffix(tableDescriptor.getTableName().getName(),</span>
<span class="source-line-no">921</span><span id="line-921"> familyDescriptor.getName())), "UTF-8"));</span>
<span class="source-line-no">922</span><span id="line-922"> attributeValue.append('=');</span>
<span class="source-line-no">923</span><span id="line-923"> attributeValue.append(URLEncoder.encode(fn.apply(familyDescriptor), "UTF-8"));</span>
<span class="source-line-no">924</span><span id="line-924"> }</span>
<span class="source-line-no">925</span><span id="line-925"> }</span>
<span class="source-line-no">926</span><span id="line-926"> // Get rid of the last ampersand</span>
<span class="source-line-no">927</span><span id="line-927"> return attributeValue.toString();</span>
<span class="source-line-no">928</span><span id="line-928"> }</span>
<span class="source-line-no">929</span><span id="line-929"></span>
<span class="source-line-no">930</span><span id="line-930"> /**</span>
<span class="source-line-no">931</span><span id="line-931"> * Serialize column family to compression algorithm map to configuration. Invoked while</span>
<span class="source-line-no">932</span><span id="line-932"> * configuring the MR job for incremental load.</span>
<span class="source-line-no">933</span><span id="line-933"> */</span>
<span class="source-line-no">934</span><span id="line-934"> @InterfaceAudience.Private</span>
<span class="source-line-no">935</span><span id="line-935"> static Function&lt;ColumnFamilyDescriptor, String&gt; compressionDetails =</span>
<span class="source-line-no">936</span><span id="line-936"> familyDescriptor -&gt; familyDescriptor.getCompressionType().getName();</span>
<span class="source-line-no">937</span><span id="line-937"></span>
<span class="source-line-no">938</span><span id="line-938"> /**</span>
<span class="source-line-no">939</span><span id="line-939"> * Serialize column family to block size map to configuration. Invoked while configuring the MR</span>
<span class="source-line-no">940</span><span id="line-940"> * job for incremental load.</span>
<span class="source-line-no">941</span><span id="line-941"> */</span>
<span class="source-line-no">942</span><span id="line-942"> @InterfaceAudience.Private</span>
<span class="source-line-no">943</span><span id="line-943"> static Function&lt;ColumnFamilyDescriptor, String&gt; blockSizeDetails =</span>
<span class="source-line-no">944</span><span id="line-944"> familyDescriptor -&gt; String.valueOf(familyDescriptor.getBlocksize());</span>
<span class="source-line-no">945</span><span id="line-945"></span>
<span class="source-line-no">946</span><span id="line-946"> /**</span>
<span class="source-line-no">947</span><span id="line-947"> * Serialize column family to bloom type map to configuration. Invoked while configuring the MR</span>
<span class="source-line-no">948</span><span id="line-948"> * job for incremental load.</span>
<span class="source-line-no">949</span><span id="line-949"> */</span>
<span class="source-line-no">950</span><span id="line-950"> @InterfaceAudience.Private</span>
<span class="source-line-no">951</span><span id="line-951"> static Function&lt;ColumnFamilyDescriptor, String&gt; bloomTypeDetails = familyDescriptor -&gt; {</span>
<span class="source-line-no">952</span><span id="line-952"> String bloomType = familyDescriptor.getBloomFilterType().toString();</span>
<span class="source-line-no">953</span><span id="line-953"> if (bloomType == null) {</span>
<span class="source-line-no">954</span><span id="line-954"> bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();</span>
<span class="source-line-no">955</span><span id="line-955"> }</span>
<span class="source-line-no">956</span><span id="line-956"> return bloomType;</span>
<span class="source-line-no">957</span><span id="line-957"> };</span>
<span class="source-line-no">958</span><span id="line-958"></span>
<span class="source-line-no">959</span><span id="line-959"> /**</span>
<span class="source-line-no">960</span><span id="line-960"> * Serialize column family to bloom param map to configuration. Invoked while configuring the MR</span>
<span class="source-line-no">961</span><span id="line-961"> * job for incremental load.</span>
<span class="source-line-no">962</span><span id="line-962"> */</span>
<span class="source-line-no">963</span><span id="line-963"> @InterfaceAudience.Private</span>
<span class="source-line-no">964</span><span id="line-964"> static Function&lt;ColumnFamilyDescriptor, String&gt; bloomParamDetails = familyDescriptor -&gt; {</span>
<span class="source-line-no">965</span><span id="line-965"> BloomType bloomType = familyDescriptor.getBloomFilterType();</span>
<span class="source-line-no">966</span><span id="line-966"> String bloomParam = "";</span>
<span class="source-line-no">967</span><span id="line-967"> if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {</span>
<span class="source-line-no">968</span><span id="line-968"> bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);</span>
<span class="source-line-no">969</span><span id="line-969"> }</span>
<span class="source-line-no">970</span><span id="line-970"> return bloomParam;</span>
<span class="source-line-no">971</span><span id="line-971"> };</span>
<span class="source-line-no">972</span><span id="line-972"></span>
<span class="source-line-no">973</span><span id="line-973"> /**</span>
<span class="source-line-no">974</span><span id="line-974"> * Serialize column family to data block encoding map to configuration. Invoked while configuring</span>
<span class="source-line-no">975</span><span id="line-975"> * the MR job for incremental load.</span>
<span class="source-line-no">976</span><span id="line-976"> */</span>
<span class="source-line-no">977</span><span id="line-977"> @InterfaceAudience.Private</span>
<span class="source-line-no">978</span><span id="line-978"> static Function&lt;ColumnFamilyDescriptor, String&gt; dataBlockEncodingDetails = familyDescriptor -&gt; {</span>
<span class="source-line-no">979</span><span id="line-979"> DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();</span>
<span class="source-line-no">980</span><span id="line-980"> if (encoding == null) {</span>
<span class="source-line-no">981</span><span id="line-981"> encoding = DataBlockEncoding.NONE;</span>
<span class="source-line-no">982</span><span id="line-982"> }</span>
<span class="source-line-no">983</span><span id="line-983"> return encoding.toString();</span>
<span class="source-line-no">984</span><span id="line-984"> };</span>
<span class="source-line-no">985</span><span id="line-985"></span>
<span class="source-line-no">986</span><span id="line-986">}</span>
</pre>
</div>
</main>
</body>
</html>