<!DOCTYPE HTML>
<html lang="en">
<head>
<!-- Generated by javadoc (17) -->
<title>Source code</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="source: package: org.apache.hadoop.hbase.wal, class: BoundedRecoveredHFilesOutputSink">
<meta name="generator" content="javadoc/SourceToHTMLConverter">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body class="source-page">
<main role="main">
<div class="source-container">
<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.wal;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;</span>
<span class="source-line-no">021</span><span id="line-21"></span>
<span class="source-line-no">022</span><span id="line-22">import java.io.IOException;</span>
<span class="source-line-no">023</span><span id="line-23">import java.io.InterruptedIOException;</span>
<span class="source-line-no">024</span><span id="line-24">import java.util.HashMap;</span>
<span class="source-line-no">025</span><span id="line-25">import java.util.List;</span>
<span class="source-line-no">026</span><span id="line-26">import java.util.Map;</span>
<span class="source-line-no">027</span><span id="line-27">import java.util.concurrent.ConcurrentHashMap;</span>
<span class="source-line-no">028</span><span id="line-28">import java.util.concurrent.ConcurrentMap;</span>
<span class="source-line-no">029</span><span id="line-29">import java.util.concurrent.ExecutionException;</span>
<span class="source-line-no">030</span><span id="line-30">import java.util.concurrent.Future;</span>
<span class="source-line-no">031</span><span id="line-31">import java.util.concurrent.atomic.AtomicInteger;</span>
<span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.hbase.Cell;</span>
<span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.hbase.CellComparatorImpl;</span>
<span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.hbase.CellUtil;</span>
<span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.hbase.ExtendedCell;</span>
<span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.hbase.MetaCellComparator;</span>
<span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.hbase.PrivateCellUtil;</span>
<span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.hbase.TableName;</span>
<span class="source-line-no">040</span><span id="line-40">import org.apache.hadoop.hbase.io.hfile.CacheConfig;</span>
<span class="source-line-no">041</span><span id="line-41">import org.apache.hadoop.hbase.io.hfile.HFileContext;</span>
<span class="source-line-no">042</span><span id="line-42">import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;</span>
<span class="source-line-no">043</span><span id="line-43">import org.apache.hadoop.hbase.regionserver.CellSet;</span>
<span class="source-line-no">044</span><span id="line-44">import org.apache.hadoop.hbase.regionserver.StoreFileWriter;</span>
<span class="source-line-no">045</span><span id="line-45">import org.apache.hadoop.hbase.regionserver.StoreUtils;</span>
<span class="source-line-no">046</span><span id="line-46">import org.apache.hadoop.hbase.util.Bytes;</span>
<span class="source-line-no">047</span><span id="line-47">import org.apache.hadoop.hbase.wal.EntryBuffers.RegionEntryBuffer;</span>
<span class="source-line-no">048</span><span id="line-48">import org.apache.hadoop.hbase.wal.WAL.Entry;</span>
<span class="source-line-no">049</span><span id="line-49">import org.apache.yetus.audience.InterfaceAudience;</span>
<span class="source-line-no">050</span><span id="line-50">import org.slf4j.Logger;</span>
<span class="source-line-no">051</span><span id="line-51">import org.slf4j.LoggerFactory;</span>
<span class="source-line-no">052</span><span id="line-52"></span>
<span class="source-line-no">053</span><span id="line-53">/**</span>
<span class="source-line-no">054</span><span id="line-54"> * A WALSplitter sink that outputs {@link org.apache.hadoop.hbase.io.hfile.HFile}s. Runs with a</span>
<span class="source-line-no">055</span><span id="line-55"> * bounded number of HFile writers at any one time rather than let the count run up.</span>
<span class="source-line-no">056</span><span id="line-56"> * @see BoundedRecoveredEditsOutputSink for a sink implementation that writes intermediate</span>
<span class="source-line-no">057</span><span id="line-57"> * recovered.edits files.</span>
<span class="source-line-no">058</span><span id="line-58"> */</span>
<span class="source-line-no">059</span><span id="line-59">@InterfaceAudience.Private</span>
<span class="source-line-no">060</span><span id="line-60">public class BoundedRecoveredHFilesOutputSink extends OutputSink {</span>
<span class="source-line-no">061</span><span id="line-61"> private static final Logger LOG = LoggerFactory.getLogger(BoundedRecoveredHFilesOutputSink.class);</span>
<span class="source-line-no">062</span><span id="line-62"></span>
<span class="source-line-no">063</span><span id="line-63"> private final WALSplitter walSplitter;</span>
<span class="source-line-no">064</span><span id="line-64"></span>
<span class="source-line-no">065</span><span id="line-65"> // Since the splitting process may create multiple output files, we need a map</span>
<span class="source-line-no">066</span><span id="line-66"> // to track the output count of each region.</span>
<span class="source-line-no">067</span><span id="line-67"> private ConcurrentMap&lt;String, Long&gt; regionEditsWrittenMap = new ConcurrentHashMap&lt;&gt;();</span>
<span class="source-line-no">068</span><span id="line-68"> // Need a counter to track the opening writers.</span>
<span class="source-line-no">069</span><span id="line-69"> private final AtomicInteger openingWritersNum = new AtomicInteger(0);</span>
<span class="source-line-no">070</span><span id="line-70"></span>
<span class="source-line-no">071</span><span id="line-71"> public BoundedRecoveredHFilesOutputSink(WALSplitter walSplitter,</span>
<span class="source-line-no">072</span><span id="line-72"> WALSplitter.PipelineController controller, EntryBuffers entryBuffers, int numWriters) {</span>
<span class="source-line-no">073</span><span id="line-73"> super(controller, entryBuffers, numWriters);</span>
<span class="source-line-no">074</span><span id="line-74"> this.walSplitter = walSplitter;</span>
<span class="source-line-no">075</span><span id="line-75"> }</span>
<span class="source-line-no">076</span><span id="line-76"></span>
<span class="source-line-no">077</span><span id="line-77"> @Override</span>
<span class="source-line-no">078</span><span id="line-78"> void append(RegionEntryBuffer buffer) throws IOException {</span>
<span class="source-line-no">079</span><span id="line-79"> Map&lt;String, CellSet&lt;ExtendedCell&gt;&gt; familyCells = new HashMap&lt;&gt;();</span>
<span class="source-line-no">080</span><span id="line-80"> Map&lt;String, Long&gt; familySeqIds = new HashMap&lt;&gt;();</span>
<span class="source-line-no">081</span><span id="line-81"> boolean isMetaTable = buffer.tableName.equals(META_TABLE_NAME);</span>
<span class="source-line-no">082</span><span id="line-82"> // First iterate all Cells to find which column families are present and to stamp Cell with</span>
<span class="source-line-no">083</span><span id="line-83"> // sequence id.</span>
<span class="source-line-no">084</span><span id="line-84"> for (WAL.Entry entry : buffer.entryBuffer) {</span>
<span class="source-line-no">085</span><span id="line-85"> long seqId = entry.getKey().getSequenceId();</span>
<span class="source-line-no">086</span><span id="line-86"> List&lt;Cell&gt; cells = entry.getEdit().getCells();</span>
<span class="source-line-no">087</span><span id="line-87"> for (Cell cell : cells) {</span>
<span class="source-line-no">088</span><span id="line-88"> if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {</span>
<span class="source-line-no">089</span><span id="line-89"> continue;</span>
<span class="source-line-no">090</span><span id="line-90"> }</span>
<span class="source-line-no">091</span><span id="line-91"> // only ExtendedCell can set sequence id, so it is safe to cast it to ExtendedCell later.</span>
<span class="source-line-no">092</span><span id="line-92"> PrivateCellUtil.setSequenceId(cell, seqId);</span>
<span class="source-line-no">093</span><span id="line-93"> String familyName = Bytes.toString(CellUtil.cloneFamily(cell));</span>
<span class="source-line-no">094</span><span id="line-94"> // comparator need to be specified for meta</span>
<span class="source-line-no">095</span><span id="line-95"> familyCells</span>
<span class="source-line-no">096</span><span id="line-96"> .computeIfAbsent(familyName,</span>
<span class="source-line-no">097</span><span id="line-97"> key -&gt; new CellSet&lt;&gt;(</span>
<span class="source-line-no">098</span><span id="line-98"> isMetaTable ? MetaCellComparator.META_COMPARATOR : CellComparatorImpl.COMPARATOR))</span>
<span class="source-line-no">099</span><span id="line-99"> .add((ExtendedCell) cell);</span>
<span class="source-line-no">100</span><span id="line-100"> familySeqIds.compute(familyName, (k, v) -&gt; v == null ? seqId : Math.max(v, seqId));</span>
<span class="source-line-no">101</span><span id="line-101"> }</span>
<span class="source-line-no">102</span><span id="line-102"> }</span>
<span class="source-line-no">103</span><span id="line-103"></span>
<span class="source-line-no">104</span><span id="line-104"> // Create a new hfile writer for each column family, write edits then close writer.</span>
<span class="source-line-no">105</span><span id="line-105"> String regionName = Bytes.toString(buffer.encodedRegionName);</span>
<span class="source-line-no">106</span><span id="line-106"> for (Map.Entry&lt;String, CellSet&lt;ExtendedCell&gt;&gt; cellsEntry : familyCells.entrySet()) {</span>
<span class="source-line-no">107</span><span id="line-107"> String familyName = cellsEntry.getKey();</span>
<span class="source-line-no">108</span><span id="line-108"> StoreFileWriter writer = createRecoveredHFileWriter(buffer.tableName, regionName,</span>
<span class="source-line-no">109</span><span id="line-109"> familySeqIds.get(familyName), familyName, isMetaTable);</span>
<span class="source-line-no">110</span><span id="line-110"> LOG.trace("Created {}", writer.getPath());</span>
<span class="source-line-no">111</span><span id="line-111"> openingWritersNum.incrementAndGet();</span>
<span class="source-line-no">112</span><span id="line-112"> try {</span>
<span class="source-line-no">113</span><span id="line-113"> for (ExtendedCell cell : cellsEntry.getValue()) {</span>
<span class="source-line-no">114</span><span id="line-114"> writer.append(cell);</span>
<span class="source-line-no">115</span><span id="line-115"> }</span>
<span class="source-line-no">116</span><span id="line-116"> // Append the max seqid to hfile, used when recovery.</span>
<span class="source-line-no">117</span><span id="line-117"> writer.appendMetadata(familySeqIds.get(familyName), false);</span>
<span class="source-line-no">118</span><span id="line-118"> regionEditsWrittenMap.compute(Bytes.toString(buffer.encodedRegionName),</span>
<span class="source-line-no">119</span><span id="line-119"> (k, v) -&gt; v == null ? buffer.entryBuffer.size() : v + buffer.entryBuffer.size());</span>
<span class="source-line-no">120</span><span id="line-120"> splits.add(writer.getPath());</span>
<span class="source-line-no">121</span><span id="line-121"> openingWritersNum.decrementAndGet();</span>
<span class="source-line-no">122</span><span id="line-122"> } finally {</span>
<span class="source-line-no">123</span><span id="line-123"> writer.close();</span>
<span class="source-line-no">124</span><span id="line-124"> LOG.trace("Closed {}, edits={}", writer.getPath(), familyCells.size());</span>
<span class="source-line-no">125</span><span id="line-125"> }</span>
<span class="source-line-no">126</span><span id="line-126"> }</span>
<span class="source-line-no">127</span><span id="line-127"> }</span>
<span class="source-line-no">128</span><span id="line-128"></span>
<span class="source-line-no">129</span><span id="line-129"> @Override</span>
<span class="source-line-no">130</span><span id="line-130"> public List&lt;Path&gt; close() throws IOException {</span>
<span class="source-line-no">131</span><span id="line-131"> boolean isSuccessful = true;</span>
<span class="source-line-no">132</span><span id="line-132"> try {</span>
<span class="source-line-no">133</span><span id="line-133"> isSuccessful = finishWriterThreads();</span>
<span class="source-line-no">134</span><span id="line-134"> } finally {</span>
<span class="source-line-no">135</span><span id="line-135"> isSuccessful &amp;= writeRemainingEntryBuffers();</span>
<span class="source-line-no">136</span><span id="line-136"> }</span>
<span class="source-line-no">137</span><span id="line-137"> return isSuccessful ? splits : null;</span>
<span class="source-line-no">138</span><span id="line-138"> }</span>
<span class="source-line-no">139</span><span id="line-139"></span>
<span class="source-line-no">140</span><span id="line-140"> /**</span>
<span class="source-line-no">141</span><span id="line-141"> * Write out the remaining RegionEntryBuffers and close the writers.</span>
<span class="source-line-no">142</span><span id="line-142"> * @return true when there is no error.</span>
<span class="source-line-no">143</span><span id="line-143"> */</span>
<span class="source-line-no">144</span><span id="line-144"> private boolean writeRemainingEntryBuffers() throws IOException {</span>
<span class="source-line-no">145</span><span id="line-145"> for (EntryBuffers.RegionEntryBuffer buffer : entryBuffers.buffers.values()) {</span>
<span class="source-line-no">146</span><span id="line-146"> closeCompletionService.submit(() -&gt; {</span>
<span class="source-line-no">147</span><span id="line-147"> append(buffer);</span>
<span class="source-line-no">148</span><span id="line-148"> return null;</span>
<span class="source-line-no">149</span><span id="line-149"> });</span>
<span class="source-line-no">150</span><span id="line-150"> }</span>
<span class="source-line-no">151</span><span id="line-151"> boolean progressFailed = false;</span>
<span class="source-line-no">152</span><span id="line-152"> try {</span>
<span class="source-line-no">153</span><span id="line-153"> for (int i = 0, n = entryBuffers.buffers.size(); i &lt; n; i++) {</span>
<span class="source-line-no">154</span><span id="line-154"> Future&lt;Void&gt; future = closeCompletionService.take();</span>
<span class="source-line-no">155</span><span id="line-155"> future.get();</span>
<span class="source-line-no">156</span><span id="line-156"> if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {</span>
<span class="source-line-no">157</span><span id="line-157"> progressFailed = true;</span>
<span class="source-line-no">158</span><span id="line-158"> }</span>
<span class="source-line-no">159</span><span id="line-159"> }</span>
<span class="source-line-no">160</span><span id="line-160"> } catch (InterruptedException e) {</span>
<span class="source-line-no">161</span><span id="line-161"> IOException iie = new InterruptedIOException();</span>
<span class="source-line-no">162</span><span id="line-162"> iie.initCause(e);</span>
<span class="source-line-no">163</span><span id="line-163"> throw iie;</span>
<span class="source-line-no">164</span><span id="line-164"> } catch (ExecutionException e) {</span>
<span class="source-line-no">165</span><span id="line-165"> throw new IOException(e.getCause());</span>
<span class="source-line-no">166</span><span id="line-166"> } finally {</span>
<span class="source-line-no">167</span><span id="line-167"> closeThreadPool.shutdownNow();</span>
<span class="source-line-no">168</span><span id="line-168"> }</span>
<span class="source-line-no">169</span><span id="line-169"> return !progressFailed;</span>
<span class="source-line-no">170</span><span id="line-170"> }</span>
<span class="source-line-no">171</span><span id="line-171"></span>
<span class="source-line-no">172</span><span id="line-172"> @Override</span>
<span class="source-line-no">173</span><span id="line-173"> public Map&lt;String, Long&gt; getOutputCounts() {</span>
<span class="source-line-no">174</span><span id="line-174"> return regionEditsWrittenMap;</span>
<span class="source-line-no">175</span><span id="line-175"> }</span>
<span class="source-line-no">176</span><span id="line-176"></span>
<span class="source-line-no">177</span><span id="line-177"> @Override</span>
<span class="source-line-no">178</span><span id="line-178"> public int getNumberOfRecoveredRegions() {</span>
<span class="source-line-no">179</span><span id="line-179"> return regionEditsWrittenMap.size();</span>
<span class="source-line-no">180</span><span id="line-180"> }</span>
<span class="source-line-no">181</span><span id="line-181"></span>
<span class="source-line-no">182</span><span id="line-182"> @Override</span>
<span class="source-line-no">183</span><span id="line-183"> int getNumOpenWriters() {</span>
<span class="source-line-no">184</span><span id="line-184"> return openingWritersNum.get();</span>
<span class="source-line-no">185</span><span id="line-185"> }</span>
<span class="source-line-no">186</span><span id="line-186"></span>
<span class="source-line-no">187</span><span id="line-187"> @Override</span>
<span class="source-line-no">188</span><span id="line-188"> boolean keepRegionEvent(Entry entry) {</span>
<span class="source-line-no">189</span><span id="line-189"> return false;</span>
<span class="source-line-no">190</span><span id="line-190"> }</span>
<span class="source-line-no">191</span><span id="line-191"></span>
<span class="source-line-no">192</span><span id="line-192"> /**</span>
<span class="source-line-no">193</span><span id="line-193"> * @return Returns a base HFile without compressions or encodings; good enough for recovery given</span>
<span class="source-line-no">194</span><span id="line-194"> * hfile has metadata on how it was written.</span>
<span class="source-line-no">195</span><span id="line-195"> */</span>
<span class="source-line-no">196</span><span id="line-196"> private StoreFileWriter createRecoveredHFileWriter(TableName tableName, String regionName,</span>
<span class="source-line-no">197</span><span id="line-197"> long seqId, String familyName, boolean isMetaTable) throws IOException {</span>
<span class="source-line-no">198</span><span id="line-198"> Path outputDir = WALSplitUtil.tryCreateRecoveredHFilesDir(walSplitter.rootFS, walSplitter.conf,</span>
<span class="source-line-no">199</span><span id="line-199"> tableName, regionName, familyName);</span>
<span class="source-line-no">200</span><span id="line-200"> StoreFileWriter.Builder writerBuilder =</span>
<span class="source-line-no">201</span><span id="line-201"> new StoreFileWriter.Builder(walSplitter.conf, CacheConfig.DISABLED, walSplitter.rootFS)</span>
<span class="source-line-no">202</span><span id="line-202"> .withOutputDir(outputDir);</span>
<span class="source-line-no">203</span><span id="line-203"> HFileContext hFileContext =</span>
<span class="source-line-no">204</span><span id="line-204"> new HFileContextBuilder().withChecksumType(StoreUtils.getChecksumType(walSplitter.conf))</span>
<span class="source-line-no">205</span><span id="line-205"> .withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(walSplitter.conf)).withCellComparator(</span>
<span class="source-line-no">206</span><span id="line-206"> isMetaTable ? MetaCellComparator.META_COMPARATOR : CellComparatorImpl.COMPARATOR)</span>
<span class="source-line-no">207</span><span id="line-207"> .build();</span>
<span class="source-line-no">208</span><span id="line-208"> return writerBuilder.withFileContext(hFileContext).build();</span>
<span class="source-line-no">209</span><span id="line-209"> }</span>
<span class="source-line-no">210</span><span id="line-210">}</span>
</pre>
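<p>The <code>close()</code> path above drains any remaining per-region buffers through a completion service: one <code>append</code> task is submitted per <code>RegionEntryBuffer</code>, exactly that many futures are reaped, and task failures resurface on the closing thread as <code>IOException</code>s (with <code>InterruptedException</code> translated to <code>InterruptedIOException</code>). The snippet below is a minimal, self-contained sketch of that pattern using only JDK types; the <code>Buffer</code> class, the pool size, and the printed message are illustrative stand-ins rather than HBase APIs.</p>
<pre>
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionServiceFlushSketch {

  /** Hypothetical stand-in for one region's buffered edits. */
  static final class Buffer {
    final String name;
    Buffer(String name) { this.name = name; }
  }

  static void flush(List&lt;Buffer&gt; buffers) throws IOException {
    ExecutorService pool = Executors.newFixedThreadPool(3);
    CompletionService&lt;Void&gt; completion = new ExecutorCompletionService&lt;&gt;(pool);
    // Submit one task per buffer, mirroring how writeRemainingEntryBuffers hands
    // each RegionEntryBuffer to append() on the close thread pool.
    for (Buffer buffer : buffers) {
      completion.submit(() -&gt; {
        System.out.println("writing " + buffer.name); // stand-in for append(buffer)
        return null;
      });
    }
    try {
      // Reap exactly as many futures as tasks were submitted; get() rethrows task failures.
      for (int i = 0, n = buffers.size(); i &lt; n; i++) {
        completion.take().get();
      }
    } catch (InterruptedException e) {
      // Convert to InterruptedIOException, as the sink does.
      InterruptedIOException iie = new InterruptedIOException();
      iie.initCause(e);
      throw iie;
    } catch (ExecutionException e) {
      throw new IOException(e.getCause());
    } finally {
      pool.shutdownNow();
    }
  }

  public static void main(String[] args) throws IOException {
    flush(List.of(new Buffer("region-a"), new Buffer("region-b")));
  }
}
</pre>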
</div>
</main>
</body>
</html>