<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.mapreduce;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import java.io.DataInput;</span>
<span class="source-line-no">021</span><span id="line-21">import java.io.DataOutput;</span>
<span class="source-line-no">022</span><span id="line-22">import java.io.IOException;</span>
<span class="source-line-no">023</span><span id="line-23">import java.util.ArrayList;</span>
<span class="source-line-no">024</span><span id="line-24">import java.util.List;</span>
<span class="source-line-no">025</span><span id="line-25">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">026</span><span id="line-26">import org.apache.hadoop.hbase.client.RegionInfo;</span>
<span class="source-line-no">027</span><span id="line-27">import org.apache.hadoop.hbase.client.Result;</span>
<span class="source-line-no">028</span><span id="line-28">import org.apache.hadoop.hbase.client.Scan;</span>
<span class="source-line-no">029</span><span id="line-29">import org.apache.hadoop.hbase.client.TableDescriptor;</span>
<span class="source-line-no">030</span><span id="line-30">import org.apache.hadoop.hbase.client.metrics.ScanMetrics;</span>
<span class="source-line-no">031</span><span id="line-31">import org.apache.hadoop.hbase.io.ImmutableBytesWritable;</span>
<span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.hbase.util.RegionSplitter;</span>
<span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.io.Writable;</span>
<span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.mapreduce.InputFormat;</span>
<span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.mapreduce.InputSplit;</span>
<span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.mapreduce.Job;</span>
<span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.mapreduce.JobContext;</span>
<span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.mapreduce.RecordReader;</span>
<span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.mapreduce.TaskAttemptContext;</span>
<span class="source-line-no">040</span><span id="line-40">import org.apache.yetus.audience.InterfaceAudience;</span>
<span class="source-line-no">041</span><span id="line-41"></span>
<span class="source-line-no">042</span><span id="line-42">/**</span>
<span class="source-line-no">043</span><span id="line-43"> * TableSnapshotInputFormat allows a MapReduce job to run over a table snapshot. The job bypasses</span>
<span class="source-line-no">044</span><span id="line-44"> * HBase servers, and directly accesses the underlying files (hfile, recovered edits, wals, etc)</span>
<span class="source-line-no">045</span><span id="line-45"> * directly to provide maximum performance. The snapshot is not required to be restored to the live</span>
<span class="source-line-no">046</span><span id="line-46"> * cluster or cloned. This also allows to run the mapreduce job from an online or offline hbase</span>
<span class="source-line-no">047</span><span id="line-47"> * cluster. The snapshot files can be exported by using the</span>
<span class="source-line-no">048</span><span id="line-48"> * {@link org.apache.hadoop.hbase.snapshot.ExportSnapshot} tool, to a pure-hdfs cluster, and this</span>
<span class="source-line-no">049</span><span id="line-49"> * InputFormat can be used to run the mapreduce job directly over the snapshot files. The snapshot</span>
<span class="source-line-no">050</span><span id="line-50"> * should not be deleted while there are jobs reading from snapshot files.</span>
<span class="source-line-no">051</span><span id="line-51"> * &lt;p&gt;</span>
<span class="source-line-no">052</span><span id="line-52"> * Usage is similar to TableInputFormat, and</span>
<span class="source-line-no">053</span><span id="line-53"> * {@link TableMapReduceUtil#initTableSnapshotMapperJob(String, Scan, Class, Class, Class, Job, boolean, Path)}</span>
<span class="source-line-no">054</span><span id="line-54"> * can be used to configure the job.</span>
<span class="source-line-no">055</span><span id="line-55"> *</span>
<span class="source-line-no">056</span><span id="line-56"> * &lt;pre&gt;</span>
<span class="source-line-no">057</span><span id="line-57"> * {</span>
<span class="source-line-no">058</span><span id="line-58"> * &amp;#64;code</span>
<span class="source-line-no">059</span><span id="line-59"> * Job job = new Job(conf);</span>
<span class="source-line-no">060</span><span id="line-60"> * Scan scan = new Scan();</span>
<span class="source-line-no">061</span><span id="line-61"> * TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName, scan, MyTableMapper.class,</span>
<span class="source-line-no">062</span><span id="line-62"> * MyMapKeyOutput.class, MyMapOutputValueWritable.class, job, true);</span>
<span class="source-line-no">063</span><span id="line-63"> * }</span>
<span class="source-line-no">064</span><span id="line-64"> * &lt;/pre&gt;</span>
<span class="source-line-no">065</span><span id="line-65"> * &lt;p&gt;</span>
<span class="source-line-no">066</span><span id="line-66"> * Internally, this input format restores the snapshot into the given tmp directory. By default, and</span>
<span class="source-line-no">067</span><span id="line-67"> * similar to {@link TableInputFormat} an InputSplit is created per region, but optionally you can</span>
<span class="source-line-no">068</span><span id="line-68"> * run N mapper tasks per every region, in which case the region key range will be split to N</span>
<span class="source-line-no">069</span><span id="line-69"> * sub-ranges and an InputSplit will be created per sub-range. The region is opened for reading from</span>
<span class="source-line-no">070</span><span id="line-70"> * each RecordReader. An internal RegionScanner is used to execute the</span>
<span class="source-line-no">071</span><span id="line-71"> * {@link org.apache.hadoop.hbase.CellScanner} obtained from the user.</span>
<span class="source-line-no">072</span><span id="line-72"> * &lt;p&gt;</span>
<span class="source-line-no">073</span><span id="line-73"> * HBase owns all the data and snapshot files on the filesystem. Only the 'hbase' user can read from</span>
<span class="source-line-no">074</span><span id="line-74"> * snapshot files and data files. To read from snapshot files directly from the file system, the</span>
<span class="source-line-no">075</span><span id="line-75"> * user who is running the MR job must have sufficient permissions to access snapshot and reference</span>
<span class="source-line-no">076</span><span id="line-76"> * files. This means that to run mapreduce over snapshot files, the MR job has to be run as the</span>
<span class="source-line-no">077</span><span id="line-77"> * HBase user or the user must have group or other privileges in the filesystem (See HBASE-8369).</span>
<span class="source-line-no">078</span><span id="line-78"> * Note that, given other users access to read from snapshot/data files will completely circumvent</span>
<span class="source-line-no">079</span><span id="line-79"> * the access control enforced by HBase.</span>
<span class="source-line-no">080</span><span id="line-80"> * @see org.apache.hadoop.hbase.client.TableSnapshotScanner</span>
<span class="source-line-no">081</span><span id="line-81"> */</span>
<span class="source-line-no">082</span><span id="line-82">@InterfaceAudience.Public</span>
<span class="source-line-no">083</span><span id="line-83">public class TableSnapshotInputFormat extends InputFormat&lt;ImmutableBytesWritable, Result&gt; {</span>
<span class="source-line-no">084</span><span id="line-84"></span>
<span class="source-line-no">085</span><span id="line-85"> public static class TableSnapshotRegionSplit extends InputSplit implements Writable {</span>
<span class="source-line-no">086</span><span id="line-86"> private TableSnapshotInputFormatImpl.InputSplit delegate;</span>
<span class="source-line-no">087</span><span id="line-87"></span>
<span class="source-line-no">088</span><span id="line-88"> // constructor for mapreduce framework / Writable</span>
<span class="source-line-no">089</span><span id="line-89"> public TableSnapshotRegionSplit() {</span>
<span class="source-line-no">090</span><span id="line-90"> this.delegate = new TableSnapshotInputFormatImpl.InputSplit();</span>
<span class="source-line-no">091</span><span id="line-91"> }</span>
<span class="source-line-no">092</span><span id="line-92"></span>
<span class="source-line-no">093</span><span id="line-93"> public TableSnapshotRegionSplit(TableSnapshotInputFormatImpl.InputSplit delegate) {</span>
<span class="source-line-no">094</span><span id="line-94"> this.delegate = delegate;</span>
<span class="source-line-no">095</span><span id="line-95"> }</span>
<span class="source-line-no">096</span><span id="line-96"></span>
<span class="source-line-no">097</span><span id="line-97"> public TableSnapshotRegionSplit(TableDescriptor htd, RegionInfo regionInfo,</span>
<span class="source-line-no">098</span><span id="line-98"> List&lt;String&gt; locations, Scan scan, Path restoreDir) {</span>
<span class="source-line-no">099</span><span id="line-99"> this.delegate =</span>
<span class="source-line-no">100</span><span id="line-100"> new TableSnapshotInputFormatImpl.InputSplit(htd, regionInfo, locations, scan, restoreDir);</span>
<span class="source-line-no">101</span><span id="line-101"> }</span>
<span class="source-line-no">102</span><span id="line-102"></span>
<span class="source-line-no">103</span><span id="line-103"> @Override</span>
<span class="source-line-no">104</span><span id="line-104"> public long getLength() throws IOException, InterruptedException {</span>
<span class="source-line-no">105</span><span id="line-105"> return delegate.getLength();</span>
<span class="source-line-no">106</span><span id="line-106"> }</span>
<span class="source-line-no">107</span><span id="line-107"></span>
<span class="source-line-no">108</span><span id="line-108"> @Override</span>
<span class="source-line-no">109</span><span id="line-109"> public String[] getLocations() throws IOException, InterruptedException {</span>
<span class="source-line-no">110</span><span id="line-110"> return delegate.getLocations();</span>
<span class="source-line-no">111</span><span id="line-111"> }</span>
<span class="source-line-no">112</span><span id="line-112"></span>
<span class="source-line-no">113</span><span id="line-113"> @Override</span>
<span class="source-line-no">114</span><span id="line-114"> public void write(DataOutput out) throws IOException {</span>
<span class="source-line-no">115</span><span id="line-115"> delegate.write(out);</span>
<span class="source-line-no">116</span><span id="line-116"> }</span>
<span class="source-line-no">117</span><span id="line-117"></span>
<span class="source-line-no">118</span><span id="line-118"> @Override</span>
<span class="source-line-no">119</span><span id="line-119"> public void readFields(DataInput in) throws IOException {</span>
<span class="source-line-no">120</span><span id="line-120"> delegate.readFields(in);</span>
<span class="source-line-no">121</span><span id="line-121"> }</span>
<span class="source-line-no">122</span><span id="line-122"></span>
<span class="source-line-no">123</span><span id="line-123"> public RegionInfo getRegion() {</span>
<span class="source-line-no">124</span><span id="line-124"> return delegate.getRegionInfo();</span>
<span class="source-line-no">125</span><span id="line-125"> }</span>
<span class="source-line-no">126</span><span id="line-126"></span>
<span class="source-line-no">127</span><span id="line-127"> TableSnapshotInputFormatImpl.InputSplit getDelegate() {</span>
<span class="source-line-no">128</span><span id="line-128"> return this.delegate;</span>
<span class="source-line-no">129</span><span id="line-129"> }</span>
<span class="source-line-no">130</span><span id="line-130"> }</span>
<span class="source-line-no">131</span><span id="line-131"></span>
<span class="source-line-no">132</span><span id="line-132"> @InterfaceAudience.Private</span>
<span class="source-line-no">133</span><span id="line-133"> static class TableSnapshotRegionRecordReader</span>
<span class="source-line-no">134</span><span id="line-134"> extends RecordReader&lt;ImmutableBytesWritable, Result&gt; {</span>
<span class="source-line-no">135</span><span id="line-135"> private TableSnapshotInputFormatImpl.RecordReader delegate =</span>
<span class="source-line-no">136</span><span id="line-136"> new TableSnapshotInputFormatImpl.RecordReader();</span>
<span class="source-line-no">137</span><span id="line-137"> private TaskAttemptContext context;</span>
<span class="source-line-no">138</span><span id="line-138"></span>
<span class="source-line-no">139</span><span id="line-139"> @Override</span>
<span class="source-line-no">140</span><span id="line-140"> public void initialize(InputSplit split, TaskAttemptContext context)</span>
<span class="source-line-no">141</span><span id="line-141"> throws IOException, InterruptedException {</span>
<span class="source-line-no">142</span><span id="line-142"> this.context = context;</span>
<span class="source-line-no">143</span><span id="line-143"> delegate.initialize(((TableSnapshotRegionSplit) split).delegate, context.getConfiguration());</span>
<span class="source-line-no">144</span><span id="line-144"> }</span>
<span class="source-line-no">145</span><span id="line-145"></span>
<span class="source-line-no">146</span><span id="line-146"> @Override</span>
<span class="source-line-no">147</span><span id="line-147"> public boolean nextKeyValue() throws IOException, InterruptedException {</span>
<span class="source-line-no">148</span><span id="line-148"> boolean result = delegate.nextKeyValue();</span>
<span class="source-line-no">149</span><span id="line-149"> if (result) {</span>
<span class="source-line-no">150</span><span id="line-150"> ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics();</span>
<span class="source-line-no">151</span><span id="line-151"> if (scanMetrics != null &amp;&amp; context != null) {</span>
<span class="source-line-no">152</span><span id="line-152"> TableRecordReaderImpl.updateCounters(scanMetrics, 0, context, 0);</span>
<span class="source-line-no">153</span><span id="line-153"> }</span>
<span class="source-line-no">154</span><span id="line-154"> }</span>
<span class="source-line-no">155</span><span id="line-155"> return result;</span>
<span class="source-line-no">156</span><span id="line-156"> }</span>
<span class="source-line-no">157</span><span id="line-157"></span>
<span class="source-line-no">158</span><span id="line-158"> @Override</span>
<span class="source-line-no">159</span><span id="line-159"> public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {</span>
<span class="source-line-no">160</span><span id="line-160"> return delegate.getCurrentKey();</span>
<span class="source-line-no">161</span><span id="line-161"> }</span>
<span class="source-line-no">162</span><span id="line-162"></span>
<span class="source-line-no">163</span><span id="line-163"> @Override</span>
<span class="source-line-no">164</span><span id="line-164"> public Result getCurrentValue() throws IOException, InterruptedException {</span>
<span class="source-line-no">165</span><span id="line-165"> return delegate.getCurrentValue();</span>
<span class="source-line-no">166</span><span id="line-166"> }</span>
<span class="source-line-no">167</span><span id="line-167"></span>
<span class="source-line-no">168</span><span id="line-168"> @Override</span>
<span class="source-line-no">169</span><span id="line-169"> public float getProgress() throws IOException, InterruptedException {</span>
<span class="source-line-no">170</span><span id="line-170"> return delegate.getProgress();</span>
<span class="source-line-no">171</span><span id="line-171"> }</span>
<span class="source-line-no">172</span><span id="line-172"></span>
<span class="source-line-no">173</span><span id="line-173"> @Override</span>
<span class="source-line-no">174</span><span id="line-174"> public void close() throws IOException {</span>
<span class="source-line-no">175</span><span id="line-175"> delegate.close();</span>
<span class="source-line-no">176</span><span id="line-176"> }</span>
<span class="source-line-no">177</span><span id="line-177"> }</span>
<span class="source-line-no">178</span><span id="line-178"></span>
<span class="source-line-no">179</span><span id="line-179"> @Override</span>
<span class="source-line-no">180</span><span id="line-180"> public RecordReader&lt;ImmutableBytesWritable, Result&gt; createRecordReader(InputSplit split,</span>
<span class="source-line-no">181</span><span id="line-181"> TaskAttemptContext context) throws IOException {</span>
<span class="source-line-no">182</span><span id="line-182"> return new TableSnapshotRegionRecordReader();</span>
<span class="source-line-no">183</span><span id="line-183"> }</span>
<span class="source-line-no">184</span><span id="line-184"></span>
<span class="source-line-no">185</span><span id="line-185"> @Override</span>
<span class="source-line-no">186</span><span id="line-186"> public List&lt;InputSplit&gt; getSplits(JobContext job) throws IOException, InterruptedException {</span>
<span class="source-line-no">187</span><span id="line-187"> List&lt;InputSplit&gt; results = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">188</span><span id="line-188"> for (TableSnapshotInputFormatImpl.InputSplit split : TableSnapshotInputFormatImpl</span>
<span class="source-line-no">189</span><span id="line-189"> .getSplits(job.getConfiguration())) {</span>
<span class="source-line-no">190</span><span id="line-190"> results.add(new TableSnapshotRegionSplit(split));</span>
<span class="source-line-no">191</span><span id="line-191"> }</span>
<span class="source-line-no">192</span><span id="line-192"> return results;</span>
<span class="source-line-no">193</span><span id="line-193"> }</span>
<span class="source-line-no">194</span><span id="line-194"></span>
<span class="source-line-no">195</span><span id="line-195"> /**</span>
<span class="source-line-no">196</span><span id="line-196"> * Configures the job to use TableSnapshotInputFormat to read from a snapshot.</span>
<span class="source-line-no">197</span><span id="line-197"> * @param job the job to configure</span>
<span class="source-line-no">198</span><span id="line-198"> * @param snapshotName the name of the snapshot to read from</span>
<span class="source-line-no">199</span><span id="line-199"> * @param restoreDir a temporary directory to restore the snapshot into. Current user should</span>
<span class="source-line-no">200</span><span id="line-200"> * have write permissions to this directory, and this should not be a</span>
<span class="source-line-no">201</span><span id="line-201"> * subdirectory of rootdir. After the job is finished, restoreDir can be</span>
<span class="source-line-no">202</span><span id="line-202"> * deleted.</span>
<span class="source-line-no">203</span><span id="line-203"> * @throws IOException if an error occurs</span>
<span class="source-line-no">204</span><span id="line-204"> */</span>
<span class="source-line-no">205</span><span id="line-205"> public static void setInput(Job job, String snapshotName, Path restoreDir) throws IOException {</span>
<span class="source-line-no">206</span><span id="line-206"> TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, restoreDir);</span>
<span class="source-line-no">207</span><span id="line-207"> }</span>
<span class="source-line-no">208</span><span id="line-208"></span>
<span class="source-line-no">209</span><span id="line-209"> /**</span>
<span class="source-line-no">210</span><span id="line-210"> * Configures the job to use TableSnapshotInputFormat to read from a snapshot.</span>
<span class="source-line-no">211</span><span id="line-211"> * @param job the job to configure</span>
<span class="source-line-no">212</span><span id="line-212"> * @param snapshotName the name of the snapshot to read from</span>
<span class="source-line-no">213</span><span id="line-213"> * @param restoreDir a temporary directory to restore the snapshot into. Current user</span>
<span class="source-line-no">214</span><span id="line-214"> * should have write permissions to this directory, and this should not</span>
<span class="source-line-no">215</span><span id="line-215"> * be a subdirectory of rootdir. After the job is finished, restoreDir</span>
<span class="source-line-no">216</span><span id="line-216"> * can be deleted.</span>
<span class="source-line-no">217</span><span id="line-217"> * @param splitAlgo split algorithm to generate splits from region</span>
<span class="source-line-no">218</span><span id="line-218"> * @param numSplitsPerRegion how many input splits to generate per one region</span>
<span class="source-line-no">219</span><span id="line-219"> * @throws IOException if an error occurs</span>
<span class="source-line-no">220</span><span id="line-220"> */</span>
<span class="source-line-no">221</span><span id="line-221"> public static void setInput(Job job, String snapshotName, Path restoreDir,</span>
<span class="source-line-no">222</span><span id="line-222"> RegionSplitter.SplitAlgorithm splitAlgo, int numSplitsPerRegion) throws IOException {</span>
<span class="source-line-no">223</span><span id="line-223"> TableSnapshotInputFormatImpl.setInput(job.getConfiguration(), snapshotName, restoreDir,</span>
<span class="source-line-no">224</span><span id="line-224"> splitAlgo, numSplitsPerRegion);</span>
<span class="source-line-no">225</span><span id="line-225"> }</span>
<span class="source-line-no">226</span><span id="line-226"></span>
<span class="source-line-no">227</span><span id="line-227"> /**</span>
<span class="source-line-no">228</span><span id="line-228"> * clean restore directory after snapshot scan job</span>
<span class="source-line-no">229</span><span id="line-229"> * @param job the snapshot scan job</span>
<span class="source-line-no">230</span><span id="line-230"> * @param snapshotName the name of the snapshot to read from</span>
<span class="source-line-no">231</span><span id="line-231"> * @throws IOException if an error occurs</span>
<span class="source-line-no">232</span><span id="line-232"> */</span>
<span class="source-line-no">233</span><span id="line-233"> public static void cleanRestoreDir(Job job, String snapshotName) throws IOException {</span>
<span class="source-line-no">234</span><span id="line-234"> TableSnapshotInputFormatImpl.cleanRestoreDir(job, snapshotName);</span>
<span class="source-line-no">235</span><span id="line-235"> }</span>
<span class="source-line-no">236</span><span id="line-236">}</span>