<!DOCTYPE HTML>
<html lang="en">
<head>
<!-- Generated by javadoc (17) -->
<title>Source code</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="source: package: org.apache.hadoop.hbase.mapreduce, class: IntegrationTestTableSnapshotInputFormat">
<meta name="generator" content="javadoc/SourceToHTMLConverter">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body class="source-page">
<main role="main">
<div class="source-container">
<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.mapreduce;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import java.util.Set;</span>
<span class="source-line-no">021</span><span id="line-21">import org.apache.hadoop.conf.Configuration;</span>
<span class="source-line-no">022</span><span id="line-22">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">023</span><span id="line-23">import org.apache.hadoop.hbase.HBaseConfiguration;</span>
<span class="source-line-no">024</span><span id="line-24">import org.apache.hadoop.hbase.IntegrationTestBase;</span>
<span class="source-line-no">025</span><span id="line-25">import org.apache.hadoop.hbase.IntegrationTestingUtility;</span>
<span class="source-line-no">026</span><span id="line-26">import org.apache.hadoop.hbase.TableName;</span>
<span class="source-line-no">027</span><span id="line-27">import org.apache.hadoop.hbase.testclassification.IntegrationTests;</span>
<span class="source-line-no">028</span><span id="line-28">import org.apache.hadoop.hbase.util.Bytes;</span>
<span class="source-line-no">029</span><span id="line-29">import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;</span>
<span class="source-line-no">030</span><span id="line-30">import org.apache.hadoop.util.ToolRunner;</span>
<span class="source-line-no">031</span><span id="line-31">import org.junit.After;</span>
<span class="source-line-no">032</span><span id="line-32">import org.junit.Before;</span>
<span class="source-line-no">033</span><span id="line-33">import org.junit.experimental.categories.Category;</span>
<span class="source-line-no">034</span><span id="line-34">import org.slf4j.Logger;</span>
<span class="source-line-no">035</span><span id="line-35">import org.slf4j.LoggerFactory;</span>
<span class="source-line-no">036</span><span id="line-36"></span>
<span class="source-line-no">037</span><span id="line-37">/**</span>
<span class="source-line-no">038</span><span id="line-38"> * An integration test to test {@link TableSnapshotInputFormat} which enables reading directly from</span>
<span class="source-line-no">039</span><span id="line-39"> * snapshot files without going through hbase servers. This test creates a table and loads the table</span>
<span class="source-line-no">040</span><span id="line-40"> * with the rows ranging from 'aaa' to 'zzz', and for each row, sets the columns f1:(null) and</span>
<span class="source-line-no">041</span><span id="line-41"> * f2:(null) to be the the same as the row value.</span>
<span class="source-line-no">042</span><span id="line-42"> *</span>
<span class="source-line-no">043</span><span id="line-43"> * &lt;pre&gt;</span>
<span class="source-line-no">044</span><span id="line-44"> * aaa, f1: =&amp;gt; aaa</span>
<span class="source-line-no">045</span><span id="line-45"> * aaa, f2: =&amp;gt; aaa</span>
<span class="source-line-no">046</span><span id="line-46"> * aab, f1: =&amp;gt; aab</span>
<span class="source-line-no">047</span><span id="line-47"> * ....</span>
<span class="source-line-no">048</span><span id="line-48"> * zzz, f2: =&amp;gt; zzz</span>
<span class="source-line-no">049</span><span id="line-49"> * &lt;/pre&gt;</span>
<span class="source-line-no">050</span><span id="line-50"> *</span>
<span class="source-line-no">051</span><span id="line-51"> * Then the test creates a snapshot from this table, and overrides the values in the original table</span>
<span class="source-line-no">052</span><span id="line-52"> * with values 'after_snapshot_value'. The test, then runs a mapreduce job over the snapshot with a</span>
<span class="source-line-no">053</span><span id="line-53"> * scan start row 'bbb' and stop row 'yyy'. The data is saved in a single reduce output file, and</span>
<span class="source-line-no">054</span><span id="line-54"> * inspected later to verify that the MR job has seen all the values from the snapshot.</span>
<span class="source-line-no">055</span><span id="line-55"> * &lt;p&gt;</span>
<span class="source-line-no">056</span><span id="line-56"> * These parameters can be used to configure the job: &lt;br&gt;</span>
<span class="source-line-no">057</span><span id="line-57"> * "IntegrationTestTableSnapshotInputFormat.table" =&amp;gt; the name of the table &lt;br&gt;</span>
<span class="source-line-no">058</span><span id="line-58"> * "IntegrationTestTableSnapshotInputFormat.snapshot" =&amp;gt; the name of the snapshot &lt;br&gt;</span>
<span class="source-line-no">059</span><span id="line-59"> * "IntegrationTestTableSnapshotInputFormat.numRegions" =&amp;gt; number of regions in the table to be</span>
<span class="source-line-no">060</span><span id="line-60"> * created (default, 32). &lt;br&gt;</span>
<span class="source-line-no">061</span><span id="line-61"> * "IntegrationTestTableSnapshotInputFormat.tableDir" =&amp;gt; temporary directory to restore the</span>
<span class="source-line-no">062</span><span id="line-62"> * snapshot files</span>
<span class="source-line-no">063</span><span id="line-63"> */</span>
<span class="source-line-no">064</span><span id="line-64">@Category(IntegrationTests.class)</span>
<span class="source-line-no">065</span><span id="line-65">// Not runnable as a unit test. See TestTableSnapshotInputFormat</span>
<span class="source-line-no">066</span><span id="line-66">public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase {</span>
<span class="source-line-no">067</span><span id="line-67"> private static final Logger LOG =</span>
<span class="source-line-no">068</span><span id="line-68"> LoggerFactory.getLogger(IntegrationTestTableSnapshotInputFormat.class);</span>
<span class="source-line-no">069</span><span id="line-69"></span>
<span class="source-line-no">070</span><span id="line-70"> private static final String TABLE_NAME_KEY = "IntegrationTestTableSnapshotInputFormat.table";</span>
<span class="source-line-no">071</span><span id="line-71"> private static final String DEFAULT_TABLE_NAME = "IntegrationTestTableSnapshotInputFormat";</span>
<span class="source-line-no">072</span><span id="line-72"></span>
<span class="source-line-no">073</span><span id="line-73"> private static final String SNAPSHOT_NAME_KEY =</span>
<span class="source-line-no">074</span><span id="line-74"> "IntegrationTestTableSnapshotInputFormat.snapshot";</span>
<span class="source-line-no">075</span><span id="line-75"> private static final String NUM_REGIONS_KEY =</span>
<span class="source-line-no">076</span><span id="line-76"> "IntegrationTestTableSnapshotInputFormat.numRegions";</span>
<span class="source-line-no">077</span><span id="line-77"></span>
<span class="source-line-no">078</span><span id="line-78"> private static final String MR_IMPLEMENTATION_KEY = "IntegrationTestTableSnapshotInputFormat.API";</span>
<span class="source-line-no">079</span><span id="line-79"> private static final String MAPRED_IMPLEMENTATION = "mapred";</span>
<span class="source-line-no">080</span><span id="line-80"> private static final String MAPREDUCE_IMPLEMENTATION = "mapreduce";</span>
<span class="source-line-no">081</span><span id="line-81"></span>
<span class="source-line-no">082</span><span id="line-82"> private static final int DEFAULT_NUM_REGIONS = 32;</span>
<span class="source-line-no">083</span><span id="line-83"> private static final String TABLE_DIR_KEY = "IntegrationTestTableSnapshotInputFormat.tableDir";</span>
<span class="source-line-no">084</span><span id="line-84"></span>
<span class="source-line-no">085</span><span id="line-85"> private static final byte[] START_ROW = Bytes.toBytes("bbb");</span>
<span class="source-line-no">086</span><span id="line-86"> private static final byte[] END_ROW = Bytes.toBytes("yyy");</span>
<span class="source-line-no">087</span><span id="line-87"></span>
<span class="source-line-no">088</span><span id="line-88"> // mapred API missing feature pairity with mapreduce. See comments in</span>
<span class="source-line-no">089</span><span id="line-89"> // mapred.TestTableSnapshotInputFormat</span>
<span class="source-line-no">090</span><span id="line-90"> private static final byte[] MAPRED_START_ROW = Bytes.toBytes("aaa");</span>
<span class="source-line-no">091</span><span id="line-91"> private static final byte[] MAPRED_END_ROW = Bytes.toBytes("zz{"); // 'z' + 1 =&gt; '{'</span>
<span class="source-line-no">092</span><span id="line-92"></span>
<span class="source-line-no">093</span><span id="line-93"> @Override</span>
<span class="source-line-no">094</span><span id="line-94"> public void setConf(Configuration conf) {</span>
<span class="source-line-no">095</span><span id="line-95"> super.setConf(conf);</span>
<span class="source-line-no">096</span><span id="line-96"> util = getTestingUtil(conf);</span>
<span class="source-line-no">097</span><span id="line-97"> }</span>
<span class="source-line-no">098</span><span id="line-98"></span>
<span class="source-line-no">099</span><span id="line-99"> @Override</span>
<span class="source-line-no">100</span><span id="line-100"> @Before</span>
<span class="source-line-no">101</span><span id="line-101"> public void setUp() throws Exception {</span>
<span class="source-line-no">102</span><span id="line-102"> super.setUp();</span>
<span class="source-line-no">103</span><span id="line-103"> util = getTestingUtil(getConf());</span>
<span class="source-line-no">104</span><span id="line-104"> util.initializeCluster(1);</span>
<span class="source-line-no">105</span><span id="line-105"> this.setConf(util.getConfiguration());</span>
<span class="source-line-no">106</span><span id="line-106"> }</span>
<span class="source-line-no">107</span><span id="line-107"></span>
<span class="source-line-no">108</span><span id="line-108"> @Override</span>
<span class="source-line-no">109</span><span id="line-109"> @After</span>
<span class="source-line-no">110</span><span id="line-110"> public void cleanUp() throws Exception {</span>
<span class="source-line-no">111</span><span id="line-111"> util.restoreCluster();</span>
<span class="source-line-no">112</span><span id="line-112"> }</span>
<span class="source-line-no">113</span><span id="line-113"></span>
<span class="source-line-no">114</span><span id="line-114"> @Override</span>
<span class="source-line-no">115</span><span id="line-115"> public void setUpCluster() throws Exception {</span>
<span class="source-line-no">116</span><span id="line-116"> }</span>
<span class="source-line-no">117</span><span id="line-117"></span>
<span class="source-line-no">118</span><span id="line-118"> @Override</span>
<span class="source-line-no">119</span><span id="line-119"> public int runTestFromCommandLine() throws Exception {</span>
<span class="source-line-no">120</span><span id="line-120"> Configuration conf = getConf();</span>
<span class="source-line-no">121</span><span id="line-121"> TableName tableName = TableName.valueOf(conf.get(TABLE_NAME_KEY, DEFAULT_TABLE_NAME));</span>
<span class="source-line-no">122</span><span id="line-122"> String snapshotName = conf.get(SNAPSHOT_NAME_KEY,</span>
<span class="source-line-no">123</span><span id="line-123"> tableName.getQualifierAsString() + "_snapshot_" + EnvironmentEdgeManager.currentTime());</span>
<span class="source-line-no">124</span><span id="line-124"> int numRegions = conf.getInt(NUM_REGIONS_KEY, DEFAULT_NUM_REGIONS);</span>
<span class="source-line-no">125</span><span id="line-125"> String tableDirStr = conf.get(TABLE_DIR_KEY);</span>
<span class="source-line-no">126</span><span id="line-126"> Path tableDir;</span>
<span class="source-line-no">127</span><span id="line-127"> if (tableDirStr == null) {</span>
<span class="source-line-no">128</span><span id="line-128"> tableDir = util.getDataTestDirOnTestFS(tableName.getQualifierAsString());</span>
<span class="source-line-no">129</span><span id="line-129"> } else {</span>
<span class="source-line-no">130</span><span id="line-130"> tableDir = new Path(tableDirStr);</span>
<span class="source-line-no">131</span><span id="line-131"> }</span>
<span class="source-line-no">132</span><span id="line-132"></span>
<span class="source-line-no">133</span><span id="line-133"> final String mr = conf.get(MR_IMPLEMENTATION_KEY, MAPREDUCE_IMPLEMENTATION);</span>
<span class="source-line-no">134</span><span id="line-134"> if (mr.equalsIgnoreCase(MAPREDUCE_IMPLEMENTATION)) {</span>
<span class="source-line-no">135</span><span id="line-135"> /*</span>
<span class="source-line-no">136</span><span id="line-136"> * We create the table using HBaseAdmin#createTable(), which will create the table with</span>
<span class="source-line-no">137</span><span id="line-137"> * desired number of regions. We pass bbb as startKey and yyy as endKey, so if</span>
<span class="source-line-no">138</span><span id="line-138"> * desiredNumRegions is &gt; 2, we create regions empty - bbb and yyy - empty, and we create</span>
<span class="source-line-no">139</span><span id="line-139"> * numRegions - 2 regions between bbb - yyy. The test uses a Scan with startRow bbb and endRow</span>
<span class="source-line-no">140</span><span id="line-140"> * yyy, so, we expect the first and last region to be filtered out in the input format, and we</span>
<span class="source-line-no">141</span><span id="line-141"> * expect numRegions - 2 splits between bbb and yyy.</span>
<span class="source-line-no">142</span><span id="line-142"> */</span>
<span class="source-line-no">143</span><span id="line-143"> LOG.debug("Running job with mapreduce API.");</span>
<span class="source-line-no">144</span><span id="line-144"> int expectedNumSplits = numRegions &gt; 2 ? numRegions - 2 : numRegions;</span>
<span class="source-line-no">145</span><span id="line-145"></span>
<span class="source-line-no">146</span><span id="line-146"> org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat.doTestWithMapReduce(util,</span>
<span class="source-line-no">147</span><span id="line-147"> tableName, snapshotName, START_ROW, END_ROW, tableDir, numRegions, 1, expectedNumSplits,</span>
<span class="source-line-no">148</span><span id="line-148"> false);</span>
<span class="source-line-no">149</span><span id="line-149"> } else if (mr.equalsIgnoreCase(MAPRED_IMPLEMENTATION)) {</span>
<span class="source-line-no">150</span><span id="line-150"> /*</span>
<span class="source-line-no">151</span><span id="line-151"> * Similar considerations to above. The difference is that mapred API does not support</span>
<span class="source-line-no">152</span><span id="line-152"> * specifying start/end rows (or a scan object at all). Thus the omission of first and last</span>
<span class="source-line-no">153</span><span id="line-153"> * regions are not performed. See comments in mapred.TestTableSnapshotInputFormat for details</span>
<span class="source-line-no">154</span><span id="line-154"> * of how that test works around the problem. This feature should be added in follow-on work.</span>
<span class="source-line-no">155</span><span id="line-155"> */</span>
<span class="source-line-no">156</span><span id="line-156"> LOG.debug("Running job with mapred API.");</span>
<span class="source-line-no">157</span><span id="line-157"> int expectedNumSplits = numRegions;</span>
<span class="source-line-no">158</span><span id="line-158"></span>
<span class="source-line-no">159</span><span id="line-159"> org.apache.hadoop.hbase.mapred.TestTableSnapshotInputFormat.doTestWithMapReduce(util,</span>
<span class="source-line-no">160</span><span id="line-160"> tableName, snapshotName, MAPRED_START_ROW, MAPRED_END_ROW, tableDir, numRegions, 1,</span>
<span class="source-line-no">161</span><span id="line-161"> expectedNumSplits, false);</span>
<span class="source-line-no">162</span><span id="line-162"> } else {</span>
<span class="source-line-no">163</span><span id="line-163"> throw new IllegalArgumentException("Unrecognized mapreduce implementation: " + mr + ".");</span>
<span class="source-line-no">164</span><span id="line-164"> }</span>
<span class="source-line-no">165</span><span id="line-165"></span>
<span class="source-line-no">166</span><span id="line-166"> return 0;</span>
<span class="source-line-no">167</span><span id="line-167"> }</span>
<span class="source-line-no">168</span><span id="line-168"></span>
<span class="source-line-no">169</span><span id="line-169"> @Override // Chaos Monkey is not intended to be run with this test</span>
<span class="source-line-no">170</span><span id="line-170"> public TableName getTablename() {</span>
<span class="source-line-no">171</span><span id="line-171"> return null;</span>
<span class="source-line-no">172</span><span id="line-172"> }</span>
<span class="source-line-no">173</span><span id="line-173"></span>
<span class="source-line-no">174</span><span id="line-174"> @Override // Chaos Monkey is not inteded to be run with this test</span>
<span class="source-line-no">175</span><span id="line-175"> protected Set&lt;String&gt; getColumnFamilies() {</span>
<span class="source-line-no">176</span><span id="line-176"> return null;</span>
<span class="source-line-no">177</span><span id="line-177"> }</span>
<span class="source-line-no">178</span><span id="line-178"></span>
<span class="source-line-no">179</span><span id="line-179"> public static void main(String[] args) throws Exception {</span>
<span class="source-line-no">180</span><span id="line-180"> Configuration conf = HBaseConfiguration.create();</span>
<span class="source-line-no">181</span><span id="line-181"> IntegrationTestingUtility.setUseDistributedCluster(conf);</span>
<span class="source-line-no">182</span><span id="line-182"> int ret = ToolRunner.run(conf, new IntegrationTestTableSnapshotInputFormat(), args);</span>
<span class="source-line-no">183</span><span id="line-183"> System.exit(ret);</span>
<span class="source-line-no">184</span><span id="line-184"> }</span>
<span class="source-line-no">185</span><span id="line-185"></span>
<span class="source-line-no">186</span><span id="line-186">}</span>
</pre>
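<p>A minimal sketch of driving this tool programmatically with the configuration keys documented in
the class javadoc above; the table name, snapshot name, and region count below are hypothetical, and
a distributed cluster reachable through the usual HBase client configuration is assumed.</p>
<pre>
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.mapreduce.IntegrationTestTableSnapshotInputFormat;
import org.apache.hadoop.util.ToolRunner;

public class RunSnapshotInputFormatIT {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Run against a distributed cluster, as the tool's own main() does.
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    // Hypothetical values; each key below is read in runTestFromCommandLine().
    conf.set("IntegrationTestTableSnapshotInputFormat.table", "it_snapshot_table");
    conf.set("IntegrationTestTableSnapshotInputFormat.snapshot", "it_snapshot_table_snap");
    conf.setInt("IntegrationTestTableSnapshotInputFormat.numRegions", 8);
    conf.set("IntegrationTestTableSnapshotInputFormat.API", "mapreduce"); // or "mapred"
    int ret = ToolRunner.run(conf, new IntegrationTestTableSnapshotInputFormat(), new String[0]);
    System.exit(ret);
  }
}
</pre>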
</div>
</main>
</body>
</html>