<!DOCTYPE HTML>
<html lang="en">
<head>
<!-- Generated by javadoc (17) -->
<title>Source code</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="source: package: org.apache.hadoop.hbase.mapreduce, class: MRIncrementalLoadTestBase">
<meta name="generator" content="javadoc/SourceToHTMLConverter">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body class="source-page">
<main role="main">
<div class="source-container">
<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.mapreduce;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import static org.junit.Assert.assertEquals;</span>
<span class="source-line-no">021</span><span id="line-21">import static org.junit.Assert.assertTrue;</span>
<span class="source-line-no">022</span><span id="line-22"></span>
<span class="source-line-no">023</span><span id="line-23">import java.io.IOException;</span>
<span class="source-line-no">024</span><span id="line-24">import java.util.ArrayList;</span>
<span class="source-line-no">025</span><span id="line-25">import java.util.HashMap;</span>
<span class="source-line-no">026</span><span id="line-26">import java.util.List;</span>
<span class="source-line-no">027</span><span id="line-27">import java.util.Map;</span>
<span class="source-line-no">028</span><span id="line-28">import org.apache.hadoop.conf.Configuration;</span>
<span class="source-line-no">029</span><span id="line-29">import org.apache.hadoop.fs.FileStatus;</span>
<span class="source-line-no">030</span><span id="line-30">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">031</span><span id="line-31">import org.apache.hadoop.hbase.Cell;</span>
<span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.hbase.CellUtil;</span>
<span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.hbase.HBaseTestingUtil;</span>
<span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.hbase.HConstants;</span>
<span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.hbase.HDFSBlocksDistribution;</span>
<span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.hbase.StartTestingClusterOption;</span>
<span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.hbase.TableName;</span>
<span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.hbase.client.Admin;</span>
<span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.hbase.client.RegionLocator;</span>
<span class="source-line-no">040</span><span id="line-40">import org.apache.hadoop.hbase.client.Result;</span>
<span class="source-line-no">041</span><span id="line-41">import org.apache.hadoop.hbase.client.ResultScanner;</span>
<span class="source-line-no">042</span><span id="line-42">import org.apache.hadoop.hbase.client.Scan;</span>
<span class="source-line-no">043</span><span id="line-43">import org.apache.hadoop.hbase.client.Table;</span>
<span class="source-line-no">044</span><span id="line-44">import org.apache.hadoop.hbase.regionserver.HRegion;</span>
<span class="source-line-no">045</span><span id="line-45">import org.apache.hadoop.hbase.tool.BulkLoadHFiles;</span>
<span class="source-line-no">046</span><span id="line-46">import org.apache.hadoop.hbase.util.Bytes;</span>
<span class="source-line-no">047</span><span id="line-47">import org.junit.After;</span>
<span class="source-line-no">048</span><span id="line-48">import org.junit.AfterClass;</span>
<span class="source-line-no">049</span><span id="line-49">import org.junit.Before;</span>
<span class="source-line-no">050</span><span id="line-50">import org.junit.Test;</span>
<span class="source-line-no">051</span><span id="line-51">import org.junit.runners.Parameterized.Parameter;</span>
<span class="source-line-no">052</span><span id="line-52">import org.slf4j.Logger;</span>
<span class="source-line-no">053</span><span id="line-53">import org.slf4j.LoggerFactory;</span>
<span class="source-line-no">054</span><span id="line-54"></span>
<span class="source-line-no">055</span><span id="line-55">public class MRIncrementalLoadTestBase extends HFileOutputFormat2TestBase {</span>
<span class="source-line-no">056</span><span id="line-56"></span>
<span class="source-line-no">057</span><span id="line-57"> private static final Logger LOG = LoggerFactory.getLogger(MRIncrementalLoadTestBase.class);</span>
<span class="source-line-no">058</span><span id="line-58"></span>
<span class="source-line-no">059</span><span id="line-59"> private static boolean SHOULD_KEEP_LOCALITY;</span>
<span class="source-line-no">060</span><span id="line-60"></span>
<span class="source-line-no">061</span><span id="line-61"> private static String[] HOSTNAMES;</span>
<span class="source-line-no">062</span><span id="line-62"></span>
<span class="source-line-no">063</span><span id="line-63"> @Parameter(0)</span>
<span class="source-line-no">064</span><span id="line-64"> public boolean shouldChangeRegions;</span>
<span class="source-line-no">065</span><span id="line-65"></span>
<span class="source-line-no">066</span><span id="line-66"> @Parameter(1)</span>
<span class="source-line-no">067</span><span id="line-67"> public boolean putSortReducer;</span>
<span class="source-line-no">068</span><span id="line-68"></span>
<span class="source-line-no">069</span><span id="line-69"> @Parameter(2)</span>
<span class="source-line-no">070</span><span id="line-70"> public List&lt;String&gt; tableStr;</span>
<span class="source-line-no">071</span><span id="line-71"></span>
<span class="source-line-no">072</span><span id="line-72"> private Map&lt;String, Table&gt; allTables;</span>
<span class="source-line-no">073</span><span id="line-73"></span>
<span class="source-line-no">074</span><span id="line-74"> private List&lt;HFileOutputFormat2.TableInfo&gt; tableInfo;</span>
<span class="source-line-no">075</span><span id="line-75"></span>
<span class="source-line-no">076</span><span id="line-76"> private Path testDir;</span>
<span class="source-line-no">077</span><span id="line-77"></span>
<span class="source-line-no">078</span><span id="line-78"> protected static void setupCluster(boolean shouldKeepLocality) throws Exception {</span>
<span class="source-line-no">079</span><span id="line-79"> SHOULD_KEEP_LOCALITY = shouldKeepLocality;</span>
<span class="source-line-no">080</span><span id="line-80"> Configuration conf = UTIL.getConfiguration();</span>
<span class="source-line-no">081</span><span id="line-81"> conf.setBoolean(MultiTableHFileOutputFormat.LOCALITY_SENSITIVE_CONF_KEY, shouldKeepLocality);</span>
<span class="source-line-no">082</span><span id="line-82"> // We should change host count higher than hdfs replica count when MiniHBaseCluster supports</span>
<span class="source-line-no">083</span><span id="line-83"> // explicit hostnames parameter just like MiniDFSCluster does.</span>
<span class="source-line-no">084</span><span id="line-84"> int hostCount = shouldKeepLocality ? 3 : 1;</span>
<span class="source-line-no">085</span><span id="line-85"></span>
<span class="source-line-no">086</span><span id="line-86"> HOSTNAMES = new String[hostCount];</span>
<span class="source-line-no">087</span><span id="line-87"> for (int i = 0; i &lt; hostCount; ++i) {</span>
<span class="source-line-no">088</span><span id="line-88"> HOSTNAMES[i] = "datanode_" + i;</span>
<span class="source-line-no">089</span><span id="line-89"> }</span>
<span class="source-line-no">090</span><span id="line-90"> StartTestingClusterOption option = StartTestingClusterOption.builder()</span>
<span class="source-line-no">091</span><span id="line-91"> .numRegionServers(hostCount).dataNodeHosts(HOSTNAMES).build();</span>
<span class="source-line-no">092</span><span id="line-92"> UTIL.getConfiguration().unset(HConstants.TEMPORARY_FS_DIRECTORY_KEY);</span>
<span class="source-line-no">093</span><span id="line-93"> UTIL.startMiniCluster(option);</span>
<span class="source-line-no">094</span><span id="line-94"></span>
<span class="source-line-no">095</span><span id="line-95"> }</span>
<span class="source-line-no">096</span><span id="line-96"></span>
<span class="source-line-no">097</span><span id="line-97"> @AfterClass</span>
<span class="source-line-no">098</span><span id="line-98"> public static void tearDownAfterClass() throws IOException {</span>
<span class="source-line-no">099</span><span id="line-99"> UTIL.shutdownMiniCluster();</span>
<span class="source-line-no">100</span><span id="line-100"> }</span>
<span class="source-line-no">101</span><span id="line-101"></span>
<span class="source-line-no">102</span><span id="line-102"> @Before</span>
<span class="source-line-no">103</span><span id="line-103"> public void setUp() throws IOException {</span>
<span class="source-line-no">104</span><span id="line-104"> int regionNum = SHOULD_KEEP_LOCALITY ? 20 : 5;</span>
<span class="source-line-no">105</span><span id="line-105"> allTables = new HashMap&lt;&gt;(tableStr.size());</span>
<span class="source-line-no">106</span><span id="line-106"> tableInfo = new ArrayList&lt;&gt;(tableStr.size());</span>
<span class="source-line-no">107</span><span id="line-107"> for (String tableStrSingle : tableStr) {</span>
<span class="source-line-no">108</span><span id="line-108"> byte[][] splitKeys = generateRandomSplitKeys(regionNum - 1);</span>
<span class="source-line-no">109</span><span id="line-109"> TableName tableName = TableName.valueOf(tableStrSingle);</span>
<span class="source-line-no">110</span><span id="line-110"> Table table = UTIL.createTable(tableName, FAMILIES, splitKeys);</span>
<span class="source-line-no">111</span><span id="line-111"></span>
<span class="source-line-no">112</span><span id="line-112"> RegionLocator r = UTIL.getConnection().getRegionLocator(tableName);</span>
<span class="source-line-no">113</span><span id="line-113"> assertEquals("Should start with empty table", 0, HBaseTestingUtil.countRows(table));</span>
<span class="source-line-no">114</span><span id="line-114"> int numRegions = r.getStartKeys().length;</span>
<span class="source-line-no">115</span><span id="line-115"> assertEquals("Should make " + regionNum + " regions", numRegions, regionNum);</span>
<span class="source-line-no">116</span><span id="line-116"></span>
<span class="source-line-no">117</span><span id="line-117"> allTables.put(tableStrSingle, table);</span>
<span class="source-line-no">118</span><span id="line-118"> tableInfo.add(new HFileOutputFormat2.TableInfo(table.getDescriptor(), r));</span>
<span class="source-line-no">119</span><span id="line-119"> }</span>
<span class="source-line-no">120</span><span id="line-120"> testDir = UTIL.getDataTestDirOnTestFS(tableStr.get(0));</span>
<span class="source-line-no">121</span><span id="line-121"> }</span>
<span class="source-line-no">122</span><span id="line-122"></span>
<span class="source-line-no">123</span><span id="line-123"> @After</span>
<span class="source-line-no">124</span><span id="line-124"> public void tearDown() throws IOException {</span>
<span class="source-line-no">125</span><span id="line-125"> for (HFileOutputFormat2.TableInfo tableInfoSingle : tableInfo) {</span>
<span class="source-line-no">126</span><span id="line-126"> tableInfoSingle.getRegionLocator().close();</span>
<span class="source-line-no">127</span><span id="line-127"> }</span>
<span class="source-line-no">128</span><span id="line-128"> tableInfo.clear();</span>
<span class="source-line-no">129</span><span id="line-129"> allTables.clear();</span>
<span class="source-line-no">130</span><span id="line-130"> for (String tableStrSingle : tableStr) {</span>
<span class="source-line-no">131</span><span id="line-131"> UTIL.deleteTable(TableName.valueOf(tableStrSingle));</span>
<span class="source-line-no">132</span><span id="line-132"> }</span>
<span class="source-line-no">133</span><span id="line-133"> }</span>
<span class="source-line-no">134</span><span id="line-134"></span>
<span class="source-line-no">135</span><span id="line-135"> @Test</span>
<span class="source-line-no">136</span><span id="line-136"> public void doIncrementalLoadTest() throws Exception {</span>
<span class="source-line-no">137</span><span id="line-137"> boolean writeMultipleTables = tableStr.size() &gt; 1;</span>
<span class="source-line-no">138</span><span id="line-138"> // Generate the bulk load files</span>
<span class="source-line-no">139</span><span id="line-139"> runIncrementalPELoad(UTIL.getConfiguration(), tableInfo, testDir, putSortReducer);</span>
<span class="source-line-no">140</span><span id="line-140"> if (writeMultipleTables) {</span>
<span class="source-line-no">141</span><span id="line-141"> testDir = new Path(testDir, "default");</span>
<span class="source-line-no">142</span><span id="line-142"> }</span>
<span class="source-line-no">143</span><span id="line-143"></span>
<span class="source-line-no">144</span><span id="line-144"> for (Table tableSingle : allTables.values()) {</span>
<span class="source-line-no">145</span><span id="line-145"> // This doesn't write into the table, just makes files</span>
<span class="source-line-no">146</span><span id="line-146"> assertEquals("HFOF should not touch actual table", 0,</span>
<span class="source-line-no">147</span><span id="line-147"> HBaseTestingUtil.countRows(tableSingle));</span>
<span class="source-line-no">148</span><span id="line-148"> }</span>
<span class="source-line-no">149</span><span id="line-149"> int numTableDirs = 0;</span>
<span class="source-line-no">150</span><span id="line-150"> FileStatus[] fss = testDir.getFileSystem(UTIL.getConfiguration()).listStatus(testDir);</span>
<span class="source-line-no">151</span><span id="line-151"> for (FileStatus tf : fss) {</span>
<span class="source-line-no">152</span><span id="line-152"> Path tablePath = testDir;</span>
<span class="source-line-no">153</span><span id="line-153"> if (writeMultipleTables) {</span>
<span class="source-line-no">154</span><span id="line-154"> if (allTables.containsKey(tf.getPath().getName())) {</span>
<span class="source-line-no">155</span><span id="line-155"> ++numTableDirs;</span>
<span class="source-line-no">156</span><span id="line-156"> tablePath = tf.getPath();</span>
<span class="source-line-no">157</span><span id="line-157"> } else {</span>
<span class="source-line-no">158</span><span id="line-158"> continue;</span>
<span class="source-line-no">159</span><span id="line-159"> }</span>
<span class="source-line-no">160</span><span id="line-160"> }</span>
<span class="source-line-no">161</span><span id="line-161"></span>
<span class="source-line-no">162</span><span id="line-162"> // Make sure that a directory was created for every CF</span>
<span class="source-line-no">163</span><span id="line-163"> int dir = 0;</span>
<span class="source-line-no">164</span><span id="line-164"> fss = tablePath.getFileSystem(UTIL.getConfiguration()).listStatus(tablePath);</span>
<span class="source-line-no">165</span><span id="line-165"> for (FileStatus f : fss) {</span>
<span class="source-line-no">166</span><span id="line-166"> for (byte[] family : FAMILIES) {</span>
<span class="source-line-no">167</span><span id="line-167"> if (Bytes.toString(family).equals(f.getPath().getName())) {</span>
<span class="source-line-no">168</span><span id="line-168"> ++dir;</span>
<span class="source-line-no">169</span><span id="line-169"> }</span>
<span class="source-line-no">170</span><span id="line-170"> }</span>
<span class="source-line-no">171</span><span id="line-171"> }</span>
<span class="source-line-no">172</span><span id="line-172"> assertEquals("Column family not found in FS.", FAMILIES.length, dir);</span>
<span class="source-line-no">173</span><span id="line-173"> }</span>
<span class="source-line-no">174</span><span id="line-174"> if (writeMultipleTables) {</span>
<span class="source-line-no">175</span><span id="line-175"> assertEquals("Dir for all input tables not created", numTableDirs, allTables.size());</span>
<span class="source-line-no">176</span><span id="line-176"> }</span>
<span class="source-line-no">177</span><span id="line-177"></span>
<span class="source-line-no">178</span><span id="line-178"> Admin admin = UTIL.getAdmin();</span>
<span class="source-line-no">179</span><span id="line-179"></span>
<span class="source-line-no">180</span><span id="line-180"> // handle the split case</span>
<span class="source-line-no">181</span><span id="line-181"> if (shouldChangeRegions) {</span>
<span class="source-line-no">182</span><span id="line-182"> Table chosenTable = allTables.values().iterator().next();</span>
<span class="source-line-no">183</span><span id="line-183"> // Choose a semi-random table if multiple tables are available</span>
<span class="source-line-no">184</span><span id="line-184"> LOG.info("Changing regions in table " + chosenTable.getName().getNameAsString());</span>
<span class="source-line-no">185</span><span id="line-185"> admin.disableTable(chosenTable.getName());</span>
<span class="source-line-no">186</span><span id="line-186"> UTIL.waitUntilNoRegionsInTransition();</span>
<span class="source-line-no">187</span><span id="line-187"></span>
<span class="source-line-no">188</span><span id="line-188"> UTIL.deleteTable(chosenTable.getName());</span>
<span class="source-line-no">189</span><span id="line-189"> byte[][] newSplitKeys = generateRandomSplitKeys(14);</span>
<span class="source-line-no">190</span><span id="line-190"> UTIL.createTable(chosenTable.getName(), FAMILIES, newSplitKeys);</span>
<span class="source-line-no">191</span><span id="line-191"> UTIL.waitTableAvailable(chosenTable.getName());</span>
<span class="source-line-no">192</span><span id="line-192"> }</span>
<span class="source-line-no">193</span><span id="line-193"></span>
<span class="source-line-no">194</span><span id="line-194"> // Perform the actual load</span>
<span class="source-line-no">195</span><span id="line-195"> for (HFileOutputFormat2.TableInfo singleTableInfo : tableInfo) {</span>
<span class="source-line-no">196</span><span id="line-196"> Path tableDir = testDir;</span>
<span class="source-line-no">197</span><span id="line-197"> String tableNameStr = singleTableInfo.getTableDescriptor().getTableName().getNameAsString();</span>
<span class="source-line-no">198</span><span id="line-198"> LOG.info("Running BulkLoadHFiles on table" + tableNameStr);</span>
<span class="source-line-no">199</span><span id="line-199"> if (writeMultipleTables) {</span>
<span class="source-line-no">200</span><span id="line-200"> tableDir = new Path(testDir, tableNameStr);</span>
<span class="source-line-no">201</span><span id="line-201"> }</span>
<span class="source-line-no">202</span><span id="line-202"> Table currentTable = allTables.get(tableNameStr);</span>
<span class="source-line-no">203</span><span id="line-203"> TableName currentTableName = currentTable.getName();</span>
<span class="source-line-no">204</span><span id="line-204"> BulkLoadHFiles.create(UTIL.getConfiguration()).bulkLoad(currentTableName, tableDir);</span>
<span class="source-line-no">205</span><span id="line-205"></span>
<span class="source-line-no">206</span><span id="line-206"> // Ensure data shows up</span>
<span class="source-line-no">207</span><span id="line-207"> int expectedRows = 0;</span>
<span class="source-line-no">208</span><span id="line-208"> if (putSortReducer) {</span>
<span class="source-line-no">209</span><span id="line-209"> // no rows should be extracted</span>
<span class="source-line-no">210</span><span id="line-210"> assertEquals("BulkLoadHFiles should put expected data in table", expectedRows,</span>
<span class="source-line-no">211</span><span id="line-211"> HBaseTestingUtil.countRows(currentTable));</span>
<span class="source-line-no">212</span><span id="line-212"> } else {</span>
<span class="source-line-no">213</span><span id="line-213"> expectedRows = NMapInputFormat.getNumMapTasks(UTIL.getConfiguration()) * ROWSPERSPLIT;</span>
<span class="source-line-no">214</span><span id="line-214"> assertEquals("BulkLoadHFiles should put expected data in table", expectedRows,</span>
<span class="source-line-no">215</span><span id="line-215"> HBaseTestingUtil.countRows(currentTable));</span>
<span class="source-line-no">216</span><span id="line-216"> Scan scan = new Scan();</span>
<span class="source-line-no">217</span><span id="line-217"> ResultScanner results = currentTable.getScanner(scan);</span>
<span class="source-line-no">218</span><span id="line-218"> for (Result res : results) {</span>
<span class="source-line-no">219</span><span id="line-219"> assertEquals(FAMILIES.length, res.rawCells().length);</span>
<span class="source-line-no">220</span><span id="line-220"> Cell first = res.rawCells()[0];</span>
<span class="source-line-no">221</span><span id="line-221"> for (Cell kv : res.rawCells()) {</span>
<span class="source-line-no">222</span><span id="line-222"> assertTrue(CellUtil.matchingRows(first, kv));</span>
<span class="source-line-no">223</span><span id="line-223"> assertTrue(Bytes.equals(CellUtil.cloneValue(first), CellUtil.cloneValue(kv)));</span>
<span class="source-line-no">224</span><span id="line-224"> }</span>
<span class="source-line-no">225</span><span id="line-225"> }</span>
<span class="source-line-no">226</span><span id="line-226"> results.close();</span>
<span class="source-line-no">227</span><span id="line-227"> }</span>
<span class="source-line-no">228</span><span id="line-228"> String tableDigestBefore = UTIL.checksumRows(currentTable);</span>
<span class="source-line-no">229</span><span id="line-229"> // Check region locality</span>
<span class="source-line-no">230</span><span id="line-230"> HDFSBlocksDistribution hbd = new HDFSBlocksDistribution();</span>
<span class="source-line-no">231</span><span id="line-231"> for (HRegion region : UTIL.getHBaseCluster().getRegions(currentTableName)) {</span>
<span class="source-line-no">232</span><span id="line-232"> hbd.add(region.getHDFSBlocksDistribution());</span>
<span class="source-line-no">233</span><span id="line-233"> }</span>
<span class="source-line-no">234</span><span id="line-234"> for (String hostname : HOSTNAMES) {</span>
<span class="source-line-no">235</span><span id="line-235"> float locality = hbd.getBlockLocalityIndex(hostname);</span>
<span class="source-line-no">236</span><span id="line-236"> LOG.info("locality of [" + hostname + "]: " + locality);</span>
<span class="source-line-no">237</span><span id="line-237"> assertEquals(100, (int) (locality * 100));</span>
<span class="source-line-no">238</span><span id="line-238"> }</span>
<span class="source-line-no">239</span><span id="line-239"></span>
<span class="source-line-no">240</span><span id="line-240"> // Cause regions to reopen</span>
<span class="source-line-no">241</span><span id="line-241"> admin.disableTable(currentTableName);</span>
<span class="source-line-no">242</span><span id="line-242"> while (!admin.isTableDisabled(currentTableName)) {</span>
<span class="source-line-no">243</span><span id="line-243"> Thread.sleep(200);</span>
<span class="source-line-no">244</span><span id="line-244"> LOG.info("Waiting for table to disable");</span>
<span class="source-line-no">245</span><span id="line-245"> }</span>
<span class="source-line-no">246</span><span id="line-246"> admin.enableTable(currentTableName);</span>
<span class="source-line-no">247</span><span id="line-247"> UTIL.waitTableAvailable(currentTableName);</span>
<span class="source-line-no">248</span><span id="line-248"> assertEquals("Data should remain after reopening of regions", tableDigestBefore,</span>
<span class="source-line-no">249</span><span id="line-249"> UTIL.checksumRows(currentTable));</span>
<span class="source-line-no">250</span><span id="line-250"> }</span>
<span class="source-line-no">251</span><span id="line-251"> }</span>
<span class="source-line-no">252</span><span id="line-252">}</span>
</pre>
</div>
</main>
</body>
</html>