| <!DOCTYPE HTML> |
| <html lang="en"> |
| <head> |
| <!-- Generated by javadoc (17) --> |
| <title>Source code</title> |
| <meta name="viewport" content="width=device-width, initial-scale=1"> |
| <meta name="description" content="source: package: org.apache.hadoop.hbase.fs, class: TestBlockReorderBlockLocation"> |
| <meta name="generator" content="javadoc/SourceToHTMLConverter"> |
| <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> |
| </head> |
| <body class="source-page"> |
| <main role="main"> |
| <div class="source-container"> |
| <pre><span class="source-line-no">001</span><span id="line-1">/*</span> |
| <span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span> |
| <span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span> |
| <span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span> |
| <span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span> |
| <span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span> |
| <span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span> |
| <span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span> |
| <span class="source-line-no">009</span><span id="line-9"> *</span> |
| <span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span> |
| <span class="source-line-no">011</span><span id="line-11"> *</span> |
| <span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span> |
| <span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span> |
| <span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span> |
| <span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span> |
| <span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span> |
| <span class="source-line-no">017</span><span id="line-17"> */</span> |
| <span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.fs;</span> |
| <span class="source-line-no">019</span><span id="line-19"></span> |
| <span class="source-line-no">020</span><span id="line-20">import static org.apache.hadoop.hbase.util.LocatedBlockHelper.getLocatedBlockLocations;</span> |
| <span class="source-line-no">021</span><span id="line-21"></span> |
| <span class="source-line-no">022</span><span id="line-22">import java.lang.reflect.Field;</span> |
| <span class="source-line-no">023</span><span id="line-23">import org.apache.hadoop.conf.Configuration;</span> |
| <span class="source-line-no">024</span><span id="line-24">import org.apache.hadoop.fs.FSDataOutputStream;</span> |
| <span class="source-line-no">025</span><span id="line-25">import org.apache.hadoop.fs.FileSystem;</span> |
| <span class="source-line-no">026</span><span id="line-26">import org.apache.hadoop.fs.Path;</span> |
| <span class="source-line-no">027</span><span id="line-27">import org.apache.hadoop.hbase.HBaseClassTestRule;</span> |
| <span class="source-line-no">028</span><span id="line-28">import org.apache.hadoop.hbase.HBaseTestingUtil;</span> |
| <span class="source-line-no">029</span><span id="line-29">import org.apache.hadoop.hbase.HConstants;</span> |
| <span class="source-line-no">030</span><span id="line-30">import org.apache.hadoop.hbase.SingleProcessHBaseCluster;</span> |
| <span class="source-line-no">031</span><span id="line-31">import org.apache.hadoop.hbase.testclassification.LargeTests;</span> |
| <span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.hbase.testclassification.MiscTests;</span> |
| <span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;</span> |
| <span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;</span> |
| <span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.hdfs.DFSClient;</span> |
| <span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.hdfs.DistributedFileSystem;</span> |
| <span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.hdfs.MiniDFSCluster;</span> |
| <span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.hdfs.protocol.ClientProtocol;</span> |
| <span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.hdfs.protocol.LocatedBlocks;</span> |
| <span class="source-line-no">040</span><span id="line-40">import org.junit.After;</span> |
| <span class="source-line-no">041</span><span id="line-41">import org.junit.Assert;</span> |
| <span class="source-line-no">042</span><span id="line-42">import org.junit.Before;</span> |
| <span class="source-line-no">043</span><span id="line-43">import org.junit.ClassRule;</span> |
| <span class="source-line-no">044</span><span id="line-44">import org.junit.Rule;</span> |
| <span class="source-line-no">045</span><span id="line-45">import org.junit.Test;</span> |
| <span class="source-line-no">046</span><span id="line-46">import org.junit.experimental.categories.Category;</span> |
| <span class="source-line-no">047</span><span id="line-47">import org.junit.rules.TestName;</span> |
| <span class="source-line-no">048</span><span id="line-48"></span> |
| <span class="source-line-no">049</span><span id="line-49">/**</span> |
| <span class="source-line-no">050</span><span id="line-50"> * Tests for the hdfs fix from HBASE-6435. Please don't add new subtest which involves starting /</span> |
| <span class="source-line-no">051</span><span id="line-51"> * stopping MiniDFSCluster in this class. When stopping MiniDFSCluster, shutdown hooks would be</span> |
| <span class="source-line-no">052</span><span id="line-52"> * cleared in hadoop's ShutdownHookManager in hadoop 3. This leads to 'Failed suppression of fs</span> |
| <span class="source-line-no">053</span><span id="line-53"> * shutdown hook' error in region server.</span> |
| <span class="source-line-no">054</span><span id="line-54"> */</span> |
| <span class="source-line-no">055</span><span id="line-55">@Category({ MiscTests.class, LargeTests.class })</span> |
| <span class="source-line-no">056</span><span id="line-56">public class TestBlockReorderBlockLocation {</span> |
| <span class="source-line-no">057</span><span id="line-57"></span> |
| <span class="source-line-no">058</span><span id="line-58"> @ClassRule</span> |
| <span class="source-line-no">059</span><span id="line-59"> public static final HBaseClassTestRule CLASS_RULE =</span> |
| <span class="source-line-no">060</span><span id="line-60"> HBaseClassTestRule.forClass(TestBlockReorderBlockLocation.class);</span> |
| <span class="source-line-no">061</span><span id="line-61"></span> |
| <span class="source-line-no">062</span><span id="line-62"> private Configuration conf;</span> |
| <span class="source-line-no">063</span><span id="line-63"> private MiniDFSCluster cluster;</span> |
| <span class="source-line-no">064</span><span id="line-64"> private HBaseTestingUtil htu;</span> |
| <span class="source-line-no">065</span><span id="line-65"> private DistributedFileSystem dfs;</span> |
| <span class="source-line-no">066</span><span id="line-66"> private static final String host1 = "host1";</span> |
| <span class="source-line-no">067</span><span id="line-67"> private static final String host2 = "host2";</span> |
| <span class="source-line-no">068</span><span id="line-68"> private static final String host3 = "host3";</span> |
| <span class="source-line-no">069</span><span id="line-69"></span> |
| <span class="source-line-no">070</span><span id="line-70"> @Rule</span> |
| <span class="source-line-no">071</span><span id="line-71"> public TestName name = new TestName();</span> |
| <span class="source-line-no">072</span><span id="line-72"></span> |
| <span class="source-line-no">073</span><span id="line-73"> @Before</span> |
| <span class="source-line-no">074</span><span id="line-74"> public void setUp() throws Exception {</span> |
| <span class="source-line-no">075</span><span id="line-75"> htu = new HBaseTestingUtil();</span> |
| <span class="source-line-no">076</span><span id="line-76"> htu.getConfiguration().setInt("dfs.blocksize", 1024);// For the test with multiple blocks</span> |
| <span class="source-line-no">077</span><span id="line-77"> htu.getConfiguration().setInt("dfs.replication", 3);</span> |
| <span class="source-line-no">078</span><span id="line-78"> htu.startMiniDFSCluster(3, new String[] { "/r1", "/r2", "/r3" },</span> |
| <span class="source-line-no">079</span><span id="line-79"> new String[] { host1, host2, host3 });</span> |
| <span class="source-line-no">080</span><span id="line-80"></span> |
| <span class="source-line-no">081</span><span id="line-81"> conf = htu.getConfiguration();</span> |
| <span class="source-line-no">082</span><span id="line-82"> cluster = htu.getDFSCluster();</span> |
| <span class="source-line-no">083</span><span id="line-83"> dfs = (DistributedFileSystem) FileSystem.get(conf);</span> |
| <span class="source-line-no">084</span><span id="line-84"> }</span> |
| <span class="source-line-no">085</span><span id="line-85"></span> |
| <span class="source-line-no">086</span><span id="line-86"> @After</span> |
| <span class="source-line-no">087</span><span id="line-87"> public void tearDownAfterClass() throws Exception {</span> |
| <span class="source-line-no">088</span><span id="line-88"> htu.shutdownMiniCluster();</span> |
| <span class="source-line-no">089</span><span id="line-89"> }</span> |
| <span class="source-line-no">090</span><span id="line-90"></span> |
| <span class="source-line-no">091</span><span id="line-91"> private static ClientProtocol getNamenode(DFSClient dfsc) throws Exception {</span> |
| <span class="source-line-no">092</span><span id="line-92"> Field nf = DFSClient.class.getDeclaredField("namenode");</span> |
| <span class="source-line-no">093</span><span id="line-93"> nf.setAccessible(true);</span> |
| <span class="source-line-no">094</span><span id="line-94"> return (ClientProtocol) nf.get(dfsc);</span> |
| <span class="source-line-no">095</span><span id="line-95"> }</span> |
| <span class="source-line-no">096</span><span id="line-96"></span> |
| <span class="source-line-no">097</span><span id="line-97"> /**</span> |
| <span class="source-line-no">098</span><span id="line-98"> * Test that the reorder algo works as we expect.</span> |
| <span class="source-line-no">099</span><span id="line-99"> */</span> |
| <span class="source-line-no">100</span><span id="line-100"> @Test</span> |
| <span class="source-line-no">101</span><span id="line-101"> public void testBlockLocation() throws Exception {</span> |
| <span class="source-line-no">102</span><span id="line-102"> // We need to start HBase to get HConstants.HBASE_DIR set in conf</span> |
| <span class="source-line-no">103</span><span id="line-103"> htu.startMiniZKCluster();</span> |
| <span class="source-line-no">104</span><span id="line-104"> SingleProcessHBaseCluster hbm = htu.startMiniHBaseCluster();</span> |
| <span class="source-line-no">105</span><span id="line-105"> conf = hbm.getConfiguration();</span> |
| <span class="source-line-no">106</span><span id="line-106"></span> |
| <span class="source-line-no">107</span><span id="line-107"> // The "/" is mandatory, without it we've got a null pointer exception on the namenode</span> |
| <span class="source-line-no">108</span><span id="line-108"> final String fileName = "/helloWorld";</span> |
| <span class="source-line-no">109</span><span id="line-109"> Path p = new Path(fileName);</span> |
| <span class="source-line-no">110</span><span id="line-110"></span> |
| <span class="source-line-no">111</span><span id="line-111"> final int repCount = 3;</span> |
| <span class="source-line-no">112</span><span id="line-112"> Assert.assertTrue((short) cluster.getDataNodes().size() >= repCount);</span> |
| <span class="source-line-no">113</span><span id="line-113"></span> |
| <span class="source-line-no">114</span><span id="line-114"> // Let's write the file</span> |
| <span class="source-line-no">115</span><span id="line-115"> FSDataOutputStream fop = dfs.create(p, (short) repCount);</span> |
| <span class="source-line-no">116</span><span id="line-116"> final double toWrite = 875.5613;</span> |
| <span class="source-line-no">117</span><span id="line-117"> fop.writeDouble(toWrite);</span> |
| <span class="source-line-no">118</span><span id="line-118"> fop.close();</span> |
| <span class="source-line-no">119</span><span id="line-119"></span> |
| <span class="source-line-no">120</span><span id="line-120"> for (int i = 0; i < 10; i++) {</span> |
| <span class="source-line-no">121</span><span id="line-121"> // The interceptor is not set in this test, so we get the raw list at this point</span> |
| <span class="source-line-no">122</span><span id="line-122"> LocatedBlocks lbs;</span> |
| <span class="source-line-no">123</span><span id="line-123"> final long max = EnvironmentEdgeManager.currentTime() + 10000;</span> |
| <span class="source-line-no">124</span><span id="line-124"> do {</span> |
| <span class="source-line-no">125</span><span id="line-125"> lbs = getNamenode(dfs.getClient()).getBlockLocations(fileName, 0, 1);</span> |
| <span class="source-line-no">126</span><span id="line-126"> Assert.assertNotNull(lbs.getLocatedBlocks());</span> |
| <span class="source-line-no">127</span><span id="line-127"> Assert.assertEquals(1, lbs.getLocatedBlocks().size());</span> |
| <span class="source-line-no">128</span><span id="line-128"> Assert.assertTrue(</span> |
| <span class="source-line-no">129</span><span id="line-129"> "Expecting " + repCount + " , got " + getLocatedBlockLocations(lbs.get(0)).length,</span> |
| <span class="source-line-no">130</span><span id="line-130"> EnvironmentEdgeManager.currentTime() < max);</span> |
| <span class="source-line-no">131</span><span id="line-131"> } while (getLocatedBlockLocations(lbs.get(0)).length != repCount);</span> |
| <span class="source-line-no">132</span><span id="line-132"></span> |
| <span class="source-line-no">133</span><span id="line-133"> // Should be filtered, the name is different => The order won't change</span> |
| <span class="source-line-no">134</span><span id="line-134"> Object[] originalList = lbs.getLocatedBlocks().toArray();</span> |
| <span class="source-line-no">135</span><span id="line-135"> HFileSystem.ReorderWALBlocks lrb = new HFileSystem.ReorderWALBlocks();</span> |
| <span class="source-line-no">136</span><span id="line-136"> lrb.reorderBlocks(conf, lbs, fileName);</span> |
| <span class="source-line-no">137</span><span id="line-137"> Assert.assertArrayEquals(originalList, lbs.getLocatedBlocks().toArray());</span> |
| <span class="source-line-no">138</span><span id="line-138"></span> |
| <span class="source-line-no">139</span><span id="line-139"> // Should be reordered, as we pretend to be a file name with a compliant stuff</span> |
| <span class="source-line-no">140</span><span id="line-140"> Assert.assertNotNull(conf.get(HConstants.HBASE_DIR));</span> |
| <span class="source-line-no">141</span><span id="line-141"> Assert.assertFalse(conf.get(HConstants.HBASE_DIR).isEmpty());</span> |
| <span class="source-line-no">142</span><span id="line-142"> String pseudoLogFile = conf.get(HConstants.HBASE_DIR) + "/" + HConstants.HREGION_LOGDIR_NAME</span> |
| <span class="source-line-no">143</span><span id="line-143"> + "/" + host1 + ",6977,6576" + "/mylogfile";</span> |
| <span class="source-line-no">144</span><span id="line-144"></span> |
| <span class="source-line-no">145</span><span id="line-145"> // Check that it will be possible to extract a ServerName from our construction</span> |
| <span class="source-line-no">146</span><span id="line-146"> Assert.assertNotNull("log= " + pseudoLogFile,</span> |
| <span class="source-line-no">147</span><span id="line-147"> AbstractFSWALProvider.getServerNameFromWALDirectoryName(dfs.getConf(), pseudoLogFile));</span> |
| <span class="source-line-no">148</span><span id="line-148"></span> |
| <span class="source-line-no">149</span><span id="line-149"> // And check we're doing the right reorder.</span> |
| <span class="source-line-no">150</span><span id="line-150"> lrb.reorderBlocks(conf, lbs, pseudoLogFile);</span> |
| <span class="source-line-no">151</span><span id="line-151"> Assert.assertEquals(host1, getLocatedBlockLocations(lbs.get(0))[2].getHostName());</span> |
| <span class="source-line-no">152</span><span id="line-152"></span> |
| <span class="source-line-no">153</span><span id="line-153"> // Check again, it should remain the same.</span> |
| <span class="source-line-no">154</span><span id="line-154"> lrb.reorderBlocks(conf, lbs, pseudoLogFile);</span> |
| <span class="source-line-no">155</span><span id="line-155"> Assert.assertEquals(host1, getLocatedBlockLocations(lbs.get(0))[2].getHostName());</span> |
| <span class="source-line-no">156</span><span id="line-156"> }</span> |
| <span class="source-line-no">157</span><span id="line-157"> }</span> |
| <span class="source-line-no">158</span><span id="line-158"></span> |
| <span class="source-line-no">159</span><span id="line-159">}</span> |
| </pre> |
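| <p>For reference, the block reordering exercised above can be driven by hand. The sketch below is |
| illustrative only: it reuses the calls and constants from the listing, the helper name |
| <code>reorderForWal</code> and the path components (host1, 6977, 6576, mylogfile) are made up for |
| the example, and it assumes a Configuration with the HBase root dir set plus LocatedBlocks |
| fetched from the namenode, as in the test.</p> |
| <pre> |
| // Minimal sketch: reorder the block locations for a made-up WAL path. |
| static void reorderForWal(Configuration conf, LocatedBlocks lbs) throws Exception { |
|   // The middle path element is a server name in host,port,startcode form, as the WAL layout |
|   // requires; the concrete values here are arbitrary. |
|   String walFile = conf.get(HConstants.HBASE_DIR) + "/" + HConstants.HREGION_LOGDIR_NAME |
|     + "/host1,6977,6576/mylogfile"; |
|   // ReorderWALBlocks pushes that host's replica to the end of each block's location list. |
|   new HFileSystem.ReorderWALBlocks().reorderBlocks(conf, lbs, walFile); |
| } |
| </pre> |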
| </div> |
| </main> |
| </body> |
| </html> |