| <!DOCTYPE HTML> |
| <html lang="en"> |
| <head> |
| <!-- Generated by javadoc (17) --> |
| <title>Source code</title> |
| <meta name="viewport" content="width=device-width, initial-scale=1"> |
| <meta name="description" content="source: package: org.apache.hadoop.hbase.io.hfile, class: TestPrefetchWithBucketCache"> |
| <meta name="generator" content="javadoc/SourceToHTMLConverter"> |
| <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> |
| </head> |
| <body class="source-page"> |
| <main role="main"> |
| <div class="source-container"> |
| <pre><span class="source-line-no">001</span><span id="line-1">/*</span> |
| <span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span> |
| <span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span> |
| <span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span> |
| <span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span> |
| <span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span> |
| <span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span> |
| <span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span> |
| <span class="source-line-no">009</span><span id="line-9"> *</span> |
| <span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span> |
| <span class="source-line-no">011</span><span id="line-11"> *</span> |
| <span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span> |
| <span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span> |
| <span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span> |
| <span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span> |
| <span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span> |
| <span class="source-line-no">017</span><span id="line-17"> */</span> |
| <span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.io.hfile;</span> |
| <span class="source-line-no">019</span><span id="line-19"></span> |
| <span class="source-line-no">020</span><span id="line-20">import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;</span> |
| <span class="source-line-no">021</span><span id="line-21">import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;</span> |
| <span class="source-line-no">022</span><span id="line-22">import static org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.BUCKET_CACHE_BUCKETS_KEY;</span> |
| <span class="source-line-no">023</span><span id="line-23">import static org.junit.Assert.assertEquals;</span> |
| <span class="source-line-no">024</span><span id="line-24">import static org.junit.Assert.assertNotNull;</span> |
| <span class="source-line-no">025</span><span id="line-25">import static org.junit.Assert.assertNull;</span> |
| <span class="source-line-no">026</span><span id="line-26">import static org.junit.Assert.assertTrue;</span> |
| <span class="source-line-no">027</span><span id="line-27">import static org.junit.Assert.fail;</span> |
| <span class="source-line-no">028</span><span id="line-28"></span> |
| <span class="source-line-no">029</span><span id="line-29">import java.io.File;</span> |
| <span class="source-line-no">030</span><span id="line-30">import java.io.IOException;</span> |
| <span class="source-line-no">031</span><span id="line-31">import java.util.Map;</span> |
| <span class="source-line-no">032</span><span id="line-32">import java.util.Random;</span> |
| <span class="source-line-no">033</span><span id="line-33">import java.util.concurrent.ThreadLocalRandom;</span> |
| <span class="source-line-no">034</span><span id="line-34">import java.util.function.BiConsumer;</span> |
| <span class="source-line-no">035</span><span id="line-35">import java.util.function.BiFunction;</span> |
| <span class="source-line-no">036</span><span id="line-36">import org.apache.commons.lang3.mutable.MutableLong;</span> |
| <span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.conf.Configuration;</span> |
| <span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.fs.FileSystem;</span> |
| <span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.fs.Path;</span> |
| <span class="source-line-no">040</span><span id="line-40">import org.apache.hadoop.hbase.HBaseClassTestRule;</span> |
| <span class="source-line-no">041</span><span id="line-41">import org.apache.hadoop.hbase.HBaseTestingUtil;</span> |
| <span class="source-line-no">042</span><span id="line-42">import org.apache.hadoop.hbase.KeyValue;</span> |
| <span class="source-line-no">043</span><span id="line-43">import org.apache.hadoop.hbase.TableName;</span> |
| <span class="source-line-no">044</span><span id="line-44">import org.apache.hadoop.hbase.Waiter;</span> |
| <span class="source-line-no">045</span><span id="line-45">import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;</span> |
| <span class="source-line-no">046</span><span id="line-46">import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;</span> |
| <span class="source-line-no">047</span><span id="line-47">import org.apache.hadoop.hbase.client.RegionInfo;</span> |
| <span class="source-line-no">048</span><span id="line-48">import org.apache.hadoop.hbase.client.RegionInfoBuilder;</span> |
| <span class="source-line-no">049</span><span id="line-49">import org.apache.hadoop.hbase.fs.HFileSystem;</span> |
| <span class="source-line-no">050</span><span id="line-50">import org.apache.hadoop.hbase.io.ByteBuffAllocator;</span> |
| <span class="source-line-no">051</span><span id="line-51">import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;</span> |
| <span class="source-line-no">052</span><span id="line-52">import org.apache.hadoop.hbase.io.hfile.bucket.BucketEntry;</span> |
| <span class="source-line-no">053</span><span id="line-53">import org.apache.hadoop.hbase.regionserver.BloomType;</span> |
| <span class="source-line-no">054</span><span id="line-54">import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;</span> |
| <span class="source-line-no">055</span><span id="line-55">import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;</span> |
| <span class="source-line-no">056</span><span id="line-56">import org.apache.hadoop.hbase.regionserver.HStoreFile;</span> |
| <span class="source-line-no">057</span><span id="line-57">import org.apache.hadoop.hbase.regionserver.StoreFileWriter;</span> |
| <span class="source-line-no">058</span><span id="line-58">import org.apache.hadoop.hbase.testclassification.IOTests;</span> |
| <span class="source-line-no">059</span><span id="line-59">import org.apache.hadoop.hbase.testclassification.MediumTests;</span> |
| <span class="source-line-no">060</span><span id="line-60">import org.apache.hadoop.hbase.util.Bytes;</span> |
| <span class="source-line-no">061</span><span id="line-61">import org.junit.After;</span> |
| <span class="source-line-no">062</span><span id="line-62">import org.junit.Before;</span> |
| <span class="source-line-no">063</span><span id="line-63">import org.junit.ClassRule;</span> |
| <span class="source-line-no">064</span><span id="line-64">import org.junit.Rule;</span> |
| <span class="source-line-no">065</span><span id="line-65">import org.junit.Test;</span> |
| <span class="source-line-no">066</span><span id="line-66">import org.junit.experimental.categories.Category;</span> |
| <span class="source-line-no">067</span><span id="line-67">import org.junit.rules.TestName;</span> |
| <span class="source-line-no">068</span><span id="line-68">import org.slf4j.Logger;</span> |
| <span class="source-line-no">069</span><span id="line-69">import org.slf4j.LoggerFactory;</span> |
| <span class="source-line-no">070</span><span id="line-70"></span> |
| <span class="source-line-no">071</span><span id="line-71">import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;</span> |
| <span class="source-line-no">072</span><span id="line-72"></span> |
| <span class="source-line-no">073</span><span id="line-73">@Category({ IOTests.class, MediumTests.class })</span> |
| <span class="source-line-no">074</span><span id="line-74">public class TestPrefetchWithBucketCache {</span> |
| <span class="source-line-no">075</span><span id="line-75"></span> |
| <span class="source-line-no">076</span><span id="line-76"> private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchWithBucketCache.class);</span> |
| <span class="source-line-no">077</span><span id="line-77"></span> |
| <span class="source-line-no">078</span><span id="line-78"> @ClassRule</span> |
| <span class="source-line-no">079</span><span id="line-79"> public static final HBaseClassTestRule CLASS_RULE =</span> |
| <span class="source-line-no">080</span><span id="line-80"> HBaseClassTestRule.forClass(TestPrefetchWithBucketCache.class);</span> |
| <span class="source-line-no">081</span><span id="line-81"></span> |
| <span class="source-line-no">082</span><span id="line-82"> @Rule</span> |
| <span class="source-line-no">083</span><span id="line-83"> public TestName name = new TestName();</span> |
| <span class="source-line-no">084</span><span id="line-84"></span> |
| <span class="source-line-no">085</span><span id="line-85"> private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();</span> |
| <span class="source-line-no">086</span><span id="line-86"></span> |
| <span class="source-line-no">087</span><span id="line-87"> private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;</span> |
| <span class="source-line-no">088</span><span id="line-88"> private static final int DATA_BLOCK_SIZE = 2048;</span> |
| <span class="source-line-no">089</span><span id="line-89"> private Configuration conf;</span> |
| <span class="source-line-no">090</span><span id="line-90"> private CacheConfig cacheConf;</span> |
| <span class="source-line-no">091</span><span id="line-91"> private FileSystem fs;</span> |
| <span class="source-line-no">092</span><span id="line-92"> private BlockCache blockCache;</span> |
| <span class="source-line-no">093</span><span id="line-93"></span> |
| <span class="source-line-no">094</span><span id="line-94"> @Before</span> |
| <span class="source-line-no">095</span><span id="line-95"> public void setUp() throws IOException {</span> |
| <span class="source-line-no">096</span><span id="line-96"> conf = TEST_UTIL.getConfiguration();</span> |
| <span class="source-line-no">097</span><span id="line-97"> conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);</span> |
| <span class="source-line-no">098</span><span id="line-98"> fs = HFileSystem.get(conf);</span> |
| <span class="source-line-no">099</span><span id="line-99"> File testDir = new File(name.getMethodName());</span> |
| <span class="source-line-no">100</span><span id="line-100"> testDir.mkdir();</span> |
| <span class="source-line-no">101</span><span id="line-101"> conf.set(BUCKET_CACHE_IOENGINE_KEY, "file:/" + testDir.getAbsolutePath() + "/bucket.cache");</span> |
| <span class="source-line-no">102</span><span id="line-102"> }</span> |
| <span class="source-line-no">103</span><span id="line-103"></span> |
| <span class="source-line-no">104</span><span id="line-104"> @After</span> |
| <span class="source-line-no">105</span><span id="line-105"> public void tearDown() {</span> |
| <span class="source-line-no">106</span><span id="line-106"> File cacheFile = new File(name.getMethodName() + "/bucket.cache");</span> |
| <span class="source-line-no">107</span><span id="line-107"> File dir = new File(name.getMethodName());</span> |
| <span class="source-line-no">108</span><span id="line-108"> cacheFile.delete();</span> |
| <span class="source-line-no">109</span><span id="line-109"> dir.delete();</span> |
| <span class="source-line-no">110</span><span id="line-110"> }</span> |
| <span class="source-line-no">111</span><span id="line-111"></span> |
| <span class="source-line-no">112</span><span id="line-112"> @Test</span> |
| <span class="source-line-no">113</span><span id="line-113"> public void testPrefetchDoesntOverwork() throws Exception {</span> |
| <span class="source-line-no">114</span><span id="line-114"> conf.setLong(BUCKET_CACHE_SIZE_KEY, 200);</span> |
| <span class="source-line-no">115</span><span id="line-115"> blockCache = BlockCacheFactory.createBlockCache(conf);</span> |
| <span class="source-line-no">116</span><span id="line-116"> cacheConf = new CacheConfig(conf, blockCache);</span> |
| <span class="source-line-no">117</span><span id="line-117"> Path storeFile = writeStoreFile("TestPrefetchDoesntOverwork", 100);</span> |
| <span class="source-line-no">118</span><span id="line-118"> // Prefetches the file blocks</span> |
| <span class="source-line-no">119</span><span id="line-119"> LOG.debug("First read should prefetch the blocks.");</span> |
| <span class="source-line-no">120</span><span id="line-120"> readStoreFile(storeFile);</span> |
| <span class="source-line-no">121</span><span id="line-121"> BucketCache bc = BucketCache.getBucketCacheFromCacheConfig(cacheConf).get();</span> |
| <span class="source-line-no">122</span><span id="line-122"> // Our file should have 6 DATA blocks. We should wait for all of them to be cached</span> |
| <span class="source-line-no">123</span><span id="line-123"> Waiter.waitFor(conf, 300, () -> bc.getBackingMap().size() == 6);</span> |
| <span class="source-line-no">124</span><span id="line-124"> Map<BlockCacheKey, BucketEntry> snapshot = ImmutableMap.copyOf(bc.getBackingMap());</span> |
| <span class="source-line-no">125</span><span id="line-125"> // Reads file again and check we are not prefetching it again</span> |
| <span class="source-line-no">126</span><span id="line-126"> LOG.debug("Second read, no prefetch should happen here.");</span> |
| <span class="source-line-no">127</span><span id="line-127"> readStoreFile(storeFile);</span> |
| <span class="source-line-no">128</span><span id="line-128"> // Makes sure the cache hasn't changed</span> |
| <span class="source-line-no">129</span><span id="line-129"> snapshot.entrySet().forEach(e -> {</span> |
| <span class="source-line-no">130</span><span id="line-130"> BucketEntry entry = bc.getBackingMap().get(e.getKey());</span> |
| <span class="source-line-no">131</span><span id="line-131"> assertNotNull(entry);</span> |
| <span class="source-line-no">132</span><span id="line-132"> assertEquals(e.getValue().getCachedTime(), entry.getCachedTime());</span> |
| <span class="source-line-no">133</span><span id="line-133"> });</span> |
| <span class="source-line-no">134</span><span id="line-134"> // forcibly removes first block from the bc backing map, in order to cause it to be cached again</span> |
| <span class="source-line-no">135</span><span id="line-135"> BlockCacheKey key = snapshot.keySet().stream().findFirst().get();</span> |
| <span class="source-line-no">136</span><span id="line-136"> LOG.debug("removing block {}", key);</span> |
| <span class="source-line-no">137</span><span id="line-137"> bc.getBackingMap().remove(key);</span> |
| <span class="source-line-no">138</span><span id="line-138"> bc.getFullyCachedFiles().get().remove(storeFile.getName());</span> |
| <span class="source-line-no">139</span><span id="line-139"> assertTrue(snapshot.size() > bc.getBackingMap().size());</span> |
| <span class="source-line-no">140</span><span id="line-140"> LOG.debug("Third read should prefetch again, as we removed one block for the file.");</span> |
| <span class="source-line-no">141</span><span id="line-141"> readStoreFile(storeFile);</span> |
| <span class="source-line-no">142</span><span id="line-142"> Waiter.waitFor(conf, 300, () -> snapshot.size() == bc.getBackingMap().size());</span> |
| <span class="source-line-no">143</span><span id="line-143"> assertTrue(snapshot.get(key).getCachedTime() < bc.getBackingMap().get(key).getCachedTime());</span> |
| <span class="source-line-no">144</span><span id="line-144"> }</span> |
| <span class="source-line-no">145</span><span id="line-145"></span> |
| <span class="source-line-no">146</span><span id="line-146"> @Test</span> |
| <span class="source-line-no">147</span><span id="line-147"> public void testPrefetchRefsAfterSplit() throws Exception {</span> |
| <span class="source-line-no">148</span><span id="line-148"> conf.setLong(BUCKET_CACHE_SIZE_KEY, 200);</span> |
| <span class="source-line-no">149</span><span id="line-149"> blockCache = BlockCacheFactory.createBlockCache(conf);</span> |
| <span class="source-line-no">150</span><span id="line-150"> cacheConf = new CacheConfig(conf, blockCache);</span> |
| <span class="source-line-no">151</span><span id="line-151"></span> |
| <span class="source-line-no">152</span><span id="line-152"> Path tableDir = new Path(TEST_UTIL.getDataTestDir(), "testPrefetchRefsAfterSplit");</span> |
| <span class="source-line-no">153</span><span id="line-153"> RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf(tableDir.getName())).build();</span> |
| <span class="source-line-no">154</span><span id="line-154"> Path regionDir = new Path(tableDir, region.getEncodedName());</span> |
| <span class="source-line-no">155</span><span id="line-155"> Path cfDir = new Path(regionDir, "cf");</span> |
| <span class="source-line-no">156</span><span id="line-156"> HRegionFileSystem regionFS =</span> |
| <span class="source-line-no">157</span><span id="line-157"> HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, region);</span> |
| <span class="source-line-no">158</span><span id="line-158"> Path storeFile = writeStoreFile(100, cfDir);</span> |
| <span class="source-line-no">159</span><span id="line-159"></span> |
| <span class="source-line-no">160</span><span id="line-160"> // Prefetches the file blocks</span> |
| <span class="source-line-no">161</span><span id="line-161"> LOG.debug("First read should prefetch the blocks.");</span> |
| <span class="source-line-no">162</span><span id="line-162"> readStoreFile(storeFile);</span> |
| <span class="source-line-no">163</span><span id="line-163"> BucketCache bc = BucketCache.getBucketCacheFromCacheConfig(cacheConf).get();</span> |
| <span class="source-line-no">164</span><span id="line-164"> // Our file should have 6 DATA blocks. We should wait for all of them to be cached</span> |
| <span class="source-line-no">165</span><span id="line-165"> Waiter.waitFor(conf, 300, () -> bc.getBackingMap().size() == 6);</span> |
| <span class="source-line-no">166</span><span id="line-166"></span> |
| <span class="source-line-no">167</span><span id="line-167"> // split the file and return references to the original file</span> |
| <span class="source-line-no">168</span><span id="line-168"> Random rand = ThreadLocalRandom.current();</span> |
| <span class="source-line-no">169</span><span id="line-169"> byte[] splitPoint = RandomKeyValueUtil.randomOrderedKey(rand, 50);</span> |
| <span class="source-line-no">170</span><span id="line-170"> HStoreFile file = new HStoreFile(fs, storeFile, conf, cacheConf, BloomType.NONE, true);</span> |
| <span class="source-line-no">171</span><span id="line-171"> Path ref = regionFS.splitStoreFile(region, "cf", file, splitPoint, false,</span> |
| <span class="source-line-no">172</span><span id="line-172"> new ConstantSizeRegionSplitPolicy());</span> |
| <span class="source-line-no">173</span><span id="line-173"> HStoreFile refHsf = new HStoreFile(this.fs, ref, conf, cacheConf, BloomType.NONE, true);</span> |
| <span class="source-line-no">174</span><span id="line-174"> // starts reader for the ref. The ref should resolve to the original file blocks</span> |
| <span class="source-line-no">175</span><span id="line-175"> // and not duplicate blocks in the cache.</span> |
| <span class="source-line-no">176</span><span id="line-176"> refHsf.initReader();</span> |
| <span class="source-line-no">177</span><span id="line-177"> HFile.Reader reader = refHsf.getReader().getHFileReader();</span> |
| <span class="source-line-no">178</span><span id="line-178"> while (!reader.prefetchComplete()) {</span> |
| <span class="source-line-no">179</span><span id="line-179"> // Sleep for a bit</span> |
| <span class="source-line-no">180</span><span id="line-180"> Thread.sleep(1000);</span> |
| <span class="source-line-no">181</span><span id="line-181"> }</span> |
| <span class="source-line-no">182</span><span id="line-182"> // the ref file blocks keys should actually resolve to the referred file blocks,</span> |
| <span class="source-line-no">183</span><span id="line-183"> // so we should not see additional blocks in the cache.</span> |
| <span class="source-line-no">184</span><span id="line-184"> Waiter.waitFor(conf, 300, () -> bc.getBackingMap().size() == 6);</span> |
| <span class="source-line-no">185</span><span id="line-185"></span> |
| <span class="source-line-no">186</span><span id="line-186"> BlockCacheKey refCacheKey = new BlockCacheKey(ref.getName(), 0);</span> |
| <span class="source-line-no">187</span><span id="line-187"> Cacheable result = bc.getBlock(refCacheKey, true, false, true);</span> |
| <span class="source-line-no">188</span><span id="line-188"> assertNotNull(result);</span> |
| <span class="source-line-no">189</span><span id="line-189"> BlockCacheKey fileCacheKey = new BlockCacheKey(file.getPath().getName(), 0);</span> |
| <span class="source-line-no">190</span><span id="line-190"> assertEquals(result, bc.getBlock(fileCacheKey, true, false, true));</span> |
| <span class="source-line-no">191</span><span id="line-191"> assertNull(bc.getBackingMap().get(refCacheKey));</span> |
| <span class="source-line-no">192</span><span id="line-192"> assertNotNull(bc.getBlockForReference(refCacheKey));</span> |
| <span class="source-line-no">193</span><span id="line-193"> }</span> |
| <span class="source-line-no">194</span><span id="line-194"></span> |
| <span class="source-line-no">195</span><span id="line-195"> @Test</span> |
| <span class="source-line-no">196</span><span id="line-196"> public void testPrefetchInterruptOnCapacity() throws Exception {</span> |
| <span class="source-line-no">197</span><span id="line-197"> conf.setLong(BUCKET_CACHE_SIZE_KEY, 1);</span> |
| <span class="source-line-no">198</span><span id="line-198"> conf.set(BUCKET_CACHE_BUCKETS_KEY, "3072");</span> |
| <span class="source-line-no">199</span><span id="line-199"> conf.setDouble("hbase.bucketcache.acceptfactor", 0.98);</span> |
| <span class="source-line-no">200</span><span id="line-200"> conf.setDouble("hbase.bucketcache.minfactor", 0.95);</span> |
| <span class="source-line-no">201</span><span id="line-201"> conf.setDouble("hbase.bucketcache.extrafreefactor", 0.01);</span> |
| <span class="source-line-no">202</span><span id="line-202"> blockCache = BlockCacheFactory.createBlockCache(conf);</span> |
| <span class="source-line-no">203</span><span id="line-203"> cacheConf = new CacheConfig(conf, blockCache);</span> |
| <span class="source-line-no">204</span><span id="line-204"> Path storeFile = writeStoreFile("testPrefetchInterruptOnCapacity", 10000);</span> |
| <span class="source-line-no">205</span><span id="line-205"> // Prefetches the file blocks</span> |
| <span class="source-line-no">206</span><span id="line-206"> LOG.debug("First read should prefetch the blocks.");</span> |
| <span class="source-line-no">207</span><span id="line-207"> createReaderAndWaitForPrefetchInterruption(storeFile);</span> |
| <span class="source-line-no">208</span><span id="line-208"> BucketCache bc = BucketCache.getBucketCacheFromCacheConfig(cacheConf).get();</span> |
| <span class="source-line-no">209</span><span id="line-209"> long evictionsFirstPrefetch = bc.getStats().getEvictionCount();</span> |
| <span class="source-line-no">210</span><span id="line-210"> LOG.debug("evictions after first prefetch: {}", bc.getStats().getEvictionCount());</span> |
| <span class="source-line-no">211</span><span id="line-211"> HFile.Reader reader = createReaderAndWaitForPrefetchInterruption(storeFile);</span> |
| <span class="source-line-no">212</span><span id="line-212"> LOG.debug("evictions after second prefetch: {}", bc.getStats().getEvictionCount());</span> |
| <span class="source-line-no">213</span><span id="line-213"> assertTrue((bc.getStats().getEvictionCount() - evictionsFirstPrefetch) < 10);</span> |
| <span class="source-line-no">214</span><span id="line-214"> HFileScanner scanner = reader.getScanner(conf, true, true);</span> |
| <span class="source-line-no">215</span><span id="line-215"> scanner.seekTo();</span> |
| <span class="source-line-no">216</span><span id="line-216"> while (scanner.next()) {</span> |
| <span class="source-line-no">217</span><span id="line-217"> // do a full scan to force some evictions</span> |
| <span class="source-line-no">218</span><span id="line-218"> LOG.trace("Iterating the full scan to evict some blocks");</span> |
| <span class="source-line-no">219</span><span id="line-219"> }</span> |
| <span class="source-line-no">220</span><span id="line-220"> scanner.close();</span> |
| <span class="source-line-no">221</span><span id="line-221"> LOG.debug("evictions after scanner: {}", bc.getStats().getEvictionCount());</span> |
| <span class="source-line-no">222</span><span id="line-222"> // The scanner should had triggered at least 3x evictions from the prefetch,</span> |
| <span class="source-line-no">223</span><span id="line-223"> // as we try cache each block without interruption.</span> |
| <span class="source-line-no">224</span><span id="line-224"> assertTrue(bc.getStats().getEvictionCount() > evictionsFirstPrefetch);</span> |
| <span class="source-line-no">225</span><span id="line-225"> }</span> |
| <span class="source-line-no">226</span><span id="line-226"></span> |
| <span class="source-line-no">227</span><span id="line-227"> @Test</span> |
| <span class="source-line-no">228</span><span id="line-228"> public void testPrefetchDoesntInterruptInMemoryOnCapacity() throws Exception {</span> |
| <span class="source-line-no">229</span><span id="line-229"> conf.setLong(BUCKET_CACHE_SIZE_KEY, 1);</span> |
| <span class="source-line-no">230</span><span id="line-230"> conf.set(BUCKET_CACHE_BUCKETS_KEY, "3072");</span> |
| <span class="source-line-no">231</span><span id="line-231"> conf.setDouble("hbase.bucketcache.acceptfactor", 0.98);</span> |
| <span class="source-line-no">232</span><span id="line-232"> conf.setDouble("hbase.bucketcache.minfactor", 0.95);</span> |
| <span class="source-line-no">233</span><span id="line-233"> conf.setDouble("hbase.bucketcache.extrafreefactor", 0.01);</span> |
| <span class="source-line-no">234</span><span id="line-234"> blockCache = BlockCacheFactory.createBlockCache(conf);</span> |
| <span class="source-line-no">235</span><span id="line-235"> ColumnFamilyDescriptor family =</span> |
| <span class="source-line-no">236</span><span id="line-236"> ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f")).setInMemory(true).build();</span> |
| <span class="source-line-no">237</span><span id="line-237"> cacheConf = new CacheConfig(conf, family, blockCache, ByteBuffAllocator.HEAP);</span> |
| <span class="source-line-no">238</span><span id="line-238"> Path storeFile = writeStoreFile("testPrefetchDoesntInterruptInMemoryOnCapacity", 10000);</span> |
| <span class="source-line-no">239</span><span id="line-239"> // Prefetches the file blocks</span> |
| <span class="source-line-no">240</span><span id="line-240"> LOG.debug("First read should prefetch the blocks.");</span> |
| <span class="source-line-no">241</span><span id="line-241"> createReaderAndWaitForPrefetchInterruption(storeFile);</span> |
| <span class="source-line-no">242</span><span id="line-242"> BucketCache bc = BucketCache.getBucketCacheFromCacheConfig(cacheConf).get();</span> |
| <span class="source-line-no">243</span><span id="line-243"> assertTrue(bc.getStats().getEvictedCount() > 200);</span> |
| <span class="source-line-no">244</span><span id="line-244"> }</span> |
| <span class="source-line-no">245</span><span id="line-245"></span> |
| <span class="source-line-no">246</span><span id="line-246"> @Test</span> |
| <span class="source-line-no">247</span><span id="line-247"> public void testPrefetchMetricProgress() throws Exception {</span> |
| <span class="source-line-no">248</span><span id="line-248"> conf.setLong(BUCKET_CACHE_SIZE_KEY, 200);</span> |
| <span class="source-line-no">249</span><span id="line-249"> blockCache = BlockCacheFactory.createBlockCache(conf);</span> |
| <span class="source-line-no">250</span><span id="line-250"> cacheConf = new CacheConfig(conf, blockCache);</span> |
| <span class="source-line-no">251</span><span id="line-251"> Path storeFile = writeStoreFile("testPrefetchMetricsProgress", 100);</span> |
| <span class="source-line-no">252</span><span id="line-252"> // Prefetches the file blocks</span> |
| <span class="source-line-no">253</span><span id="line-253"> LOG.debug("First read should prefetch the blocks.");</span> |
| <span class="source-line-no">254</span><span id="line-254"> readStoreFile(storeFile);</span> |
| <span class="source-line-no">255</span><span id="line-255"> String regionName = storeFile.getParent().getParent().getName();</span> |
| <span class="source-line-no">256</span><span id="line-256"> BucketCache bc = BucketCache.getBucketCacheFromCacheConfig(cacheConf).get();</span> |
| <span class="source-line-no">257</span><span id="line-257"> MutableLong regionCachedSize = new MutableLong(0);</span> |
| <span class="source-line-no">258</span><span id="line-258"> // Our file should have 6 DATA blocks. We should wait for all of them to be cached</span> |
| <span class="source-line-no">259</span><span id="line-259"> long waitedTime = Waiter.waitFor(conf, 300, () -> {</span> |
| <span class="source-line-no">260</span><span id="line-260"> if (bc.getBackingMap().size() > 0) {</span> |
| <span class="source-line-no">261</span><span id="line-261"> long currentSize = bc.getRegionCachedInfo().get().get(regionName);</span> |
| <span class="source-line-no">262</span><span id="line-262"> assertTrue(regionCachedSize.getValue() <= currentSize);</span> |
| <span class="source-line-no">263</span><span id="line-263"> LOG.debug("Logging progress of region caching: {}", currentSize);</span> |
| <span class="source-line-no">264</span><span id="line-264"> regionCachedSize.setValue(currentSize);</span> |
| <span class="source-line-no">265</span><span id="line-265"> }</span> |
| <span class="source-line-no">266</span><span id="line-266"> return bc.getBackingMap().size() == 6;</span> |
| <span class="source-line-no">267</span><span id="line-267"> });</span> |
| <span class="source-line-no">268</span><span id="line-268"> }</span> |
| <span class="source-line-no">269</span><span id="line-269"></span> |
| <span class="source-line-no">270</span><span id="line-270"> private void readStoreFile(Path storeFilePath) throws Exception {</span> |
| <span class="source-line-no">271</span><span id="line-271"> readStoreFile(storeFilePath, (r, o) -> {</span> |
| <span class="source-line-no">272</span><span id="line-272"> HFileBlock block = null;</span> |
| <span class="source-line-no">273</span><span id="line-273"> try {</span> |
| <span class="source-line-no">274</span><span id="line-274"> block = r.readBlock(o, -1, false, true, false, true, null, null);</span> |
| <span class="source-line-no">275</span><span id="line-275"> } catch (IOException e) {</span> |
| <span class="source-line-no">276</span><span id="line-276"> fail(e.getMessage());</span> |
| <span class="source-line-no">277</span><span id="line-277"> }</span> |
| <span class="source-line-no">278</span><span id="line-278"> return block;</span> |
| <span class="source-line-no">279</span><span id="line-279"> }, (key, block) -> {</span> |
| <span class="source-line-no">280</span><span id="line-280"> boolean isCached = blockCache.getBlock(key, true, false, true) != null;</span> |
| <span class="source-line-no">281</span><span id="line-281"> if (</span> |
| <span class="source-line-no">282</span><span id="line-282"> block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX</span> |
| <span class="source-line-no">283</span><span id="line-283"> || block.getBlockType() == BlockType.INTERMEDIATE_INDEX</span> |
| <span class="source-line-no">284</span><span id="line-284"> ) {</span> |
| <span class="source-line-no">285</span><span id="line-285"> assertTrue(isCached);</span> |
| <span class="source-line-no">286</span><span id="line-286"> }</span> |
| <span class="source-line-no">287</span><span id="line-287"> });</span> |
| <span class="source-line-no">288</span><span id="line-288"> }</span> |
| <span class="source-line-no">289</span><span id="line-289"></span> |
| <span class="source-line-no">290</span><span id="line-290"> private void readStoreFile(Path storeFilePath,</span> |
| <span class="source-line-no">291</span><span id="line-291"> BiFunction<HFile.Reader, Long, HFileBlock> readFunction,</span> |
| <span class="source-line-no">292</span><span id="line-292"> BiConsumer<BlockCacheKey, HFileBlock> validationFunction) throws Exception {</span> |
| <span class="source-line-no">293</span><span id="line-293"> // Open the file</span> |
| <span class="source-line-no">294</span><span id="line-294"> HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);</span> |
| <span class="source-line-no">295</span><span id="line-295"></span> |
| <span class="source-line-no">296</span><span id="line-296"> while (!reader.prefetchComplete()) {</span> |
| <span class="source-line-no">297</span><span id="line-297"> // Sleep for a bit</span> |
| <span class="source-line-no">298</span><span id="line-298"> Thread.sleep(1000);</span> |
| <span class="source-line-no">299</span><span id="line-299"> }</span> |
| <span class="source-line-no">300</span><span id="line-300"> long offset = 0;</span> |
| <span class="source-line-no">301</span><span id="line-301"> long sizeForDataBlocks = 0;</span> |
| <span class="source-line-no">302</span><span id="line-302"> while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {</span> |
| <span class="source-line-no">303</span><span id="line-303"> HFileBlock block = readFunction.apply(reader, offset);</span> |
| <span class="source-line-no">304</span><span id="line-304"> BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);</span> |
| <span class="source-line-no">305</span><span id="line-305"> validationFunction.accept(blockCacheKey, block);</span> |
| <span class="source-line-no">306</span><span id="line-306"> offset += block.getOnDiskSizeWithHeader();</span> |
| <span class="source-line-no">307</span><span id="line-307"> }</span> |
| <span class="source-line-no">308</span><span id="line-308"> }</span> |
| <span class="source-line-no">309</span><span id="line-309"></span> |
| <span class="source-line-no">310</span><span id="line-310"> private HFile.Reader createReaderAndWaitForPrefetchInterruption(Path storeFilePath)</span> |
| <span class="source-line-no">311</span><span id="line-311"> throws Exception {</span> |
| <span class="source-line-no">312</span><span id="line-312"> // Open the file</span> |
| <span class="source-line-no">313</span><span id="line-313"> HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);</span> |
| <span class="source-line-no">314</span><span id="line-314"></span> |
| <span class="source-line-no">315</span><span id="line-315"> while (!reader.prefetchComplete()) {</span> |
| <span class="source-line-no">316</span><span id="line-316"> // Sleep for a bit</span> |
| <span class="source-line-no">317</span><span id="line-317"> Thread.sleep(1000);</span> |
| <span class="source-line-no">318</span><span id="line-318"> }</span> |
| <span class="source-line-no">319</span><span id="line-319"> assertEquals(0, BucketCache.getBucketCacheFromCacheConfig(cacheConf).get().getFullyCachedFiles()</span> |
| <span class="source-line-no">320</span><span id="line-320"> .get().size());</span> |
| <span class="source-line-no">321</span><span id="line-321"></span> |
| <span class="source-line-no">322</span><span id="line-322"> return reader;</span> |
| <span class="source-line-no">323</span><span id="line-323"> }</span> |
| <span class="source-line-no">324</span><span id="line-324"></span> |
| <span class="source-line-no">325</span><span id="line-325"> private Path writeStoreFile(String fname, int numKVs) throws IOException {</span> |
| <span class="source-line-no">326</span><span id="line-326"> HFileContext meta = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();</span> |
| <span class="source-line-no">327</span><span id="line-327"> return writeStoreFile(fname, meta, numKVs);</span> |
| <span class="source-line-no">328</span><span id="line-328"> }</span> |
| <span class="source-line-no">329</span><span id="line-329"></span> |
| <span class="source-line-no">330</span><span id="line-330"> private Path writeStoreFile(int numKVs, Path regionCFDir) throws IOException {</span> |
| <span class="source-line-no">331</span><span id="line-331"> HFileContext meta = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();</span> |
| <span class="source-line-no">332</span><span id="line-332"> return writeStoreFile(meta, numKVs, regionCFDir);</span> |
| <span class="source-line-no">333</span><span id="line-333"> }</span> |
| <span class="source-line-no">334</span><span id="line-334"></span> |
| <span class="source-line-no">335</span><span id="line-335"> private Path writeStoreFile(String fname, HFileContext context, int numKVs) throws IOException {</span> |
| <span class="source-line-no">336</span><span id="line-336"> return writeStoreFile(context, numKVs, new Path(TEST_UTIL.getDataTestDir(), fname));</span> |
| <span class="source-line-no">337</span><span id="line-337"> }</span> |
| <span class="source-line-no">338</span><span id="line-338"></span> |
| <span class="source-line-no">339</span><span id="line-339"> private Path writeStoreFile(HFileContext context, int numKVs, Path regionCFDir)</span> |
| <span class="source-line-no">340</span><span id="line-340"> throws IOException {</span> |
| <span class="source-line-no">341</span><span id="line-341"> StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)</span> |
| <span class="source-line-no">342</span><span id="line-342"> .withOutputDir(regionCFDir).withFileContext(context).build();</span> |
| <span class="source-line-no">343</span><span id="line-343"> Random rand = ThreadLocalRandom.current();</span> |
| <span class="source-line-no">344</span><span id="line-344"> final int rowLen = 32;</span> |
| <span class="source-line-no">345</span><span id="line-345"> for (int i = 0; i < numKVs; ++i) {</span> |
| <span class="source-line-no">346</span><span id="line-346"> byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);</span> |
| <span class="source-line-no">347</span><span id="line-347"> byte[] v = RandomKeyValueUtil.randomValue(rand);</span> |
| <span class="source-line-no">348</span><span id="line-348"> int cfLen = rand.nextInt(k.length - rowLen + 1);</span> |
| <span class="source-line-no">349</span><span id="line-349"> KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,</span> |
| <span class="source-line-no">350</span><span id="line-350"> k.length - rowLen - cfLen, rand.nextLong(), generateKeyType(rand), v, 0, v.length);</span> |
| <span class="source-line-no">351</span><span id="line-351"> sfw.append(kv);</span> |
| <span class="source-line-no">352</span><span id="line-352"> }</span> |
| <span class="source-line-no">353</span><span id="line-353"></span> |
| <span class="source-line-no">354</span><span id="line-354"> sfw.close();</span> |
| <span class="source-line-no">355</span><span id="line-355"> return sfw.getPath();</span> |
| <span class="source-line-no">356</span><span id="line-356"> }</span> |
| <span class="source-line-no">357</span><span id="line-357"></span> |
| <span class="source-line-no">358</span><span id="line-358"> public static KeyValue.Type generateKeyType(Random rand) {</span> |
| <span class="source-line-no">359</span><span id="line-359"> if (rand.nextBoolean()) {</span> |
| <span class="source-line-no">360</span><span id="line-360"> // Let's make half of KVs puts.</span> |
| <span class="source-line-no">361</span><span id="line-361"> return KeyValue.Type.Put;</span> |
| <span class="source-line-no">362</span><span id="line-362"> } else {</span> |
| <span class="source-line-no">363</span><span id="line-363"> KeyValue.Type keyType = KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];</span> |
| <span class="source-line-no">364</span><span id="line-364"> if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {</span> |
| <span class="source-line-no">365</span><span id="line-365"> throw new RuntimeException("Generated an invalid key type: " + keyType + ". "</span> |
| <span class="source-line-no">366</span><span id="line-366"> + "Probably the layout of KeyValue.Type has changed.");</span> |
| <span class="source-line-no">367</span><span id="line-367"> }</span> |
| <span class="source-line-no">368</span><span id="line-368"> return keyType;</span> |
| <span class="source-line-no">369</span><span id="line-369"> }</span> |
| <span class="source-line-no">370</span><span id="line-370"> }</span> |
| <span class="source-line-no">371</span><span id="line-371">}</span> |
| </pre> |
| </div> |
| </main> |
| </body> |
| </html> |