|
56 | 56 | import org.apache.hadoop.hbase.client.trace.StringTraceRenderer; |
57 | 57 | import org.apache.hadoop.hbase.fs.HFileSystem; |
58 | 58 | import org.apache.hadoop.hbase.io.ByteBuffAllocator; |
| 59 | +import org.apache.hadoop.hbase.io.HFileLink; |
59 | 60 | import org.apache.hadoop.hbase.io.compress.Compression; |
60 | 61 | import org.apache.hadoop.hbase.regionserver.BloomType; |
61 | 62 | import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; |
62 | 63 | import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; |
63 | 64 | import org.apache.hadoop.hbase.regionserver.HStoreFile; |
| 65 | +import org.apache.hadoop.hbase.regionserver.StoreFileInfo; |
64 | 66 | import org.apache.hadoop.hbase.regionserver.StoreFileWriter; |
| 67 | +import org.apache.hadoop.hbase.regionserver.TestHStoreFile; |
65 | 68 | import org.apache.hadoop.hbase.testclassification.IOTests; |
66 | 69 | import org.apache.hadoop.hbase.testclassification.MediumTests; |
67 | 70 | import org.apache.hadoop.hbase.trace.TraceUtil; |
68 | 71 | import org.apache.hadoop.hbase.util.Bytes; |
| 72 | +import org.apache.hadoop.hbase.util.CommonFSUtils; |
69 | 73 | import org.apache.hadoop.hbase.util.Pair; |
70 | 74 | import org.junit.Before; |
71 | 75 | import org.junit.ClassRule; |
@@ -252,6 +256,14 @@ public void testPrefetchDoesntSkipRefs() throws Exception { |
252 | 256 | }); |
253 | 257 | } |
254 | 258 |
|
| 259 | + @Test |
| 260 | + public void testPrefetchDoesntSkipHFileLink() throws Exception { |
| 261 | + testPrefetchWhenHFileLink(c -> { |
| 262 | + boolean isCached = c != null; |
| 263 | + assertTrue(isCached); |
| 264 | + }); |
| 265 | + } |
| 266 | + |
255 | 267 | private void testPrefetchWhenRefs(boolean compactionEnabled, Consumer<Cacheable> test) |
256 | 268 | throws Exception { |
257 | 269 | cacheConf = new CacheConfig(conf, blockCache); |
@@ -287,6 +299,52 @@ private void testPrefetchWhenRefs(boolean compactionEnabled, Consumer<Cacheable> |
287 | 299 | } |
288 | 300 | } |
289 | 301 |
|
  /**
   * Writes a store file, exposes it through an {@link HFileLink}, then opens the linked file with
   * prefetch and walks every block up to the load-on-open section, handing each cached DATA block
   * (as returned by the block cache, possibly {@code null}) to {@code test} for verification.
   * @param test consumer invoked with the block-cache lookup result for every DATA block
   * @throws Exception on any filesystem, prefetch, or read failure
   */
  private void testPrefetchWhenHFileLink(Consumer<Cacheable> test) throws Exception {
    cacheConf = new CacheConfig(conf, blockCache);
    HFileContext context = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();
    Path testDir = TEST_UTIL.getDataTestDir("testPrefetchWhenHFileLink");
    final RegionInfo hri =
      RegionInfoBuilder.newBuilder(TableName.valueOf("testPrefetchWhenHFileLink")).build();
    // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
    Configuration testConf = new Configuration(this.conf);
    CommonFSUtils.setRootDir(testConf, testDir);
    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
      CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);

    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
      .withFilePath(regionFs.createTempName()).withFileContext(context).build();
    TestHStoreFile.writeStoreFile(writer, Bytes.toBytes("testPrefetchWhenHFileLink"),
      Bytes.toBytes("testPrefetchWhenHFileLink"));

    // Promote the temp file into the region, then create a link to it from a second
    // ("test-region") location — the path an HFileLink-backed clone/snapshot would use.
    Path storeFilePath = regionFs.commitStoreFile("cf", writer.getPath());
    Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", "cf"));
    HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
    Path linkFilePath =
      new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));

    // Try to open store file from link
    StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true);
    HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf);
    assertTrue(storeFileInfo.isLink());

    // Opening the reader triggers prefetch-on-open; wait until the background
    // prefetch for this file reports completion before inspecting the cache.
    hsf.initReader();
    HFile.Reader reader = hsf.getReader().getHFileReader();
    while (!reader.prefetchComplete()) {
      // Sleep for a bit
      Thread.sleep(1000);
    }
    // Walk every on-disk block preceding the load-on-open section and hand the
    // cache-lookup result for each DATA block to the supplied consumer.
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null, true);
      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
      if (block.getBlockType() == BlockType.DATA) {
        test.accept(blockCache.getBlock(blockCacheKey, true, false, true));
      }
      offset += block.getOnDiskSizeWithHeader();
    }
    // NOTE(review): the reader/HStoreFile is never closed here — presumably fine for a
    // short-lived test JVM, but consider hsf.closeStoreFile(...) for cleanup; verify API.
  }
| 347 | + |
290 | 348 | private Path writeStoreFile(String fname) throws IOException { |
291 | 349 | HFileContext meta = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build(); |
292 | 350 | return writeStoreFile(fname, meta); |
|
0 commit comments