From 8c93b51784c151a4bf33ceb0b2a624efed183971 Mon Sep 17 00:00:00 2001
From: "Tak Lon (Stephen) Wu"
Date: Fri, 20 Jan 2023 09:34:06 -0800
Subject: [PATCH] Revert "HBASE-27539 Encapsulate and centralise access to ref
 count through StoreFileInfo (#4939)"

This reverts commit 1e53e1e486119654f68cf4acf025cdf8fe40482a.

Revert reason: the change to the StoreFileReader constructor breaks API
backward compatibility for the next branch-2.5 patch release.

---
 .../hadoop/hbase/io/HalfStoreFileReader.java  |  7 ++-
 .../hadoop/hbase/regionserver/HStoreFile.java |  8 +--
 .../hbase/regionserver/StoreFileInfo.java     | 23 ++------
 .../hbase/regionserver/StoreFileReader.java   | 27 +++++----
 .../hbase/regionserver/StoreFileWriter.java   |  2 +-
 .../hbase/tool/LoadIncrementalHFiles.java     |  9 ++-
 .../hbase/io/TestHalfStoreFileReader.java     | 23 ++++----
 .../hbase/regionserver/TestHStoreFile.java    | 58 ++++++++-----------
 .../TestRowPrefixBloomFilter.java             | 41 ++++++-------
 ...estStoreFileScannerWithTagCompression.java | 18 +++---
 10 files changed, 90 insertions(+), 126 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index cc680173a4e3..95665391740e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Optional;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
@@ -30,7 +31,6 @@
 import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
-import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -69,12 +69,13 @@ public class HalfStoreFileReader extends StoreFileReader {
    * @param fileInfo  HFile info
    * @param cacheConf CacheConfig
    * @param r         original reference file (contains top or bottom)
+   * @param refCount  reference count
    * @param conf      Configuration
    */
   public HalfStoreFileReader(final ReaderContext context, final HFileInfo fileInfo,
-    final CacheConfig cacheConf, final Reference r, StoreFileInfo storeFileInfo,
+    final CacheConfig cacheConf, final Reference r, AtomicInteger refCount,
     final Configuration conf) throws IOException {
-    super(context, fileInfo, cacheConf, storeFileInfo, conf);
+    super(context, fileInfo, cacheConf, refCount, conf);
     // This is not actual midkey for this half-file; its just border
     // around which we split top and bottom. Have to look in files to find
     // actual last and first keys for bottom and top halves. Half-files don't
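The hunk above restores the branch-2.5 constructor shape, in which callers hand the shared counter to the half reader directly instead of passing the whole StoreFileInfo. A minimal sketch of a call site against the restored signature (variable names are illustrative; the reader context, cache config, reference and configuration are assumed to be set up as in the hunks further down):

    AtomicInteger sharedRefCount = new AtomicInteger(0);
    HalfStoreFileReader half = new HalfStoreFileReader(context, fileInfo, cacheConf,
      reference, sharedRefCount, conf);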
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index 35e1309fabc9..58d97a8743db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -349,12 +349,12 @@ public boolean isCompactedAway() {
   }

   public int getRefCount() {
-    return fileInfo.getRefCount();
+    return fileInfo.refCount.get();
   }

   /** Returns true if the file is still used in reads */
   public boolean isReferencedInReads() {
-    int rc = fileInfo.getRefCount();
+    int rc = fileInfo.refCount.get();
     assert rc >= 0; // we should not go negative.
     return rc > 0;
   }
@@ -653,11 +653,11 @@ Set<String> getCompactedStoreFiles() {
   }

   long increaseRefCount() {
-    return this.fileInfo.increaseRefCount();
+    return this.fileInfo.refCount.incrementAndGet();
   }

   long decreaseRefCount() {
-    return this.fileInfo.decreaseRefCount();
+    return this.fileInfo.refCount.decrementAndGet();
   }

   static void increaseStoreFilesRefeCount(Collection<HStoreFile> storeFiles) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 96bf5a5bf9f4..ae840bcfa7bb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -107,7 +107,7 @@ public class StoreFileInfo implements Configurable {
   // Counter that is incremented every time a scanner is created on the
   // store file. It is decremented when the scan on the store file is
   // done.
-  private final AtomicInteger refCount = new AtomicInteger(0);
+  final AtomicInteger refCount = new AtomicInteger(0);

   /**
    * Create a Store File Info
@@ -274,13 +274,12 @@ public HDFSBlocksDistribution getHDFSBlockDistribution() {
     return this.hdfsBlocksDistribution;
   }

-  public StoreFileReader createReader(ReaderContext context, CacheConfig cacheConf)
-    throws IOException {
+  StoreFileReader createReader(ReaderContext context, CacheConfig cacheConf) throws IOException {
     StoreFileReader reader = null;
     if (this.reference != null) {
-      reader = new HalfStoreFileReader(context, hfileInfo, cacheConf, reference, this, conf);
+      reader = new HalfStoreFileReader(context, hfileInfo, cacheConf, reference, refCount, conf);
     } else {
-      reader = new StoreFileReader(context, hfileInfo, cacheConf, this, conf);
+      reader = new StoreFileReader(context, hfileInfo, cacheConf, refCount, conf);
     }
     return reader;
   }
@@ -650,7 +649,7 @@ boolean isNoReadahead() {
     return this.noReadahead;
   }

-  public HFileInfo getHFileInfo() {
+  HFileInfo getHFileInfo() {
     return hfileInfo;
   }

@@ -682,16 +681,4 @@ public void initHFileInfo(ReaderContext context) throws IOException {
     this.hfileInfo = new HFileInfo(context, conf);
   }

-  int getRefCount() {
-    return this.refCount.get();
-  }
-
-  int increaseRefCount() {
-    return this.refCount.incrementAndGet();
-  }
-
-  int decreaseRefCount() {
-    return this.refCount.decrementAndGet();
-  }
-
 }
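What the reverted encapsulation guarded is a plain AtomicInteger that every reader of the same store file shares, so concurrent scanner opens and closes stay race-free and the file cannot be archived while scans are in flight. A self-contained illustration of that sharing contract (plain Java, no HBase types; the two "readers" are stand-ins):

    import java.util.concurrent.atomic.AtomicInteger;

    public class RefCountSketch {
      public static void main(String[] args) {
        // One counter per store file, shared by all of its readers.
        AtomicInteger refCount = new AtomicInteger(0);
        refCount.incrementAndGet(); // reader A opens a scanner
        refCount.incrementAndGet(); // reader B opens a scanner
        refCount.decrementAndGet(); // reader A finishes its scan
        // The file counts as referenced in reads until the counter drops to zero.
        System.out.println("still referenced: " + (refCount.get() > 0)); // prints true
      }
    }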
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
index a2778e54a725..36c67f41a3e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
@@ -27,6 +27,7 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.SortedSet;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
@@ -77,26 +78,24 @@ public class StoreFileReader {
   private int prefixLength = -1;
   protected Configuration conf;

-  /**
-   * All {@link StoreFileReader} for the same StoreFile will share the
-   * {@link StoreFileInfo#refCount}. Counter that is incremented every time a scanner is created on
-   * the store file. It is decremented when the scan on the store file is done.
-   */
-  private final StoreFileInfo storeFileInfo;
+  // Counter that is incremented every time a scanner is created on the
+  // store file. It is decremented when the scan on the store file is
+  // done. All StoreFileReader for the same StoreFile will share this counter.
+  private final AtomicInteger refCount;
   private final ReaderContext context;

-  private StoreFileReader(HFile.Reader reader, StoreFileInfo storeFileInfo, ReaderContext context,
+  private StoreFileReader(HFile.Reader reader, AtomicInteger refCount, ReaderContext context,
     Configuration conf) {
     this.reader = reader;
     bloomFilterType = BloomType.NONE;
-    this.storeFileInfo = storeFileInfo;
+    this.refCount = refCount;
     this.context = context;
     this.conf = conf;
   }

   public StoreFileReader(ReaderContext context, HFileInfo fileInfo, CacheConfig cacheConf,
-    StoreFileInfo storeFileInfo, Configuration conf) throws IOException {
-    this(HFile.createReader(context, fileInfo, cacheConf, conf), storeFileInfo, context, conf);
+    AtomicInteger refCount, Configuration conf) throws IOException {
+    this(HFile.createReader(context, fileInfo, cacheConf, conf), refCount, context, conf);
   }

   void copyFields(StoreFileReader storeFileReader) throws IOException {
@@ -121,7 +120,7 @@ public boolean isPrimaryReplicaReader() {
    */
   @InterfaceAudience.Private
   StoreFileReader() {
-    this.storeFileInfo = null;
+    this.refCount = new AtomicInteger(0);
     this.reader = null;
     this.context = null;
   }
@@ -152,7 +151,7 @@ public StoreFileScanner getStoreFileScanner(boolean cacheBlocks, boolean pread,
    * is opened.
    */
   int getRefCount() {
-    return storeFileInfo.getRefCount();
+    return refCount.get();
   }

   /**
@@ -160,7 +159,7 @@ int getRefCount() {
    * count so reader is not close until some object is holding the lock
    */
   void incrementRefCount() {
-    storeFileInfo.increaseRefCount();
+    refCount.incrementAndGet();
   }

   /**
@@ -168,7 +167,7 @@ void incrementRefCount() {
    * count, and also, if this is not the common pread reader, we should close it.
    */
   void readCompleted() {
-    storeFileInfo.decreaseRefCount();
+    refCount.decrementAndGet();
     if (context.getReaderType() == ReaderType.STREAM) {
       try {
         reader.close(false);
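This is the hunk the revert reason points at: the public StoreFileReader constructor is a surface that downstream code compiled against earlier 2.5.x releases may link to, and HBASE-27539 changed its fourth parameter from AtomicInteger to StoreFileInfo. A hedged sketch of the downstream call the revert keeps working (hypothetical third-party code; with the #4939 signature it would fail to compile, or fail at runtime with NoSuchMethodError if only binaries are swapped):

    // Compiled against an earlier 2.5.x release:
    StoreFileReader reader =
      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);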
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index b76867d1c223..de32c270565b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -402,7 +402,7 @@ HFile.Writer getHFileWriter() {
    * @param dir Directory to create file in.
    * @return random filename inside passed <code>dir</code>
    */
-  public static Path getUniqueFile(final FileSystem fs, final Path dir) throws IOException {
+  static Path getUniqueFile(final FileSystem fs, final Path dir) throws IOException {
     if (!fs.getFileStatus(dir).isDirectory()) {
       throw new IOException("Expecting " + dir.toString() + " to be a directory");
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index 9700a488aa56..54adfd22a36b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -1168,11 +1168,10 @@ private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
     StoreFileWriter halfWriter = null;
     try {
       ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, inFile).build();
-      StoreFileInfo storeFileInfo =
-        new StoreFileInfo(conf, fs, fs.getFileStatus(inFile), reference);
-      storeFileInfo.initHFileInfo(context);
-      halfReader = (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf);
-      storeFileInfo.getHFileInfo().initMetaAndIndex(halfReader.getHFileReader());
+      HFileInfo hfile = new HFileInfo(context, conf);
+      halfReader =
+        new HalfStoreFileReader(context, hfile, cacheConf, reference, new AtomicInteger(0), conf);
+      hfile.initMetaAndIndex(halfReader.getHFileReader());
       Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

       int blocksize = familyDescriptor.getBlocksize();
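The hunk above also shows the reader bootstrap sequence the revert reinstates at every construction site: build the ReaderContext, create the HFileInfo, construct the reader with a fresh counter, then initialise meta and indexes through the reader. Condensed, with all calls as they appear in this patch (error handling omitted):

    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, inFile).build();
    HFileInfo hfile = new HFileInfo(context, conf);
    HalfStoreFileReader halfReader =
      new HalfStoreFileReader(context, hfile, cacheConf, reference, new AtomicInteger(0), conf);
    hfile.initMetaAndIndex(halfReader.getHFileReader());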
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index 13955ccebfec..7020ad9f559b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -24,6 +24,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -38,10 +39,10 @@
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
-import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -117,12 +118,10 @@ public void testHalfScanAndReseek() throws IOException {
   private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf)
     throws IOException {
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
-    StoreFileInfo storeFileInfo =
-      new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, fs.getFileStatus(p), bottom);
-    storeFileInfo.initHFileInfo(context);
-    final HalfStoreFileReader halfreader =
-      (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, TEST_UTIL.getConfiguration());
+    final HalfStoreFileReader halfreader = new HalfStoreFileReader(context, fileInfo, cacheConf,
+      bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
+    fileInfo.initMetaAndIndex(halfreader.getHFileReader());
     halfreader.loadFileInfo();

     final HFileScanner scanner = halfreader.getScanner(false, false);
@@ -215,12 +214,10 @@ public void testHalfScanner() throws IOException {
   private Cell doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, Cell seekBefore,
     CacheConfig cacheConfig) throws IOException {
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
-    StoreFileInfo storeFileInfo =
-      new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, fs.getFileStatus(p), bottom);
-    storeFileInfo.initHFileInfo(context);
-    final HalfStoreFileReader halfreader =
-      (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConfig);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, TEST_UTIL.getConfiguration());
+    final HalfStoreFileReader halfreader = new HalfStoreFileReader(context, fileInfo, cacheConfig,
+      bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
+    fileInfo.initMetaAndIndex(halfreader.getHFileReader());
     halfreader.loadFileInfo();
     final HFileScanner scanner = halfreader.getScanner(false, false);
     scanner.seekBefore(seekBefore);
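Both test helpers then drive an HFileScanner across the half file. For reference, the scan idiom these tests build on (a sketch assembled from calls visible in this diff, not the full test body):

    final HFileScanner scanner = halfreader.getScanner(false, false);
    scanner.seekTo(); // position at the first cell of the half file
    do {
      Cell curr = scanner.getCell();
      // ... assertions against curr ...
    } while (scanner.next());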
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index fa58e63bc067..734905424018 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -38,6 +38,7 @@
 import java.util.Map;
 import java.util.OptionalLong;
 import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -69,6 +70,7 @@
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
+import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
@@ -109,7 +111,7 @@ public class TestHStoreFile {
   private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class);
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
-  private static Path ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile");
+  private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile").toString();
   private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
   private static final int CKBYTES = 512;
   private static String TEST_FAMILY = "cf";
@@ -565,10 +567,10 @@ private void bloomWriteRead(StoreFileWriter writer, FileSystem fs) throws Except
     writer.close();

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
     StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
@@ -613,10 +615,7 @@ public void testBloomFilter() throws Exception {
     conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);

     // write the file
-    if (!fs.exists(ROOT_DIR)) {
-      fs.mkdirs(ROOT_DIR);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
+    Path f = new Path(ROOT_DIR, name.getMethodName());
     HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
       .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -632,10 +631,7 @@ public void testDeleteFamilyBloomFilter() throws Exception {
     float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);

     // write the file
-    if (!fs.exists(ROOT_DIR)) {
-      fs.mkdirs(ROOT_DIR);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
+    Path f = new Path(ROOT_DIR, name.getMethodName());

     HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
       .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
@@ -654,10 +650,10 @@ public void testDeleteFamilyBloomFilter() throws Exception {
     writer.close();

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();

@@ -693,11 +689,7 @@ public void testDeleteFamilyBloomFilter() throws Exception {

   @Test
   public void testReseek() throws Exception {
     // write the file
-    if (!fs.exists(ROOT_DIR)) {
-      fs.mkdirs(ROOT_DIR);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
-
+    Path f = new Path(ROOT_DIR, name.getMethodName());
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
     StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
@@ -707,10 +699,10 @@ public void testReseek() throws Exception {
     writer.close();

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());

     // Now do reseek with empty KV to position to the beginning of the file

@@ -741,13 +733,9 @@ public void testBloomTypes() throws Exception {
     // 2nd for loop for every column (2*colCount)
     float[] expErr = { 2 * rowCount * colCount * err, 2 * rowCount * 2 * colCount * err };

-    if (!fs.exists(ROOT_DIR)) {
-      fs.mkdirs(ROOT_DIR);
-    }
     for (int x : new int[] { 0, 1 }) {
       // write the file
-      Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
-
+      Path f = new Path(ROOT_DIR, name.getMethodName() + x);
       HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
       // Make a store file and write data to it.
@@ -771,10 +759,10 @@ public void testBloomTypes() throws Exception {
       ReaderContext context =
         new ReaderContextBuilder().withFilePath(f).withFileSize(fs.getFileStatus(f).getLen())
           .withFileSystem(fs).withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f)).build();
-      StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-      storeFileInfo.initHFileInfo(context);
-      StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-      storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+      HFileInfo fileInfo = new HFileInfo(context, conf);
+      StoreFileReader reader =
+        new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+      fileInfo.initMetaAndIndex(reader.getHFileReader());
       reader.loadFileInfo();
       reader.loadBloomfilter();
       StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
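With getUniqueFile package-private again, the tests above go back to deriving one deterministic path per test method instead of asking StoreFileWriter for a random file name; the fs.exists/mkdirs guard disappears with it, since getUniqueFile was the step that required the directory to pre-exist. The pattern, assuming the usual JUnit rule declaration in these classes:

    @Rule
    public TestName name = new TestName();

    // inside a test method:
    Path f = new Path(ROOT_DIR, name.getMethodName());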
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
index acd5362e0363..e4331ea4fe0b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
@@ -24,6 +24,7 @@
 import static org.mockito.Mockito.when;

 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -36,6 +37,7 @@
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
@@ -178,18 +180,15 @@ public void testRowPrefixBloomFilter() throws Exception {
     float expErr = 2 * prefixRowCount * suffixRowCount * err;
     int expKeys = fixedLengthExpKeys;
     // write the file
-    if (!fs.exists(testDir)) {
-      fs.mkdirs(testDir);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
+    Path f = new Path(testDir, name.getMethodName());
     writeStoreFile(f, bt, expKeys);

     // read the file
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();

@@ -252,17 +251,14 @@ public void testRowPrefixBloomFilterWithGet() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     int expKeys = fixedLengthExpKeys;
     // write the file
-    if (!fs.exists(testDir)) {
-      fs.mkdirs(testDir);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
+    Path f = new Path(testDir, name.getMethodName());
     writeStoreFile(f, bt, expKeys);

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();

@@ -308,17 +304,14 @@ public void testRowPrefixBloomFilterWithScan() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     int expKeys = fixedLengthExpKeys;
     // write the file
-    if (!fs.exists(testDir)) {
-      fs.mkdirs(testDir);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
+    Path f = new Path(testDir, name.getMethodName());
     writeStoreFile(f, bt, expKeys);

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
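The same open sequence (context, HFileInfo, reader, initMetaAndIndex, then loadFileInfo and loadBloomfilter) now repeats verbatim in every test above. If it ever needs another copy, a hypothetical helper could express it once (not part of this patch; the name openReader is invented for illustration):

    private static StoreFileReader openReader(FileSystem fs, Path f, Configuration conf,
      CacheConfig cacheConf) throws IOException {
      ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
      HFileInfo fileInfo = new HFileInfo(context, conf);
      StoreFileReader reader =
        new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
      fileInfo.initMetaAndIndex(reader.getHFileReader());
      reader.loadFileInfo();
      reader.loadBloomfilter();
      return reader;
    }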
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index 6a251539ccba..e321bf37b4f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +38,7 @@
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -58,7 +60,8 @@ public class TestStoreFileScannerWithTagCompression {
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static Configuration conf = TEST_UTIL.getConfiguration();
   private static CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
-  private static Path ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression");
+  private static String ROOT_DIR =
+    TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression").toString();
   private static FileSystem fs = null;

   @BeforeClass
@@ -70,10 +73,7 @@ public static void setUp() throws IOException {
   @Test
   public void testReseek() throws Exception {
     // write the file
-    if (!fs.exists(ROOT_DIR)) {
-      fs.mkdirs(ROOT_DIR);
-    }
-    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
+    Path f = new Path(ROOT_DIR, "testReseek");
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
       .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
     // Make a store file and write data to it.
@@ -84,10 +84,10 @@ public void testReseek() throws Exception {
     writer.close();

     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
-    storeFileInfo.initHFileInfo(context);
-    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
-    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
+    HFileInfo fileInfo = new HFileInfo(context, conf);
+    StoreFileReader reader =
+      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
+    fileInfo.initMetaAndIndex(reader.getHFileReader());
     StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
     try {
       // Now do reseek with empty KV to position to the beginning of the file
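Taken together, the revert threads the AtomicInteger back through every construction site because the counter is what ties reader lifetime to file lifetime: StoreFileReader increments it when a scanner opens, decrements it in readCompleted() (closing non-shared stream readers on the way out), and HStoreFile consults the same counter to decide whether the file is still referenced in reads. A compressed view of that lifecycle, stitched together from the hunks above (a sketch, not verbatim source; exception handling around close is omitted):

    void incrementRefCount() {
      refCount.incrementAndGet();               // a scanner was opened
    }

    void readCompleted() {
      refCount.decrementAndGet();               // the scan finished
      if (context.getReaderType() == ReaderType.STREAM) {
        reader.close(false);                    // stream readers are not the shared pread reader
      }
    }

    boolean isReferencedInReads() {             // HStoreFile side
      return refCount.get() > 0;
    }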