Skip to content

Commit 21381ba

Browse files
authored
HBASE-28840: Optimise memory utilization of bucketcache retrieval from persistence. (#6253)
Signed-off-by: Wellington Chevreuil <[email protected]>
1 parent 5cf9170 commit 21381ba

File tree

1 file changed

+15
-20
lines changed
  • hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket

1 file changed

+15
-20
lines changed

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java

Lines changed: 15 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1590,31 +1590,22 @@ private void verifyFileIntegrity(BucketCacheProtos.BucketCacheEntry proto) {
15901590
}
15911591
}
15921592

1593-
private void parsePB(BucketCacheProtos.BucketCacheEntry firstChunk,
1594-
List<BucketCacheProtos.BackingMap> chunks) throws IOException {
1593+
private void parseFirstChunk(BucketCacheProtos.BucketCacheEntry firstChunk) throws IOException {
15951594
fullyCachedFiles.clear();
15961595
Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> pair =
15971596
BucketProtoUtils.fromPB(firstChunk.getDeserializersMap(), firstChunk.getBackingMap(),
15981597
this::createRecycler);
15991598
backingMap.putAll(pair.getFirst());
16001599
blocksByHFile.addAll(pair.getSecond());
16011600
fullyCachedFiles.putAll(BucketProtoUtils.fromPB(firstChunk.getCachedFilesMap()));
1601+
}
16021602

1603-
LOG.debug("Number of blocks after first chunk: {}, blocksByHFile: {}", backingMap.size(),
1604-
fullyCachedFiles.size());
1605-
int i = 1;
1606-
for (BucketCacheProtos.BackingMap chunk : chunks) {
1607-
Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> pair2 =
1608-
BucketProtoUtils.fromPB(firstChunk.getDeserializersMap(), chunk, this::createRecycler);
1609-
backingMap.putAll(pair2.getFirst());
1610-
blocksByHFile.addAll(pair2.getSecond());
1611-
LOG.debug("Number of blocks after {} reading chunk: {}, blocksByHFile: {}", ++i,
1612-
backingMap.size(), fullyCachedFiles.size());
1613-
}
1614-
verifyFileIntegrity(firstChunk);
1615-
verifyCapacityAndClasses(firstChunk.getCacheCapacity(), firstChunk.getIoClass(),
1616-
firstChunk.getMapClass());
1617-
updateRegionSizeMapWhileRetrievingFromFile();
1603+
private void parseChunkPB(BucketCacheProtos.BackingMap chunk,
1604+
java.util.Map<java.lang.Integer, java.lang.String> deserializer) throws IOException {
1605+
Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> pair2 =
1606+
BucketProtoUtils.fromPB(deserializer, chunk, this::createRecycler);
1607+
backingMap.putAll(pair2.getFirst());
1608+
blocksByHFile.addAll(pair2.getSecond());
16181609
}
16191610

16201611
private void parsePB(BucketCacheProtos.BucketCacheEntry proto) throws IOException {
@@ -1669,18 +1660,22 @@ private void retrieveChunkedBackingMap(FileInputStream in, int[] bucketSizes) th
16691660

16701661
LOG.info("Number of chunks: {}, chunk size: {}", numChunks, batchSize);
16711662

1672-
ArrayList<BucketCacheProtos.BackingMap> bucketCacheMaps = new ArrayList<>();
16731663
// Read the first chunk that has all the details.
16741664
BucketCacheProtos.BucketCacheEntry firstChunk =
16751665
BucketCacheProtos.BucketCacheEntry.parseDelimitedFrom(in);
1666+
parseFirstChunk(firstChunk);
16761667

16771668
// Subsequent chunks have the backingMap entries.
16781669
for (int i = 1; i < numChunks; i++) {
16791670
LOG.info("Reading chunk no: {}", i + 1);
1680-
bucketCacheMaps.add(BucketCacheProtos.BackingMap.parseDelimitedFrom(in));
1671+
parseChunkPB(BucketCacheProtos.BackingMap.parseDelimitedFrom(in),
1672+
firstChunk.getDeserializersMap());
16811673
LOG.info("Retrieved chunk: {}", i + 1);
16821674
}
1683-
parsePB(firstChunk, bucketCacheMaps);
1675+
verifyFileIntegrity(firstChunk);
1676+
verifyCapacityAndClasses(firstChunk.getCacheCapacity(), firstChunk.getIoClass(),
1677+
firstChunk.getMapClass());
1678+
updateRegionSizeMapWhileRetrievingFromFile();
16841679
}
16851680

16861681
/**

0 commit comments

Comments
 (0)