Skip to content

Commit 29eae5e

Browse files
committed
HBASE-27936 NPE in StoreFileReader.passesGeneralRowPrefixBloomFilter() (#5300)
Need to also copy bloomFilterMetrics in StoreFileReader.copyFields. Signed-off-by: Viraj Jasani <[email protected]> (cherry picked from commit da171c3)
1 parent 8a0ff90 commit 29eae5e

File tree

4 files changed

+235
-29
lines changed

4 files changed

+235
-29
lines changed

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
import static org.apache.hadoop.hbase.regionserver.HStoreFile.DELETE_FAMILY_COUNT;
2323
import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY;
2424

25+
import com.google.errorprone.annotations.RestrictedApi;
2526
import java.io.DataInput;
2627
import java.io.IOException;
2728
import java.util.Map;
@@ -102,6 +103,7 @@ void copyFields(StoreFileReader storeFileReader) throws IOException {
102103
this.generalBloomFilter = storeFileReader.generalBloomFilter;
103104
this.deleteFamilyBloomFilter = storeFileReader.deleteFamilyBloomFilter;
104105
this.bloomFilterType = storeFileReader.bloomFilterType;
106+
this.bloomFilterMetrics = storeFileReader.bloomFilterMetrics;
105107
this.sequenceID = storeFileReader.sequenceID;
106108
this.timeRange = storeFileReader.timeRange;
107109
this.lastBloomKey = storeFileReader.lastBloomKey;
@@ -498,7 +500,9 @@ public Map<byte[], byte[]> loadFileInfo() throws IOException {
498500
return fi;
499501
}
500502

501-
public void loadBloomfilter() {
503+
@RestrictedApi(explanation = "Should only be called in tests", link = "",
504+
allowedOnPath = ".*/src/test/.*")
505+
void loadBloomfilter() {
502506
this.loadBloomfilter(BlockType.GENERAL_BLOOM_META, null);
503507
this.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META, null);
504508
}
@@ -548,7 +552,9 @@ public void loadBloomfilter(BlockType blockType, BloomFilterMetrics metrics) {
548552
}
549553
}
550554

551-
private void setBloomFilterFaulty(BlockType blockType) {
555+
@RestrictedApi(explanation = "Should only be called in tests", link = "",
556+
allowedOnPath = ".*/StoreFileReader.java|.*/src/test/.*")
557+
void setBloomFilterFaulty(BlockType blockType) {
552558
if (blockType == BlockType.GENERAL_BLOOM_META) {
553559
setGeneralBloomFilterFaulty();
554560
} else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
@@ -565,11 +571,11 @@ public long getFilterEntries() {
565571
return generalBloomFilter != null ? generalBloomFilter.getKeyCount() : reader.getEntries();
566572
}
567573

568-
public void setGeneralBloomFilterFaulty() {
574+
private void setGeneralBloomFilterFaulty() {
569575
generalBloomFilter = null;
570576
}
571577

572-
public void setDeleteFamilyBloomFilterFaulty() {
578+
private void setDeleteFamilyBloomFilterFaulty() {
573579
this.deleteFamilyBloomFilter = null;
574580
}
575581

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -154,8 +154,7 @@ private StoreFileWriter(FileSystem fs, Path path, final Configuration conf, Cach
154154
this.bloomType = BloomType.NONE;
155155
}
156156

157-
// initialize delete family Bloom filter when there is NO RowCol Bloom
158-
// filter
157+
// initialize delete family Bloom filter when there is NO RowCol Bloom filter
159158
if (this.bloomType != BloomType.ROWCOL) {
160159
this.deleteFamilyBloomFilterWriter = BloomFilterFactory.createDeleteBloomAtWrite(conf,
161160
cacheConf, (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java

Lines changed: 24 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import com.google.protobuf.ServiceException;
2424
import java.io.IOException;
2525
import java.util.ArrayList;
26-
import java.util.Iterator;
2726
import java.util.List;
2827
import java.util.Map;
2928
import java.util.concurrent.TimeUnit;
@@ -137,39 +136,41 @@ public Result[] get(List<Get> gets) throws IOException {
137136
}
138137

139138
static class RegionScannerToResultScannerAdaptor implements ResultScanner {
140-
private static final Result[] EMPTY_RESULT_ARRAY = new Result[0];
141-
private final RegionScanner regionScanner;
142139

143-
RegionScannerToResultScannerAdaptor(final RegionScanner regionScanner) {
144-
this.regionScanner = regionScanner;
145-
}
140+
private final RegionScanner scanner;
146141

147-
@Override
148-
public Iterator<Result> iterator() {
149-
throw new UnsupportedOperationException();
150-
}
142+
private boolean moreRows = true;
151143

152-
@Override
153-
public Result next() throws IOException {
154-
List<Cell> cells = new ArrayList<>();
155-
return regionScanner.next(cells) ? Result.create(cells) : null;
144+
private final List<Cell> cells = new ArrayList<>();
145+
146+
RegionScannerToResultScannerAdaptor(final RegionScanner scanner) {
147+
this.scanner = scanner;
156148
}
157149

158150
@Override
159-
public Result[] next(int nbRows) throws IOException {
160-
List<Result> results = new ArrayList<>(nbRows);
161-
for (int i = 0; i < nbRows; i++) {
162-
Result result = next();
163-
if (result == null) break;
164-
results.add(result);
151+
public Result next() throws IOException {
152+
if (!moreRows) {
153+
return null;
154+
}
155+
for (;;) {
156+
moreRows = scanner.next(cells);
157+
if (cells.isEmpty()) {
158+
if (!moreRows) {
159+
return null;
160+
} else {
161+
continue;
162+
}
163+
}
164+
Result result = Result.create(cells);
165+
cells.clear();
166+
return result;
165167
}
166-
return results.toArray(EMPTY_RESULT_ARRAY);
167168
}
168169

169170
@Override
170171
public void close() {
171172
try {
172-
regionScanner.close();
173+
scanner.close();
173174
} catch (IOException e) {
174175
throw new RuntimeException(e);
175176
}
@@ -182,7 +183,7 @@ public boolean renewLease() {
182183

183184
@Override
184185
public ScanMetrics getScanMetrics() {
185-
throw new UnsupportedOperationException();
186+
return null;
186187
}
187188
};
188189

Lines changed: 200 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,200 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hbase.regionserver;
19+
20+
import static org.junit.Assert.assertEquals;
21+
import static org.junit.Assert.assertNotNull;
22+
import static org.junit.Assert.assertNull;
23+
import static org.junit.Assert.assertTrue;
24+
25+
import java.io.IOException;
26+
import org.apache.hadoop.fs.Path;
27+
import org.apache.hadoop.hbase.HBaseClassTestRule;
28+
import org.apache.hadoop.hbase.HBaseTestingUtility;
29+
import org.apache.hadoop.hbase.TableName;
30+
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
31+
import org.apache.hadoop.hbase.client.Delete;
32+
import org.apache.hadoop.hbase.client.Get;
33+
import org.apache.hadoop.hbase.client.Put;
34+
import org.apache.hadoop.hbase.client.RegionInfo;
35+
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
36+
import org.apache.hadoop.hbase.client.Result;
37+
import org.apache.hadoop.hbase.client.ResultScanner;
38+
import org.apache.hadoop.hbase.client.Scan;
39+
import org.apache.hadoop.hbase.client.Scan.ReadType;
40+
import org.apache.hadoop.hbase.client.TableDescriptor;
41+
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
42+
import org.apache.hadoop.hbase.io.hfile.BlockType;
43+
import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
44+
import org.apache.hadoop.hbase.testclassification.MediumTests;
45+
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
46+
import org.apache.hadoop.hbase.util.Bytes;
47+
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
48+
import org.junit.After;
49+
import org.junit.AfterClass;
50+
import org.junit.Before;
51+
import org.junit.ClassRule;
52+
import org.junit.Rule;
53+
import org.junit.Test;
54+
import org.junit.experimental.categories.Category;
55+
import org.junit.rules.TestName;
56+
57+
/**
58+
* A UT to make sure that everything is fine when we fail to load bloom filter.
59+
* <p>
60+
* See HBASE-27936 for more details.
61+
*/
62+
@Category({ RegionServerTests.class, MediumTests.class })
63+
public class TestBloomFilterFaulty {
64+
65+
@ClassRule
66+
public static final HBaseClassTestRule CLASS_RULE =
67+
HBaseClassTestRule.forClass(TestBloomFilterFaulty.class);
68+
69+
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
70+
71+
private static final byte[] FAMILY = Bytes.toBytes("family");
72+
73+
private static final byte[] QUAL = Bytes.toBytes("qualifier");
74+
75+
private static final TableDescriptor TD =
76+
TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
77+
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
78+
.setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
79+
.setConfiguration("RowPrefixBloomFilter.prefix_length", "2").build())
80+
.build();
81+
82+
private static final RegionInfo RI = RegionInfoBuilder.newBuilder(TD.getTableName()).build();
83+
84+
@AfterClass
85+
public static void tearDownAfterClass() {
86+
UTIL.cleanupTestDir();
87+
}
88+
89+
private HRegion region;
90+
91+
@Rule
92+
public final TestName name = new TestName();
93+
94+
private void generateHFiles() throws IOException {
95+
for (int i = 0; i < 4; i++) {
96+
long ts = EnvironmentEdgeManager.currentTime();
97+
for (int j = 0; j < 5; j++) {
98+
byte[] row = Bytes.toBytes(j);
99+
region.put(new Put(row).addColumn(FAMILY, QUAL, ts, Bytes.toBytes(i * 10 + j)));
100+
region.delete(new Delete(row).addFamilyVersion(FAMILY, ts));
101+
}
102+
103+
for (int j = 5; j < 10; j++) {
104+
byte[] row = Bytes.toBytes(j);
105+
region.put(new Put(row).addColumn(FAMILY, QUAL, ts + 1, Bytes.toBytes(i * 10 + j)));
106+
}
107+
108+
FlushResult result = region.flush(true);
109+
if (
110+
result.getResult() == FlushResult.Result.CANNOT_FLUSH
111+
|| result.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY
112+
) {
113+
throw new IOException("Can not flush region, flush result: " + result);
114+
}
115+
}
116+
}
117+
118+
@Before
119+
public void setUp() throws IOException {
120+
Path rootDir = UTIL.getDataTestDir(name.getMethodName());
121+
// generate some hfiles so we can have StoreFileReader which has bloomfilters
122+
region = HBaseTestingUtility.createRegionAndWAL(RI, rootDir, UTIL.getConfiguration(), TD);
123+
generateHFiles();
124+
HStore store = region.getStore(FAMILY);
125+
for (HStoreFile storefile : store.getStorefiles()) {
126+
storefile.initReader();
127+
StoreFileReader reader = storefile.getReader();
128+
// make sure we load bloom filters correctly
129+
assertNotNull(reader.generalBloomFilter);
130+
assertNotNull(reader.deleteFamilyBloomFilter);
131+
}
132+
}
133+
134+
@After
135+
public void tearDown() throws IOException {
136+
if (region != null) {
137+
HBaseTestingUtility.closeRegionAndWAL(region);
138+
}
139+
}
140+
141+
private void setFaulty(BlockType type) {
142+
HStore store = region.getStore(FAMILY);
143+
for (HStoreFile storefile : store.getStorefiles()) {
144+
storefile.getReader().setBloomFilterFaulty(type);
145+
}
146+
}
147+
148+
private void testGet() throws IOException {
149+
for (int i = 0; i < 5; i++) {
150+
assertTrue(region.get(new Get(Bytes.toBytes(i))).isEmpty());
151+
}
152+
for (int i = 5; i < 10; i++) {
153+
assertEquals(30 + i,
154+
Bytes.toInt(region.get(new Get(Bytes.toBytes(i))).getValue(FAMILY, QUAL)));
155+
}
156+
}
157+
158+
private void testStreamScan() throws IOException {
159+
try (RegionAsTable table = new RegionAsTable(region);
160+
ResultScanner scanner = table.getScanner(new Scan().setReadType(ReadType.STREAM))) {
161+
for (int i = 5; i < 10; i++) {
162+
Result result = scanner.next();
163+
assertEquals(i, Bytes.toInt(result.getRow()));
164+
assertEquals(30 + i, Bytes.toInt(result.getValue(FAMILY, QUAL)));
165+
}
166+
assertNull(scanner.next());
167+
}
168+
}
169+
170+
private void testRegion() throws IOException {
171+
// normal read
172+
testGet();
173+
// scan with stream reader
174+
testStreamScan();
175+
// major compact
176+
region.compact(true);
177+
// test read and scan again
178+
testGet();
179+
testStreamScan();
180+
}
181+
182+
@Test
183+
public void testNoGeneralBloomFilter() throws IOException {
184+
setFaulty(BlockType.GENERAL_BLOOM_META);
185+
testRegion();
186+
}
187+
188+
@Test
189+
public void testNoDeleteFamilyBloomFilter() throws IOException {
190+
setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
191+
testRegion();
192+
}
193+
194+
@Test
195+
public void testNoAnyBloomFilter() throws IOException {
196+
setFaulty(BlockType.GENERAL_BLOOM_META);
197+
setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
198+
testRegion();
199+
}
200+
}

0 commit comments

Comments (0)