/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * A unit test to make sure that everything is fine when we fail to load a bloom filter.
 * <p>
 * See HBASE-27936 for more details.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestBloomFilterFaulty {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBloomFilterFaulty.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  private static final byte[] FAMILY = Bytes.toBytes("family");

  private static final byte[] QUAL = Bytes.toBytes("qualifier");

  private static final TableDescriptor TD =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
        .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
        .setConfiguration("RowPrefixBloomFilter.prefix_length", "2").build())
      .build();

  private static final RegionInfo RI = RegionInfoBuilder.newBuilder(TD.getTableName()).build();

  @AfterClass
  public static void tearDownAfterClass() {
    UTIL.cleanupTestDir();
  }

  private HRegion region;

  @Rule
  public final TestName name = new TestName();

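  // Write four HFiles: in each flush, rows 0-4 are put and then deleted via a family-version
  // delete, while rows 5-9 only receive puts, so every store file ends up with both a general
  // bloom filter and a delete family bloom filter (verified in setUp).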
  private void generateHFiles() throws IOException {
    for (int i = 0; i < 4; i++) {
      long ts = EnvironmentEdgeManager.currentTime();
      for (int j = 0; j < 5; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts, Bytes.toBytes(i * 10 + j)));
        region.delete(new Delete(row).addFamilyVersion(FAMILY, ts));
      }

      for (int j = 5; j < 10; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts + 1, Bytes.toBytes(i * 10 + j)));
      }

      FlushResult result = region.flush(true);
      if (
        result.getResult() == FlushResult.Result.CANNOT_FLUSH
          || result.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY
      ) {
        throw new IOException("Can not flush region, flush result: " + result);
      }
    }
  }

  @Before
  public void setUp() throws IOException {
    Path rootDir = UTIL.getDataTestDir(name.getMethodName());
    // generate some hfiles so we can have StoreFileReaders which have bloom filters
    region = HBaseTestingUtility.createRegionAndWAL(RI, rootDir, UTIL.getConfiguration(), TD);
    generateHFiles();
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.initReader();
      StoreFileReader reader = storefile.getReader();
      // make sure we load bloom filters correctly
      assertNotNull(reader.generalBloomFilter);
      assertNotNull(reader.deleteFamilyBloomFilter);
    }
  }

  @After
  public void tearDown() throws IOException {
    if (region != null) {
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }

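  // Mark the bloom filter of the given meta block type as faulty on every store file reader,
  // simulating a failure to load that bloom filter.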
  private void setFaulty(BlockType type) {
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.getReader().setBloomFilterFaulty(type);
    }
  }

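  // Rows 0-4 were deleted in every flush, so gets on them must come back empty; rows 5-9 must
  // return the value written by the last flush (30 + i).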
  private void testGet() throws IOException {
    for (int i = 0; i < 5; i++) {
      assertTrue(region.get(new Get(Bytes.toBytes(i))).isEmpty());
    }
    for (int i = 5; i < 10; i++) {
      assertEquals(30 + i,
        Bytes.toInt(region.get(new Get(Bytes.toBytes(i))).getValue(FAMILY, QUAL)));
    }
  }

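  // Run the same check with a STREAM read type so the stream scanner path is exercised as well.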
  private void testStreamScan() throws IOException {
    try (RegionAsTable table = new RegionAsTable(region);
      ResultScanner scanner = table.getScanner(new Scan().setReadType(ReadType.STREAM))) {
      for (int i = 5; i < 10; i++) {
        Result result = scanner.next();
        assertEquals(i, Bytes.toInt(result.getRow()));
        assertEquals(30 + i, Bytes.toInt(result.getValue(FAMILY, QUAL)));
      }
      assertNull(scanner.next());
    }
  }

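  // Read with get and stream scan, major compact, then read again, to make sure faulty bloom
  // filters break neither normal reads nor compaction.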
  private void testRegion() throws IOException {
    // normal read
    testGet();
    // scan with stream reader
    testStreamScan();
    // major compact
    region.compact(true);
    // test read and scan again
    testGet();
    testStreamScan();
  }

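  // Each test marks one or both bloom filter types as faulty and then verifies the region still
  // behaves correctly.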
  @Test
  public void testNoGeneralBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoDeleteFamilyBloomFilter() throws IOException {
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoAnyBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }
}