/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * A UT to make sure that everything is fine when we fail to load bloom filter.
 * <p>
 * See HBASE-27936 for more details.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestBloomFilterFaulty {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBloomFilterFaulty.class);

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

  private static final byte[] FAMILY = Bytes.toBytes("family");

  private static final byte[] QUAL = Bytes.toBytes("qualifier");

  // Table uses a ROWPREFIX_FIXED_LENGTH bloom filter (prefix length 2) so that both a general
  // bloom filter and a delete family bloom filter get written into the flushed HFiles.
  private static final TableDescriptor TD =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
        .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
        .setConfiguration("RowPrefixBloomFilter.prefix_length", "2").build())
      .build();

  private static final RegionInfo RI = RegionInfoBuilder.newBuilder(TD.getTableName()).build();

  @AfterClass
  public static void tearDownAfterClass() {
    UTIL.cleanupTestDir();
  }

  // The region under test; created fresh per test method in setUp and closed in tearDown.
  private HRegion region;

  @Rule
  public final TestName name = new TestName();

  /**
   * Flushes four HFiles into the region. In each of the four iterations, rows 0-4 are written and
   * then immediately deleted with a family-version delete at the same timestamp (so point gets on
   * them come back empty, as asserted by {@link #testGet()}), while rows 5-9 are written at
   * {@code ts + 1} with value {@code i * 10 + j}; after the last iteration (i = 3) the surviving
   * value for row j is {@code 30 + j}.
   * @throws IOException if any flush cannot proceed
   */
  private void generateHFiles() throws IOException {
    for (int i = 0; i < 4; i++) {
      long ts = EnvironmentEdgeManager.currentTime();

      // rows 0-4: put then delete at the same timestamp, so they should read as empty
      for (int j = 0; j < 5; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts, Bytes.toBytes(i * 10 + j)));
        region.delete(new Delete(row).addFamilyVersion(FAMILY, ts));
      }

      // rows 5-9: live data, written one tick later than the deletes above
      for (int j = 5; j < 10; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts + 1, Bytes.toBytes(i * 10 + j)));
      }

      // each iteration must produce a real HFile, otherwise the test setup is invalid
      FlushResult result = region.flush(true);
      if (
        result.getResult() == FlushResult.Result.CANNOT_FLUSH
          || result.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY
      ) {
        throw new IOException("Can not flush region, flush result: " + result);
      }
    }
  }

  @Before
  public void setUp() throws IOException {
    Path rootDir = UTIL.getDataTestDir(name.getMethodName());
    // generate some hfiles so we can have StoreFileReader which has bloomfilters
    region = HBaseTestingUtil.createRegionAndWAL(RI, rootDir, UTIL.getConfiguration(), TD);
    generateHFiles();
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.initReader();
      StoreFileReader reader = storefile.getReader();
      // make sure we load bloom filters correctly
      assertNotNull(reader.generalBloomFilter);
      assertNotNull(reader.deleteFamilyBloomFilter);
    }
  }

  @After
  public void tearDown() throws IOException {
    if (region != null) {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }

  /**
   * Simulates a bloom filter load failure by marking the given bloom block type faulty on every
   * store file reader in the region, after the filters were already verified to load in setUp.
   */
  private void setFaulty(BlockType type) {
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.getReader().setBloomFilterFaulty(type);
    }
  }

  /**
   * Point gets: rows 0-4 were deleted at their write timestamp so must be empty; rows 5-9 must
   * return the value from the last flush iteration, i.e. {@code 30 + i}.
   */
  private void testGet() throws IOException {
    for (int i = 0; i < 5; i++) {
      assertTrue(region.get(new Get(Bytes.toBytes(i))).isEmpty());
    }
    for (int i = 5; i < 10; i++) {
      assertEquals(30 + i,
        Bytes.toInt(region.get(new Get(Bytes.toBytes(i))).getValue(FAMILY, QUAL)));
    }
  }

  /**
   * Full scan with STREAM read type: only rows 5-9 should be returned, in order, with the values
   * from the last flush iteration.
   */
  private void testStreamScan() throws IOException {
    try (RegionAsTable table = new RegionAsTable(region);
      ResultScanner scanner = table.getScanner(new Scan().setReadType(ReadType.STREAM))) {
      for (int i = 5; i < 10; i++) {
        Result result = scanner.next();
        assertEquals(i, Bytes.toInt(result.getRow()));
        assertEquals(30 + i, Bytes.toInt(result.getValue(FAMILY, QUAL)));
      }
      assertNull(scanner.next());
    }
  }

  /**
   * Runs the read checks with the faulty bloom filter(s) in place, then major compacts and runs
   * them again to make sure compaction is also unaffected by the missing bloom filters.
   */
  private void testRegion() throws IOException {
    // normal read
    testGet();
    // scan with stream reader
    testStreamScan();
    // major compact
    region.compact(true);
    // test read and scan again
    testGet();
    testStreamScan();
  }

  @Test
  public void testNoGeneralBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoDeleteFamilyBloomFilter() throws IOException {
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoAnyBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }
}