/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.io.hfile.CorruptHFileException;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Tests a scanner on a corrupt hfile.
 */
@Category(MediumTests.class)
public class TestScannerWithCorruptHFile {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestScannerWithCorruptHFile.class);

  @Rule
  public TestName name = new TestName();
  private static final byte[] FAMILY_NAME = Bytes.toBytes("f");
  private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();

  @BeforeClass
  public static void setup() throws Exception {
    TEST_UTIL.startMiniCluster(1);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Simulates reading a corrupt hfile by throwing CorruptHFileException from preScannerNext.
   */
  public static class CorruptHFileCoprocessor implements RegionCoprocessor, RegionObserver {
    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }

    @Override
    public boolean preScannerNext(ObserverContext<RegionCoprocessorEnvironment> e,
      InternalScanner s, List<Result> results, int limit, boolean hasMore) throws IOException {
      throw new CorruptHFileException("For test");
    }
  }

  @Test(expected = DoNotRetryIOException.class)
  public void testScanOnCorruptHFile() throws IOException {
    TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addCoprocessor(CorruptHFileCoprocessor.class.getName());
    htd.addFamily(new HColumnDescriptor(FAMILY_NAME));
    Table table = TEST_UTIL.createTable(htd, null);
    try {
      loadTable(table, 1);
      // The CorruptHFileException thrown on the server side should reach the client
      // as a DoNotRetryIOException, so the scan fails fast instead of being retried.
      scan(table);
    } finally {
      table.close();
    }
  }

  private void loadTable(Table table, int numRows) throws IOException {
    for (int i = 0; i < numRows; ++i) {
      byte[] row = Bytes.toBytes(i);
      Put put = new Put(row);
      put.setDurability(Durability.SKIP_WAL);
      put.addColumn(FAMILY_NAME, null, row);
      table.put(put);
    }
  }

  private void scan(Table table) throws IOException {
    Scan scan = new Scan();
    scan.setCaching(1);
    scan.setCacheBlocks(false);
    ResultScanner scanner = table.getScanner(scan);
    try {
      scanner.next();
    } finally {
      scanner.close();
    }
  }
}