/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test a case when an inline index chunk is converted to a root one. This reproduces the bug in
 * HBASE-6871. We write a carefully selected number of relatively large keys so that we accumulate
 * a leaf index chunk that only goes over the configured index chunk size after adding the last
 * key/value. The bug is that when we close the file, we convert that inline (leaf-level) chunk
 * into a root chunk, but then look at the size of that root chunk, find that it is greater than
 * the configured chunk size, and split it into a number of intermediate index blocks that should
 * really be leaf-level blocks. If more keys were added, we would flush the leaf-level block, add
 * another entry to the root-level block, and that would prevent us from upgrading the leaf-level
 * chunk to the root chunk, thus not triggering the bug.
 */
@Category({ IOTests.class, SmallTests.class })
public class TestHFileInlineToRootChunkConversion {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHFileInlineToRootChunkConversion.class);

  private final HBaseTestingUtil testUtil = new HBaseTestingUtil();
  private final Configuration conf = testUtil.getConfiguration();

  @Test
  public void testWriteHFile() throws Exception {
    Path hfPath = new Path(testUtil.getDataTestDir(),
      TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
    int maxChunkSize = 1024;
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    // Use a small index chunk size so that only a few large keys are needed to fill a leaf chunk.
    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();
    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf).withFileContext(context)
      .withPath(fs, hfPath).create();
    List<byte[]> keys = new ArrayList<>();
    StringBuilder sb = new StringBuilder();

    for (int i = 0; i < 4; ++i) {
      sb.append("key" + String.format("%05d", i));
      sb.append("_");
      // Pad the key: '0' + j evaluates to an int, so the numeric value (48..147) is appended,
      // making each key a few hundred bytes long.
      for (int j = 0; j < 100; ++j) {
        sb.append('0' + j);
      }
      String keyStr = sb.toString();
      sb.setLength(0);

      byte[] k = Bytes.toBytes(keyStr);
      keys.add(k);
      byte[] v = Bytes.toBytes("value" + i);
      hfw.append(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(k)
        .setFamily(HConstants.EMPTY_BYTE_ARRAY).setQualifier(HConstants.EMPTY_BYTE_ARRAY)
        .setTimestamp(HConstants.LATEST_TIMESTAMP).setType(KeyValue.Type.Maximum.getCode())
        .setValue(v).setType(Cell.Type.Put).build());
    }
    hfw.close();

    // Closing the writer finalizes the block index; re-open the file and seek to every key we
    // wrote to verify the index is usable.
    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
    // Scanner doesn't do Cells yet. Fix.
    HFileScanner scanner = reader.getScanner(conf, true, true);
    for (int i = 0; i < keys.size(); ++i) {
      scanner.seekTo(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(keys.get(i)).setFamily(HConstants.EMPTY_BYTE_ARRAY)
        .setQualifier(HConstants.EMPTY_BYTE_ARRAY).setTimestamp(HConstants.LATEST_TIMESTAMP)
        .setType(KeyValue.Type.Maximum.getCode()).setValue(HConstants.EMPTY_BYTE_ARRAY).build());
    }
    reader.close();
  }
}