/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
 * Test a case when an inline index chunk is converted to a root one. This reproduces the bug in
 * HBASE-6871. We write a carefully selected number of relatively large keys so that we accumulate a
 * leaf index chunk that only goes over the configured index chunk size after adding the last
 * key/value. The bug is in that when we close the file, we convert that inline (leaf-level) chunk
 * into a root chunk, but then look at the size of that root chunk, find that it is greater than the
 * configured chunk size, and split it into a number of intermediate index blocks that should really
 * be leaf-level blocks. If more keys were added, we would flush the leaf-level block, add another
 * entry to the root-level block, and that would prevent us from upgrading the leaf-level chunk to
 * the root chunk, thus not triggering the bug.
 */
@Tag(IOTests.TAG)
@Tag(SmallTests.TAG)
public class TestHFileInlineToRootChunkConversion {

  private final HBaseTestingUtil testUtil = new HBaseTestingUtil();
  private final Configuration conf = testUtil.getConfiguration();

  /**
   * Writes four large keys into an HFile with a small (1024-byte) index chunk size, closes the
   * file, then reopens it and seeks to each written key. The test passes if writing and seeking
   * complete without error; HBASE-6871 caused this sequence to fail.
   */
  @Test
  public void testWriteHFile() throws Exception {
    Path hfPath = new Path(testUtil.getDataTestDir(),
      TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
    int maxChunkSize = 1024;
    FileSystem fs = FileSystem.get(conf);
    // Apply the chunk-size override before anything else is constructed from this conf, so no
    // consumer can snapshot the configuration without it.
    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();
    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf).withFileContext(context)
      .withPath(fs, hfPath).create();
    List<byte[]> keys = new ArrayList<>();
    StringBuilder sb = new StringBuilder();

    for (int i = 0; i < 4; ++i) {
      sb.append("key").append(String.format("%05d", i)).append('_');
      // NOTE: '0' + j is int arithmetic, so append(int) writes the decimal value (48..147) rather
      // than a single character. This is kept deliberately: the resulting ~250-char keys are the
      // "relatively large keys" the HBASE-6871 reproduction depends on.
      for (int j = 0; j < 100; ++j) {
        sb.append('0' + j);
      }
      String keyStr = sb.toString();
      sb.setLength(0);

      byte[] k = Bytes.toBytes(keyStr);
      keys.add(k);
      byte[] v = Bytes.toBytes("value" + i);
      // A single setType(Cell.Type.Put): the original chain also called
      // setType(KeyValue.Type.Maximum.getCode()) earlier, but the later setType overrides it
      // (last call wins in the builder), so the redundant call is removed.
      hfw.append(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(k)
        .setFamily(HConstants.EMPTY_BYTE_ARRAY).setQualifier(HConstants.EMPTY_BYTE_ARRAY)
        .setTimestamp(HConstants.LATEST_TIMESTAMP).setValue(v).setType(Cell.Type.Put).build());
    }
    hfw.close();

    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
    // Scanner doesn't do Cells yet. Fix.
    HFileScanner scanner = reader.getScanner(conf, true, true);
    for (int i = 0; i < keys.size(); ++i) {
      // Seek with a fake key carrying KeyValue.Type.Maximum — the conventional type for seek keys.
      // No return-value assertion here; the test only verifies the seeks do not throw.
      scanner.seekTo(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(keys.get(i)).setFamily(HConstants.EMPTY_BYTE_ARRAY)
        .setQualifier(HConstants.EMPTY_BYTE_ARRAY).setTimestamp(HConstants.LATEST_TIMESTAMP)
        .setType(KeyValue.Type.Maximum.getCode()).setValue(HConstants.EMPTY_BYTE_ARRAY).build());
    }
    reader.close();
  }
}