001/**
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.io.hfile;
019
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
037
038/**
039 * Test a case when an inline index chunk is converted to a root one. This reproduces the bug in
040 * HBASE-6871. We write a carefully selected number of relatively large keys so that we accumulate
041 * a leaf index chunk that only goes over the configured index chunk size after adding the last
042 * key/value. The bug is in that when we close the file, we convert that inline (leaf-level) chunk
043 * into a root chunk, but then look at the size of that root chunk, find that it is greater than
044 * the configured chunk size, and split it into a number of intermediate index blocks that should
045 * really be leaf-level blocks. If more keys were added, we would flush the leaf-level block, add
046 * another entry to the root-level block, and that would prevent us from upgrading the leaf-level
047 * chunk to the root chunk, thus not triggering the bug.
048 */
049@Category({IOTests.class, SmallTests.class})
050public class TestHFileInlineToRootChunkConversion {
051
052  @ClassRule
053  public static final HBaseClassTestRule CLASS_RULE =
054      HBaseClassTestRule.forClass(TestHFileInlineToRootChunkConversion.class);
055
056  private final HBaseTestingUtil testUtil = new HBaseTestingUtil();
057  private final Configuration conf = testUtil.getConfiguration();
058
059  @Test
060  public void testWriteHFile() throws Exception {
061    Path hfPath = new Path(testUtil.getDataTestDir(),
062        TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
063    int maxChunkSize = 1024;
064    FileSystem fs = FileSystem.get(conf);
065    CacheConfig cacheConf = new CacheConfig(conf);
066    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
067    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();
068    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)
069            .withFileContext(context)
070            .withPath(fs, hfPath).create();
071    List<byte[]> keys = new ArrayList<>();
072    StringBuilder sb = new StringBuilder();
073
074    for (int i = 0; i < 4; ++i) {
075      sb.append("key" + String.format("%05d", i));
076      sb.append("_");
077      for (int j = 0; j < 100; ++j) {
078        sb.append('0' + j);
079      }
080      String keyStr = sb.toString();
081      sb.setLength(0);
082
083      byte[] k = Bytes.toBytes(keyStr);
084      keys.add(k);
085      byte[] v = Bytes.toBytes("value" + i);
086      hfw.append(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
087        .setRow(k)
088        .setFamily(HConstants.EMPTY_BYTE_ARRAY)
089        .setQualifier(HConstants.EMPTY_BYTE_ARRAY)
090        .setTimestamp(HConstants.LATEST_TIMESTAMP)
091        .setType(KeyValue.Type.Maximum.getCode())
092        .setValue(v).build());
093    }
094    hfw.close();
095
096    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
097    // Scanner doesn't do Cells yet.  Fix.
098    HFileScanner scanner = reader.getScanner(conf, true, true);
099    for (int i = 0; i < keys.size(); ++i) {
100      scanner.seekTo(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
101        .setRow(keys.get(i))
102        .setFamily(HConstants.EMPTY_BYTE_ARRAY)
103        .setQualifier(HConstants.EMPTY_BYTE_ARRAY)
104        .setTimestamp(HConstants.LATEST_TIMESTAMP)
105        .setType(KeyValue.Type.Maximum.getCode())
106        .setValue(HConstants.EMPTY_BYTE_ARRAY).build());
107    }
108    reader.close();
109  }
110}