/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

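/**
 * Tests {@link HFileDataBlockEncoder} implementations against every {@link DataBlockEncoding},
 * covering cache round-trips, header sizing for blocks written without checksums, and encoding of
 * both on-heap and off-heap cells.
 */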
@RunWith(Parameterized.class)
@Category({ IOTests.class, MediumTests.class })
public class TestHFileDataBlockEncoder {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHFileDataBlockEncoder.class);

  private final Configuration conf = HBaseConfiguration.create();
  private final RedundantKVGenerator generator = new RedundantKVGenerator();
  private HFileDataBlockEncoder blockEncoder;
  private boolean includesMemstoreTS;

  /**
   * Create a test instance for the given data block encoding configuration.
   * @param blockEncoder       the encoding policy under test
   * @param includesMemstoreTS whether generated cells include a memstore timestamp (MVCC version)
   */
  public TestHFileDataBlockEncoder(HFileDataBlockEncoder blockEncoder, boolean includesMemstoreTS) {
    this.blockEncoder = blockEncoder;
    this.includesMemstoreTS = includesMemstoreTS;
    System.err.println("Encoding: " + blockEncoder.getDataBlockEncoding() + ", includesMemstoreTS: "
      + includesMemstoreTS);
  }

  /**
   * Test putting blocks into the cache and taking them out again, with different encoding options.
   */
  @Test
  public void testEncodingWithCache() throws IOException {
    testEncodingWithCacheInternals(false);
    testEncodingWithCacheInternals(true);
  }

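  /**
   * Encodes a sample block, caches it in an {@link LruBlockCache}, reads it back, and verifies
   * that the returned block has the type expected for the configured encoding.
   */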
  private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock cacheBlock = createBlockOnDisk(conf, kvs, block, useTag);

    LruBlockCache blockCache = new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
    BlockCacheKey cacheKey = new BlockCacheKey("test", 0);
    blockCache.cacheBlock(cacheKey, cacheBlock);

    HeapSize heapSize = blockCache.getBlock(cacheKey, false, false, true);
    assertTrue(heapSize instanceof HFileBlock);

    HFileBlock returnedBlock = (HFileBlock) heapSize;

    if (blockEncoder.getDataBlockEncoding() == DataBlockEncoding.NONE) {
      assertEquals(block.getBufferReadOnly(), returnedBlock.getBufferReadOnly());
    } else {
      if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) {
        System.out.println(blockEncoder);
      }
      assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType());
    }
  }


  /** Test for HBASE-5746. */
  @Test
  public void testHeaderSizeInCacheWithoutChecksum() throws Exception {
    testHeaderSizeInCacheWithoutChecksumInternals(false);
    testHeaderSizeInCacheWithoutChecksumInternals(true);
  }

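  /**
   * Builds a block with the old-style header (no HBase checksums) and verifies that the block
   * produced for the cache still reports the shorter
   * {@link HConstants#HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM} dummy header.
   */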
  private void testHeaderSizeInCacheWithoutChecksumInternals(boolean useTags) throws IOException {
    int headerSize = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    // Create some KVs and create the block with an old-style header.
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTags);
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
    ByteBuffer buf = ByteBuffer.allocate(size + headerSize);
    buf.position(headerSize);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext hfileContext =
      new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTags).withBlockSize(0).withChecksumType(ChecksumType.NULL).build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf),
      HFileBlock.FILL_HEADER, 0, 0, -1, hfileContext, ByteBuffAllocator.HEAP);
    HFileBlock cacheBlock = createBlockOnDisk(conf, kvs, block, useTags);
    assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length);
  }


  /**
   * Test encoding a sample block, both with and without tags.
   */
  @Test
  public void testEncoding() throws IOException {
    testEncodingInternals(false);
    testEncodingInternals(true);
  }

  /**
   * Test encoding with off-heap keyvalues. This test just verifies that the encoders work with
   * direct ByteBuffers and does not exercise the getXXXArray() API.
   */
  @Test
  public void testEncodingWithOffheapKeyValue() throws IOException {
    // Usually we have just the block without headers, but don't complicate that here.
    try {
      List<ExtendedCell> kvs = generator.generateTestExtendedOffheapKeyValues(60, true);
      HFileContext meta = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(true).withHBaseCheckSum(true).withCompression(Algorithm.NONE)
        .withBlockSize(0).withChecksumType(ChecksumType.NULL).build();
      writeBlock(conf, kvs, meta, true);
    } catch (IllegalArgumentException e) {
      fail("No exception should have been thrown");
    }
  }

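  /**
   * Encodes a sample block to its on-disk form and verifies that the resulting block type and
   * encoding id match the configured {@link DataBlockEncoding}.
   */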
  private void testEncodingInternals(boolean useTag) throws IOException {
    // Usually we have just the block without headers, but don't complicate that here.
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock blockOnDisk = createBlockOnDisk(conf, kvs, block, useTag);

    if (blockEncoder.getDataBlockEncoding() != DataBlockEncoding.NONE) {
      assertEquals(BlockType.ENCODED_DATA, blockOnDisk.getBlockType());
      assertEquals(blockEncoder.getDataBlockEncoding().getId(),
        blockOnDisk.getDataBlockEncodingId());
    } else {
      assertEquals(BlockType.DATA, blockOnDisk.getBlockType());
    }
  }

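  /**
   * Builds an unencoded DATA block whose backing buffer is laid out as
   * <pre>
   *   [HFILEBLOCK_HEADER_SIZE placeholder bytes][serialized KeyValues]
   * </pre>
   * with HBase checksums enabled and no compression.
   */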
  private HFileBlock getSampleHFileBlock(List<KeyValue> kvs, boolean useTag) {
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
    ByteBuffer buf = ByteBuffer.allocate(size + HConstants.HFILEBLOCK_HEADER_SIZE);
    buf.position(HConstants.HFILEBLOCK_HEADER_SIZE);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext meta = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS)
      .withIncludesTags(useTag).withHBaseCheckSum(true).withCompression(Algorithm.NONE)
      .withBlockSize(0).withChecksumType(ChecksumType.NULL).build();
    return new HFileBlock(BlockType.DATA, size, size, -1, ByteBuff.wrap(buf),
      HFileBlock.FILL_HEADER, 0, 0, -1, meta, ByteBuffAllocator.HEAP);
  }

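  /**
   * Runs the KeyValues through the configured encoder the way a real writer would (mirroring the
   * method body below):
   * <pre>
   *   blockEncoder.startBlockEncoding(context, out);
   *   for (KeyValue kv : kvs) {
   *     blockEncoder.encode(kv, context, out);
   *   }
   *   blockEncoder.endBlockEncoding(context, out, header, BlockType.DATA);
   * </pre>
   * then wraps the encoded bytes in a new {@link HFileBlock}.
   */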
  private HFileBlock createBlockOnDisk(Configuration conf, List<KeyValue> kvs, HFileBlock block,
    boolean useTags) throws IOException {
    HFileBlockEncodingContext context =
      new HFileBlockDefaultEncodingContext(conf, blockEncoder.getDataBlockEncoding(),
        HConstants.HFILEBLOCK_DUMMY_HEADER, block.getHFileContext());

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(block.getDummyHeaderForVersion());
    DataOutputStream dos = new DataOutputStream(baos);
    blockEncoder.startBlockEncoding(context, dos);
    for (KeyValue kv : kvs) {
      blockEncoder.encode(kv, context, dos);
    }
    blockEncoder.endBlockEncoding(context, dos, baos.getBuffer(), BlockType.DATA);
    byte[] encodedBytes = baos.toByteArray();
    int size = encodedBytes.length - block.getDummyHeaderForVersion().length;
    return new HFileBlock(context.getBlockType(), size, size, -1,
      ByteBuff.wrap(ByteBuffer.wrap(encodedBytes)), HFileBlock.FILL_HEADER, 0,
      block.getOnDiskDataSizeWithHeader(), -1, block.getHFileContext(), ByteBuffAllocator.HEAP);
  }

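  /**
   * Starts block encoding and feeds every cell through the encoder. The block is deliberately
   * never finished; the only caller just checks that encoding off-heap cells does not throw.
   */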
  private void writeBlock(Configuration conf, List<ExtendedCell> kvs, HFileContext fileContext,
    boolean useTags) throws IOException {
    HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(conf,
      blockEncoder.getDataBlockEncoding(), HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
    DataOutputStream dos = new DataOutputStream(baos);
    blockEncoder.startBlockEncoding(context, dos);
    for (ExtendedCell kv : kvs) {
      blockEncoder.encode(kv, context, dos);
    }
  }

  /** Returns all possible data block encoding configurations. */
  @Parameters
  public static Collection<Object[]> getAllConfigurations() {
    List<Object[]> configurations = new ArrayList<>();

    for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
      for (boolean includesMemstoreTS : new boolean[] { false, true }) {
        HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE)
          ? NoOpDataBlockEncoder.INSTANCE
          : new HFileDataBlockEncoderImpl(diskAlgo);
        configurations.add(new Object[] { dbe, Boolean.valueOf(includesMemstoreTS) });
      }
    }

    return configurations;
  }
}