/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Tests {@link HFileDataBlockEncoder} implementations: encoding blocks for
 * the block cache and for disk, across every {@link DataBlockEncoding}
 * algorithm crossed with both settings of the memstore-timestamp flag
 * (see {@link #getAllConfigurations()}).
 */
@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
public class TestHFileDataBlockEncoder {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHFileDataBlockEncoder.class);

  private HFileDataBlockEncoder blockEncoder;
  private RedundantKVGenerator generator = new RedundantKVGenerator();
  private boolean includesMemstoreTS;

  /**
   * Create test for given data block encoding configuration.
   * @param blockEncoder What kind of encoding policy will be used.
   * @param includesMemstoreTS whether generated blocks carry memstore
   *          timestamps (MVCC read points).
   */
  public TestHFileDataBlockEncoder(HFileDataBlockEncoder blockEncoder,
      boolean includesMemstoreTS) {
    this.blockEncoder = blockEncoder;
    this.includesMemstoreTS = includesMemstoreTS;
    System.err.println("Encoding: " + blockEncoder.getDataBlockEncoding()
        + ", includesMemstoreTS: " + includesMemstoreTS);
  }

  /**
   * Test putting and taking out blocks into cache with different
   * encoding options.
   * @throws IOException on test-block creation failure
   */
  @Test
  public void testEncodingWithCache() throws IOException {
    testEncodingWithCacheInternals(false);
    testEncodingWithCacheInternals(true);
  }

  private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTag);

    LruBlockCache blockCache =
        new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
    BlockCacheKey cacheKey = new BlockCacheKey("test", 0);
    blockCache.cacheBlock(cacheKey, cacheBlock);

    HeapSize heapSize = blockCache.getBlock(cacheKey, false, false, true);
    assertTrue(heapSize instanceof HFileBlock);

    // Fixed: stray double semicolon in the original declaration.
    HFileBlock returnedBlock = (HFileBlock) heapSize;

    if (blockEncoder.getDataBlockEncoding() ==
        DataBlockEncoding.NONE) {
      // With no encoding the cached block must round-trip byte-identical.
      assertEquals(block.getBufferReadOnly(), returnedBlock.getBufferReadOnly());
    } else {
      if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) {
        // Extra diagnostics before the assertion below fails.
        System.out.println(blockEncoder);
      }
      assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType());
    }
  }

  /** Test for HBASE-5746. */
  @Test
  public void testHeaderSizeInCacheWithoutChecksum() throws Exception {
    testHeaderSizeInCacheWithoutChecksumInternals(false);
    testHeaderSizeInCacheWithoutChecksumInternals(true);
  }

  private void testHeaderSizeInCacheWithoutChecksumInternals(boolean useTags) throws IOException {
    int headerSize = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    // Create some KVs and create the block with old-style header.
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTags);
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
    ByteBuffer buf = ByteBuffer.allocate(size + headerSize);
    buf.position(headerSize);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext hfileContext = new HFileContextBuilder().withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTags)
        .withBlockSize(0)
        .withChecksumType(ChecksumType.NULL)
        .build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, 0,
        0, -1, hfileContext);
    HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTags);
    // HBASE-5746: a checksum-less block must report the short (v1) header size.
    assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length);
  }

  /**
   * Test encoding of a sample block for on-disk layout.
   * @throws IOException on encoding failure
   */
  @Test
  public void testEncoding() throws IOException {
    testEncodingInternals(false);
    testEncodingInternals(true);
  }

  /**
   * Test encoding with offheap keyvalue. This test just verifies if the encoders
   * work with DBB and does not use the getXXXArray() API
   * @throws IOException on encoding failure
   */
  @Test
  public void testEncodingWithOffheapKeyValue() throws IOException {
    // usually we have just block without headers, but don't complicate that
    try {
      List<Cell> kvs = generator.generateTestExtendedOffheapKeyValues(60, true);
      HFileContext meta = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS)
          .withIncludesTags(true).withHBaseCheckSum(true).withCompression(Algorithm.NONE)
          .withBlockSize(0).withChecksumType(ChecksumType.NULL).build();
      writeBlock(kvs, meta, true);
    } catch (IllegalArgumentException e) {
      // Offheap cells must be encodable without falling back to array access.
      fail("No exception should have been thrown");
    }
  }

  private void testEncodingInternals(boolean useTag) throws IOException {
    // usually we have just block without headers, but don't complicate that
    List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
    HFileBlock block = getSampleHFileBlock(kvs, useTag);
    HFileBlock blockOnDisk = createBlockOnDisk(kvs, block, useTag);

    if (blockEncoder.getDataBlockEncoding() !=
        DataBlockEncoding.NONE) {
      assertEquals(BlockType.ENCODED_DATA, blockOnDisk.getBlockType());
      assertEquals(blockEncoder.getDataBlockEncoding().getId(),
          blockOnDisk.getDataBlockEncodingId());
    } else {
      assertEquals(BlockType.DATA, blockOnDisk.getBlockType());
    }
  }

  /**
   * Builds an unencoded DATA block (header space + serialized KVs) for use as
   * input to the encoder under test.
   */
  private HFileBlock getSampleHFileBlock(List<KeyValue> kvs, boolean useTag) {
    ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
    int size = keyValues.limit();
    ByteBuffer buf = ByteBuffer.allocate(size + HConstants.HFILEBLOCK_HEADER_SIZE);
    buf.position(HConstants.HFILEBLOCK_HEADER_SIZE);
    keyValues.rewind();
    buf.put(keyValues);
    HFileContext meta = new HFileContextBuilder()
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTag)
        .withHBaseCheckSum(true)
        .withCompression(Algorithm.NONE)
        .withBlockSize(0)
        .withChecksumType(ChecksumType.NULL)
        .build();
    HFileBlock b = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, 0,
        0, -1, meta);
    return b;
  }

  /**
   * Runs the full encode cycle (start/encode/end) over {@code kvs} and wraps
   * the result in a new {@link HFileBlock} as it would appear on disk.
   */
  private HFileBlock createBlockOnDisk(List<KeyValue> kvs, HFileBlock block, boolean useTags)
      throws IOException {
    int size;
    HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(
        blockEncoder.getDataBlockEncoding(), HConstants.HFILEBLOCK_DUMMY_HEADER,
        block.getHFileContext());

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(block.getDummyHeaderForVersion());
    DataOutputStream dos = new DataOutputStream(baos);
    blockEncoder.startBlockEncoding(context, dos);
    for (KeyValue kv : kvs) {
      blockEncoder.encode(kv, context, dos);
    }
    blockEncoder.endBlockEncoding(context, dos, baos.getBuffer(), BlockType.DATA);
    byte[] encodedBytes = baos.toByteArray();
    size = encodedBytes.length - block.getDummyHeaderForVersion().length;
    return new HFileBlock(context.getBlockType(), size, size, -1, ByteBuffer.wrap(encodedBytes),
        HFileBlock.FILL_HEADER, 0, block.getOnDiskDataSizeWithHeader(), -1,
        block.getHFileContext());
  }

  /**
   * Encodes {@code kvs} through the encoder under test, discarding the output.
   * Used only to verify that encoding does not throw.
   */
  private void writeBlock(List<Cell> kvs, HFileContext fileContext, boolean useTags)
      throws IOException {
    HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(
        blockEncoder.getDataBlockEncoding(), HConstants.HFILEBLOCK_DUMMY_HEADER,
        fileContext);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
    DataOutputStream dos = new DataOutputStream(baos);
    blockEncoder.startBlockEncoding(context, dos);
    for (Cell kv : kvs) {
      blockEncoder.encode(kv, context, dos);
    }
    // NOTE(review): unlike createBlockOnDisk, this never calls
    // endBlockEncoding — the encoded stream is left unfinalized. The test
    // only asserts that encode() does not throw, so this appears intentional,
    // but confirm before reusing this helper elsewhere.
  }

  /**
   * @return All possible data block encoding configurations
   */
  @Parameters
  public static Collection<Object[]> getAllConfigurations() {
    List<Object[]> configurations = new ArrayList<>();

    for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
      for (boolean includesMemstoreTS : new boolean[] { false, true }) {
        HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ?
            NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(diskAlgo);
        // Autoboxing replaces the deprecated new Boolean(...) constructor.
        configurations.add(new Object[] { dbe, includesMemstoreTS });
      }
    }

    return configurations;
  }
}