/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests writing a version 3 {@link HFile} for all data block encodings.
 */
@RunWith(Parameterized.class)
@Category({ IOTests.class, MediumTests.class })
public class TestHFileWriterV3WithDataEncoders {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHFileWriterV3WithDataEncoders.class);

  private static final Logger LOG =
    LoggerFactory.getLogger(TestHFileWriterV3WithDataEncoders.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final Random RNG = new Random(9713312); // Just a fixed seed.

  private Configuration conf;
  private FileSystem fs;
  private boolean useTags;
  private DataBlockEncoding dataBlockEncoding;

  public TestHFileWriterV3WithDataEncoders(boolean useTags, DataBlockEncoding dataBlockEncoding) {
    this.useTags = useTags;
    this.dataBlockEncoding = dataBlockEncoding;
  }
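
  // Each parameter pair drives one run of the suite: every real encoding is
  // exercised both with and without tags. DataBlockEncoding.NONE is skipped,
  // which is why the parameter array holds dataBlockEncodings.length * 2 - 2
  // entries.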
  @Parameterized.Parameters
  public static Collection<Object[]> parameters() {
    DataBlockEncoding[] dataBlockEncodings = DataBlockEncoding.values();
    Object[][] params = new Object[dataBlockEncodings.length * 2 - 2][];
    int i = 0;
    for (DataBlockEncoding dataBlockEncoding : dataBlockEncodings) {
      if (dataBlockEncoding == DataBlockEncoding.NONE) {
        continue;
      }
      params[i++] = new Object[] { false, dataBlockEncoding };
      params[i++] = new Object[] { true, dataBlockEncoding };
    }
    return Arrays.asList(params);
  }

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    fs = FileSystem.get(conf);
  }

  @Test
  public void testHFileFormatV3() throws IOException {
    testHFileFormatV3Internals(useTags);
  }

  private void testHFileFormatV3Internals(boolean useTags) throws IOException {
    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testHFileFormatV3");
    final Compression.Algorithm compressAlgo = Compression.Algorithm.GZ;
    final int entryCount = 10000;
    writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags);
  }

  @Test
  public void testMidKeyInHFile() throws IOException {
    testMidKeyInHFileInternals(useTags);
  }

  private void testMidKeyInHFileInternals(boolean useTags) throws IOException {
    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testMidKeyInHFile");
    Compression.Algorithm compressAlgo = Compression.Algorithm.NONE;
    int entryCount = 50000;
    writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, true, useTags);
  }

  private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm compressAlgo,
    int entryCount, boolean findMidKey, boolean useTags) throws IOException {

    HFileContext context = new HFileContextBuilder().withBlockSize(4096).withIncludesTags(useTags)
      .withDataBlockEncoding(dataBlockEncoding).withCellComparator(CellComparatorImpl.COMPARATOR)
      .withCompression(compressAlgo).build();
    CacheConfig cacheConfig = new CacheConfig(conf);
    HFile.Writer writer = new HFile.WriterFactory(conf, cacheConfig).withPath(fs, hfilePath)
      .withFileContext(context).create();

    List<KeyValue> keyValues = new ArrayList<>(entryCount);
    writeKeyValues(entryCount, useTags, writer, RNG, keyValues);

    FSDataInputStream fsdis = fs.open(hfilePath);

    long fileSize = fs.getFileStatus(hfilePath).getLen();
    FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize);

    Assert.assertEquals(3, trailer.getMajorVersion());
    Assert.assertEquals(entryCount, trailer.getEntryCount());
    HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)
      .withIncludesMvcc(true).withIncludesTags(useTags).withDataBlockEncoding(dataBlockEncoding)
      .withHBaseCheckSum(true).build();
    ReaderContext readerContext =
      new ReaderContextBuilder().withInputStreamWrapper(new FSDataInputStreamWrapper(fsdis))
        .withFilePath(hfilePath).withFileSystem(fs).withFileSize(fileSize).build();
    HFileBlock.FSReader blockReader =
      new HFileBlock.FSReaderImpl(readerContext, meta, ByteBuffAllocator.HEAP, conf);
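    // Everything between the load-on-open offset and the trailer is the
    // load-on-open section (here: the root data index, the meta index and the
    // file info). The blockRange() iterator opened below walks exactly that
    // section.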
    // Comparator class name is stored in the trailer in version 3.
    CellComparator comparator = trailer.createComparator();
    HFileBlockIndex.BlockIndexReader dataBlockIndexReader =
      new HFileBlockIndex.CellBasedKeyBlockIndexReader(comparator, trailer.getNumDataIndexLevels());
    HFileBlockIndex.BlockIndexReader metaBlockIndexReader =
      new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1);

    HFileBlock.BlockIterator blockIter = blockReader.blockRange(trailer.getLoadOnOpenDataOffset(),
      fileSize - trailer.getTrailerSize());
    // Data index. We also read statistics about the block index written after
    // the root level.
    dataBlockIndexReader.readMultiLevelIndexRoot(
      blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount());

    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fs, hfilePath);
    readerContext = new ReaderContextBuilder().withFilePath(hfilePath).withFileSize(fileSize)
      .withFileSystem(wrapper.getHfs()).withInputStreamWrapper(wrapper).build();
    HFileInfo hfile = new HFileInfo(readerContext, conf);
    HFile.Reader reader = new HFilePreadReader(readerContext, hfile, cacheConfig, conf);
    hfile.initMetaAndIndex(reader);
    if (findMidKey) {
      Cell midkey = dataBlockIndexReader.midkey(reader);
      Assert.assertNotNull("Midkey should not be null", midkey);
    }

    // Meta index.
    metaBlockIndexReader.readRootIndex(
      blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX).getByteStream(),
      trailer.getMetaIndexCount());
    // File info
    HFileInfo fileInfo = new HFileInfo();
    fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream());
    byte[] keyValueFormatVersion = fileInfo.get(HFileWriterImpl.KEY_VALUE_VERSION);
    boolean includeMemstoreTS =
      keyValueFormatVersion != null && Bytes.toInt(keyValueFormatVersion) > 0;

    // Counters for the number of key/value pairs and the number of blocks
    int entriesRead = 0;
    int blocksRead = 0;
    long memstoreTS = 0;

    DataBlockEncoder encoder = dataBlockEncoding.getEncoder();
    long curBlockPos = scanBlocks(entryCount, context, keyValues, fsdis, trailer, meta, blockReader,
      entriesRead, blocksRead, encoder);

    // Meta blocks. We can scan until the load-on-open data offset (which is
    // the root block index offset in version 2) because we are not testing
    // intermediate-level index blocks here.

    int metaCounter = 0;
    while (fsdis.getPos() < trailer.getLoadOnOpenDataOffset()) {
      LOG.info("Current offset: {}, scanning until {}", fsdis.getPos(),
        trailer.getLoadOnOpenDataOffset());
      HFileBlock block =
        blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader);
      Assert.assertEquals(BlockType.META, block.getBlockType());
      Text t = new Text();
      ByteBuff buf = block.getBufferWithoutHeader();
      if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) {
        throw new IOException(
          "Failed to deserialize block " + block + " into a " + t.getClass().getSimpleName());
      }
      // Meta blocks come back sorted by their names, so the write order
      // CAPITAL_OF_USA/RUSSIA/FRANCE reads back as FRANCE, RUSSIA, USA.
      Text expectedText = (metaCounter == 0 ? new Text("Paris")
        : metaCounter == 1 ? new Text("Moscow") : new Text("Washington, D.C."));
      Assert.assertEquals(expectedText, t);
      LOG.info("Read meta block data: " + t);
      ++metaCounter;
      curBlockPos += block.getOnDiskSizeWithHeader();
    }

    fsdis.close();
    reader.close();
  }
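
  // scanBlocks() replays each encoded data block the way a reader would: it
  // skips the block header and the 2-byte encoding id, hands the remaining
  // bytes to the encoding's EncodedSeeker, and compares every decoded cell
  // with the KeyValue that was originally written.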
  private long scanBlocks(int entryCount, HFileContext context, List<KeyValue> keyValues,
    FSDataInputStream fsdis, FixedFileTrailer trailer, HFileContext meta,
    HFileBlock.FSReader blockReader, int entriesRead, int blocksRead, DataBlockEncoder encoder)
    throws IOException {
    // Scan blocks the way the reader would scan them
    fsdis.seek(0);
    long curBlockPos = 0;
    while (curBlockPos <= trailer.getLastDataBlockOffset()) {
      HFileBlockDecodingContext ctx = blockReader.getBlockDecodingContext();
      HFileBlock block =
        blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader);
      Assert.assertEquals(BlockType.ENCODED_DATA, block.getBlockType());
      ByteBuff origBlock = block.getBufferReadOnly();
      int pos = block.headerSize() + DataBlockEncoding.ID_SIZE;
      origBlock.position(pos);
      origBlock.limit(pos + block.getUncompressedSizeWithoutHeader() - DataBlockEncoding.ID_SIZE);
      ByteBuff buf = origBlock.slice();
      DataBlockEncoder.EncodedSeeker seeker =
        encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
      seeker.setCurrentBuffer(buf);
      Cell res = seeker.getCell();
      KeyValue kv = keyValues.get(entriesRead);
      Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
      ++entriesRead;
      while (seeker.next()) {
        res = seeker.getCell();
        kv = keyValues.get(entriesRead);
        Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
        ++entriesRead;
      }
      ++blocksRead;
      curBlockPos += block.getOnDiskSizeWithHeader();
    }
    LOG.info("Finished reading: entries={}, blocks={}", entriesRead, blocksRead);
    Assert.assertEquals(entryCount, entriesRead);
    return curBlockPos;
  }

  private void writeKeyValues(int entryCount, boolean useTags, HFile.Writer writer, Random rand,
    List<KeyValue> keyValues) throws IOException {

    for (int i = 0; i < entryCount; ++i) {
      byte[] keyBytes = RandomKeyValueUtil.randomOrderedKey(rand, i);

      // A random-length random value.
      byte[] valueBytes = RandomKeyValueUtil.randomValue(rand);
      KeyValue keyValue = null;
      if (useTags) {
        ArrayList<Tag> tags = new ArrayList<>();
        for (int j = 0; j < 1 + rand.nextInt(4); j++) {
          byte[] tagBytes = new byte[16];
          rand.nextBytes(tagBytes);
          tags.add(new ArrayBackedTag((byte) 1, tagBytes));
        }
        keyValue =
          new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes, tags);
      } else {
        keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes);
      }
      writer.append(keyValue);
      keyValues.add(keyValue);
    }

    // Add in an arbitrary order. They will be sorted lexicographically by
    // the key.
    writer.appendMetaBlock("CAPITAL_OF_USA", new Text("Washington, D.C."));
    writer.appendMetaBlock("CAPITAL_OF_RUSSIA", new Text("Moscow"));
    writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));

    writer.close();
  }

}