/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;
import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.BUFFER_SIZE_KEY;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MAX_BUFFER_COUNT_KEY;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MIN_ALLOCATE_SIZE_KEY;
import static org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.BLOCKCACHE_POLICY_KEY;
import static org.apache.hadoop.hbase.io.hfile.CacheConfig.EVICT_BLOCKS_ON_CLOSE_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.MetaCellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.io.hfile.ReaderContext.ReaderType;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Writable;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test HFile features.
 */
@Category({ IOTests.class, SmallTests.class })
public class TestHFile {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHFile.class);

  @Rule
  public TestName testName = new TestName();

  private static final Logger LOG = LoggerFactory.getLogger(TestHFile.class);
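  // KeyValue.Type.values() includes the Minimum and Maximum pseudo-types, which never appear in
  // real cells, hence the "- 2".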
  private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestHFile").toString();
  private final int minBlockSize = 512;
  private static String localFormatter = "%010d";
  private static CacheConfig cacheConf;
  private static Configuration conf;
  private static FileSystem fs;

  @BeforeClass
  public static void setUp() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    cacheConf = new CacheConfig(conf);
    fs = TEST_UTIL.getTestFileSystem();
  }

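  /**
   * Open the file with a PREAD reader just long enough to initialize its meta and indexes, then
   * reopen it as a STREAM reader that reuses the already-loaded {@link HFileInfo}.
   */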
  public static Reader createReaderFromStream(ReaderContext context, CacheConfig cacheConf,
    Configuration conf) throws IOException {
    HFileInfo fileInfo = new HFileInfo(context, conf);
    Reader preadReader = HFile.createReader(context, fileInfo, cacheConf, conf);
    fileInfo.initMetaAndIndex(preadReader);
    preadReader.close();
    context = new ReaderContextBuilder()
      .withFileSystemAndPath(context.getFileSystem(), context.getFilePath())
      .withReaderType(ReaderType.STREAM).build();
    Reader streamReader = HFile.createReader(context, fileInfo, cacheConf, conf);
    return streamReader;
  }

  private ByteBuffAllocator initAllocator(boolean reservoirEnabled, int bufSize, int bufCount,
    int minAllocSize) {
    Configuration that = HBaseConfiguration.create(conf);
    that.setInt(BUFFER_SIZE_KEY, bufSize);
    that.setInt(MAX_BUFFER_COUNT_KEY, bufCount);
    // Allocations of at least minAllocSize bytes are served from the reservoir (0 means all).
    that.setInt(MIN_ALLOCATE_SIZE_KEY, minAllocSize);
    return ByteBuffAllocator.create(that, reservoirEnabled);
  }

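  /**
   * Allocate and then release bufCount buffers so the reservoir ends up fully populated. Buffers
   * are created lazily, which is why the free-buffer count stays at zero until they are released.
   */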
  private void fillByteBuffAllocator(ByteBuffAllocator alloc, int bufCount) {
    // Fill the allocator with bufCount ByteBuffers.
    List<ByteBuff> buffs = new ArrayList<>();
    for (int i = 0; i < bufCount; i++) {
      buffs.add(alloc.allocateOneBuffer());
      Assert.assertEquals(0, alloc.getFreeBufferCount());
    }
    buffs.forEach(ByteBuff::release);
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
  }

  @Test
  public void testReaderWithoutBlockCache() throws Exception {
    int bufCount = 32;
    // All ByteBuffers will be allocated from the reservoir.
    ByteBuffAllocator alloc = initAllocator(true, 64 * 1024, bufCount, 0);
    fillByteBuffAllocator(alloc, bufCount);
    // start write to store file.
    Path path = writeStoreFile();
    try {
      readStoreFile(path, conf, alloc);
    } catch (Exception e) {
      fail("Unexpected exception while reading the store file: " + e);
    }
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
    alloc.clean();
  }

  /**
   * Test case for HBASE-22127 in LruBlockCache.
   */
  @Test
  public void testReaderWithLRUBlockCache() throws Exception {
    int bufCount = 1024, blockSize = 64 * 1024;
    ByteBuffAllocator alloc = initAllocator(true, blockSize, bufCount, 0);
    fillByteBuffAllocator(alloc, bufCount);
    Path storeFilePath = writeStoreFile();
    // Open the file reader with LRUBlockCache
    BlockCache lru = new LruBlockCache(1024 * 1024 * 32, blockSize, true, conf);
    CacheConfig cacheConfig = new CacheConfig(conf, null, lru, alloc);
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConfig, true, conf);
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      BlockCacheKey key = new BlockCacheKey(storeFilePath.getName(), offset);
      HFileBlock block = reader.readBlock(offset, -1, true, true, false, true, null, null);
      offset += block.getOnDiskSizeWithHeader();
      // Ensure the cached block is an on-heap one.
      Cacheable cachedBlock = lru.getBlock(key, false, false, true);
      Assert.assertNotNull(cachedBlock);
      Assert.assertTrue(cachedBlock instanceof HFileBlock);
      Assert.assertFalse(((HFileBlock) cachedBlock).isSharedMem());
      // Blocks bound for the on-heap LRU cache are never allocated from the off-heap reservoir,
      // so the free-buffer count must be unchanged.
      Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
      block.release(); // return the ByteBuffer back to the allocator.
    }
    reader.close();
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
    alloc.clean();
    lru.shutdown();
  }

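  /**
   * Build a CombinedBlockCache: the given policy for the on-heap L1 cache plus a 32MB off-heap
   * BucketCache as L2.
   */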
  private BlockCache initCombinedBlockCache(final String l1CachePolicy) {
    Configuration that = HBaseConfiguration.create(conf);
    that.setFloat(BUCKET_CACHE_SIZE_KEY, 32); // 32MB for bucket cache.
    that.set(BUCKET_CACHE_IOENGINE_KEY, "offheap");
    that.set(BLOCKCACHE_POLICY_KEY, l1CachePolicy);
    BlockCache bc = BlockCacheFactory.createBlockCache(that);
    Assert.assertNotNull(bc);
    Assert.assertTrue(bc instanceof CombinedBlockCache);
    return bc;
  }

  /**
   * Test case for HBASE-22127 in CombinedBlockCache.
   */
  @Test
  public void testReaderWithCombinedBlockCache() throws Exception {
    int bufCount = 1024, blockSize = 64 * 1024;
    ByteBuffAllocator alloc = initAllocator(true, blockSize, bufCount, 0);
    fillByteBuffAllocator(alloc, bufCount);
    Path storeFilePath = writeStoreFile();
    // Open the file reader with CombinedBlockCache
    BlockCache combined = initCombinedBlockCache("LRU");
    conf.setBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, true);
    CacheConfig cacheConfig = new CacheConfig(conf, null, combined, alloc);
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConfig, true, conf);
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      BlockCacheKey key = new BlockCacheKey(storeFilePath.getName(), offset);
      HFileBlock block = reader.readBlock(offset, -1, true, true, false, true, null, null);
      offset += block.getOnDiskSizeWithHeader();
      // Read the cached block.
      Cacheable cachedBlock = combined.getBlock(key, false, false, true);
      try {
        Assert.assertNotNull(cachedBlock);
        Assert.assertTrue(cachedBlock instanceof HFileBlock);
        HFileBlock hfb = (HFileBlock) cachedBlock;
        // Data blocks are cached in the BucketCache, so they should be off-heap blocks.
        if (hfb.getBlockType().isData()) {
          Assert.assertTrue(hfb.isSharedMem());
        } else {
          // Non-data blocks are cached in the LRUBlockCache, so they must be on-heap blocks.
          Assert.assertFalse(hfb.isSharedMem());
        }
      } finally {
        cachedBlock.release();
      }
      block.release(); // return the ByteBuffer back to the allocator.
    }
    reader.close();
    combined.shutdown();
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
    alloc.clean();
  }

  private void readStoreFile(Path storeFilePath, Configuration conf, ByteBuffAllocator alloc)
    throws Exception {
    // Open the file reader with block cache disabled.
    CacheConfig cache = new CacheConfig(conf, null, null, alloc);
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cache, true, conf);
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);
      offset += block.getOnDiskSizeWithHeader();
      block.release(); // return the ByteBuffer back to the allocator.
    }
    reader.close();
  }

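  /** Write a store file with 1000 random, ordered KeyValues and return its path. */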
  private Path writeStoreFile() throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
    HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, fs).withOutputDir(storeFileParentDir)
      .withFileContext(meta).build();
    final int rowLen = 32;
    Random rand = ThreadLocalRandom.current();
    for (int i = 0; i < 1000; ++i) {
      byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);
      byte[] v = RandomKeyValueUtil.randomValue(rand);
      int cfLen = rand.nextInt(k.length - rowLen + 1);
      KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,
        k.length - rowLen - cfLen, rand.nextLong(), generateKeyType(rand), v, 0, v.length);
      sfw.append(kv);
    }

    sfw.close();
    return sfw.getPath();
  }

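  /** Pick a random key type: half Puts, half any other valid (non-pseudo) type. */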
  public static KeyValue.Type generateKeyType(Random rand) {
    if (rand.nextBoolean()) {
      // Let's make half of KVs puts.
      return KeyValue.Type.Put;
    } else {
      KeyValue.Type keyType = KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
        throw new RuntimeException("Generated an invalid key type: " + keyType + ". "
          + "Probably the layout of KeyValue.Type has changed.");
      }
      return keyType;
    }
  }

  /**
   * Test that all features work reasonably when the HFile is empty of entries.
   */
  @Test
  public void testEmptyHFile() throws IOException {
    Path f = new Path(ROOT_DIR, testName.getMethodName());
    HFileContext context = new HFileContextBuilder().withIncludesTags(false).build();
    Writer w =
      HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create();
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf, true, conf);
    assertFalse(r.getFirstKey().isPresent());
    assertFalse(r.getLastKey().isPresent());
  }

  /**
   * Create a 0-length HFile and verify that reading it fails.
   */
  @Test
  public void testCorrupt0LengthHFile() throws IOException {
    Path f = new Path(ROOT_DIR, testName.getMethodName());
    FSDataOutputStream fsos = fs.create(f);
    fsos.close();

    try {
      HFile.createReader(fs, f, cacheConf, true, conf);
    } catch (CorruptHFileException | IllegalArgumentException che) {
      // Expected failure
      return;
    }
    fail("Should have thrown exception");
  }

  @Test
  public void testCorruptOutOfOrderHFileWrite() throws IOException {
    Path path = new Path(ROOT_DIR, testName.getMethodName());
    FSDataOutputStream mockedOutputStream = Mockito.mock(FSDataOutputStream.class);
    String columnFamily = "MyColumnFamily";
    String tableName = "MyTableName";
    HFileContext fileContext =
      new HFileContextBuilder().withHFileName(testName.getMethodName() + "HFile")
        .withBlockSize(minBlockSize).withColumnFamily(Bytes.toBytes(columnFamily))
        .withTableName(Bytes.toBytes(tableName)).withHBaseCheckSum(false)
        .withCompression(Compression.Algorithm.NONE).withCompressTags(false).build();
    HFileWriterImpl writer =
      new HFileWriterImpl(conf, cacheConf, path, mockedOutputStream, fileContext);
    CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    byte[] row = Bytes.toBytes("foo");
    byte[] qualifier = Bytes.toBytes("qualifier");
    byte[] cf = Bytes.toBytes(columnFamily);
    byte[] val = Bytes.toBytes("fooVal");
    long firstTS = 100L;
    long secondTS = 101L;
    Cell firstCell = cellBuilder.setRow(row).setValue(val).setTimestamp(firstTS)
      .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
    Cell secondCell = cellBuilder.setRow(row).setValue(val).setTimestamp(secondTS)
      .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
    // secondCell sorts before firstCell because later timestamps come first, so appending it
    // second is out of order
    writer.append(firstCell);
    try {
      writer.append(secondCell);
    } catch (IOException ie) {
      String message = ie.getMessage();
      Assert.assertTrue(message.contains("not lexically larger"));
      Assert.assertTrue(message.contains(tableName));
      Assert.assertTrue(message.contains(columnFamily));
      return;
    }
    Assert.fail("Exception wasn't thrown even though Cells were appended in the wrong order!");
  }

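  /** Copy the first half of src into dst, simulating an HFile that was truncated mid-write. */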
  public static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException {
    FileStatus fst = fs.getFileStatus(src);
    long len = fst.getLen();
    len = len / 2;

    // create a truncated hfile
    FSDataOutputStream fdos = fs.create(dst);
    byte[] buf = new byte[(int) len];
    FSDataInputStream fdis = fs.open(src);
    // A bare read() may return fewer bytes than requested, so read the full half-length.
    IOUtils.readFully(fdis, buf, 0, buf.length);
    fdos.write(buf);
    fdis.close();
    fdos.close();
  }

  /**
   * Create a truncated HFile and verify that an exception is thrown when reading it.
   */
  @Test
  public void testCorruptTruncatedHFile() throws IOException {
    Path f = new Path(ROOT_DIR, testName.getMethodName());
    HFileContext context = new HFileContextBuilder().build();
    Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(this.fs, f).withFileContext(context)
      .create();
    writeSomeRecords(w, 0, 100, false);
    w.close();

    Path trunc = new Path(f.getParent(), "truncated");
    truncateFile(fs, w.getPath(), trunc);

    try {
      HFile.createReader(fs, trunc, cacheConf, true, conf);
    } catch (CorruptHFileException | IllegalArgumentException che) {
      // Expected failure
      return;
    }
    fail("Should have thrown exception");
  }

  // Write n records into the hfile, keyed by zero-padded sequence numbers starting at start;
  // returns the next start index so callers can append further batches.
  private int writeSomeRecords(Writer writer, int start, int n, boolean useTags)
    throws IOException {
    String value = "value";
    KeyValue kv;
    for (int i = start; i < (start + n); i++) {
      String key = String.format(localFormatter, Integer.valueOf(i));
      if (useTags) {
        Tag t = new ArrayBackedTag((byte) 1, "myTag1");
        Tag[] tags = new Tag[1];
        tags[0] = t;
        kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"),
          HConstants.LATEST_TIMESTAMP, Bytes.toBytes(value + key), tags);
        writer.append(kv);
      } else {
        kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"),
          Bytes.toBytes(value + key));
        writer.append(kv);
      }
    }
    return (start + n);
  }

  private void readAllRecords(HFileScanner scanner) throws IOException {
    readAndCheckbytes(scanner, 0, 100);
  }

  // Read the records back and check that keys and values match what was written.
  private int readAndCheckbytes(HFileScanner scanner, int start, int n) throws IOException {
    String value = "value";
    int i = start;
    for (; i < (start + n); i++) {
      ByteBuffer key = ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey());
      ByteBuffer val = scanner.getValue();
      String keyStr = String.format(localFormatter, Integer.valueOf(i));
      String valStr = value + keyStr;
      KeyValue kv = new KeyValue(Bytes.toBytes(keyStr), Bytes.toBytes("family"),
        Bytes.toBytes("qual"), Bytes.toBytes(valStr));
      byte[] keyBytes =
        new KeyValue.KeyOnlyKeyValue(Bytes.toBytes(key), 0, Bytes.toBytes(key).length).getKey();
      assertTrue("bytes for keys do not match " + keyStr + " " + Bytes.toString(Bytes.toBytes(key)),
        Arrays.equals(kv.getKey(), keyBytes));
      byte[] valBytes = Bytes.toBytes(val);
      assertTrue("bytes for vals do not match " + valStr + " " + Bytes.toString(valBytes),
        Arrays.equals(Bytes.toBytes(valStr), valBytes));
      if (!scanner.next()) {
        break;
      }
    }
    assertEquals(start + n - 1, i);
    return (start + n);
  }

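  /** Build the serialized key for the given row id, in the same layout used by writeSomeRecords. */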
  private byte[] getSomeKey(int rowId) {
    KeyValue kv = new KeyValue(Bytes.toBytes(String.format(localFormatter, rowId)),
      Bytes.toBytes("family"), Bytes.toBytes("qual"), HConstants.LATEST_TIMESTAMP, Type.Put);
    return kv.getKey();
  }

  private void writeRecords(Writer writer, boolean useTags) throws IOException {
    writeSomeRecords(writer, 0, 100, useTags);
    writer.close();
  }

  private FSDataOutputStream createFSOutput(Path name) throws IOException {
    return fs.create(name);
  }

  /**
   * Write and read back records with the given codec, verifying scanner seeks and key/value
   * round-trips.
   */
  void basicWithSomeCodec(String codec, boolean useTags) throws IOException {
    if (useTags) {
      conf.setInt("hfile.format.version", 3);
    }
    Path ncHFile = new Path(ROOT_DIR, "basic.hfile." + codec + useTags);
    FSDataOutputStream fout = createFSOutput(ncHFile);
    HFileContext meta = new HFileContextBuilder().withBlockSize(minBlockSize)
      .withCompression(HFileWriterImpl.compressionByName(codec)).build();
    Writer writer =
      HFile.getWriterFactory(conf, cacheConf).withOutputStream(fout).withFileContext(meta).create();
    LOG.info(Objects.toString(writer));
    writeRecords(writer, useTags);
    fout.close();
    FSDataInputStream fin = fs.open(ncHFile);
    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, ncHFile).build();
    Reader reader = createReaderFromStream(context, cacheConf, conf);
    LOG.info(cacheConf.toString());
    // Load up the index.
    // Get a scanner that caches and that does not use pread.
    HFileScanner scanner = reader.getScanner(conf, true, false);
    // Align scanner at start of the file.
    scanner.seekTo();
    readAllRecords(scanner);
    assertEquals("location lookup failed", 0,
      scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))));
    // read the key and see if it matches
    ByteBuffer readKey = ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey());
    assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey)));

    scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
    ByteBuffer val1 = scanner.getValue();
    scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
    ByteBuffer val2 = scanner.getValue();
    assertTrue(Arrays.equals(Bytes.toBytes(val1), Bytes.toBytes(val2)));

    reader.close();
    fin.close();
    fs.delete(ncHFile, true);
  }

  @Test
  public void testTFileFeatures() throws IOException {
    testHFilefeaturesInternals(false);
    testHFilefeaturesInternals(true);
  }

  protected void testHFilefeaturesInternals(boolean useTags) throws IOException {
    basicWithSomeCodec("none", useTags);
    basicWithSomeCodec("gz", useTags);
  }

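  // Append n meta blocks whose payload is "something to test" plus the block index.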
  private void writeNumMetablocks(Writer writer, int n) {
    for (int i = 0; i < n; i++) {
      writer.appendMetaBlock("HFileMeta" + i, new Writable() {
        private int val;

        public Writable setVal(int val) {
          this.val = val;
          return this;
        }

        @Override
        public void write(DataOutput out) throws IOException {
          out.write(("something to test" + val).getBytes());
        }

        @Override
        public void readFields(DataInput in) throws IOException {
        }
      }.setVal(i));
    }
  }

  private void someTestingWithMetaBlock(Writer writer) {
    writeNumMetablocks(writer, 10);
  }

  private void readNumMetablocks(Reader reader, int n) throws IOException {
    for (int i = 0; i < n; i++) {
      ByteBuff actual = reader.getMetaBlock("HFileMeta" + i, false).getBufferWithoutHeader();
      ByteBuffer expected = ByteBuffer.wrap(("something to test" + i).getBytes());
      assertEquals("failed to match metadata", Bytes.toStringBinary(expected), Bytes.toStringBinary(
        actual.array(), actual.arrayOffset() + actual.position(), actual.capacity()));
    }
  }

  private void someReadingWithMetaBlock(Reader reader) throws IOException {
    readNumMetablocks(reader, 10);
  }

  private void metablocks(final String compress) throws Exception {
    Path mFile = new Path(ROOT_DIR, "meta.hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta =
      new HFileContextBuilder().withCompression(HFileWriterImpl.compressionByName(compress))
        .withBlockSize(minBlockSize).build();
    Writer writer =
      HFile.getWriterFactory(conf, cacheConf).withOutputStream(fout).withFileContext(meta).create();
    someTestingWithMetaBlock(writer);
    writer.close();
    fout.close();
    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, mFile).build();
    Reader reader = createReaderFromStream(context, cacheConf, conf);
    // No data -- this should return false.
    assertFalse(reader.getScanner(conf, false, false).seekTo());
    someReadingWithMetaBlock(reader);
    fs.delete(mFile, true);
    reader.close();
  }

  // test meta blocks for hfiles
  @Test
  public void testMetaBlocks() throws Exception {
    metablocks("none");
    metablocks("gz");
  }

  @Test
  public void testNullMetaBlocks() throws Exception {
    for (Compression.Algorithm compressAlgo : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
      Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
      FSDataOutputStream fout = createFSOutput(mFile);
      HFileContext meta =
        new HFileContextBuilder().withCompression(compressAlgo).withBlockSize(minBlockSize).build();
      Writer writer = HFile.getWriterFactory(conf, cacheConf).withOutputStream(fout)
        .withFileContext(meta).create();
      KeyValue kv =
        new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null, Bytes.toBytes("value"));
      writer.append(kv);
      writer.close();
      fout.close();
      Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
      assertNull(reader.getMetaBlock("non-existent", false));
    }
  }

  /**
   * Make sure the ordinals for our compression algorithms do not change on us.
   */
  @Test
  public void testCompressionOrdinance() {
    assertEquals(0, Compression.Algorithm.LZO.ordinal());
    assertEquals(1, Compression.Algorithm.GZ.ordinal());
    assertEquals(2, Compression.Algorithm.NONE.ordinal());
    assertEquals(3, Compression.Algorithm.SNAPPY.ordinal());
    assertEquals(4, Compression.Algorithm.LZ4.ordinal());
  }

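  // When the two keys differ only in timestamp there is no shorter midpoint; getMidpoint should
  // return a key that compares equal to the right one.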
  @Test
  public void testShortMidpointSameQual() {
    Cell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"), 11,
      KeyValue.Type.Maximum.getCode(), HConstants.EMPTY_BYTE_ARRAY);
    Cell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"), 9,
      KeyValue.Type.Maximum.getCode(), HConstants.EMPTY_BYTE_ARRAY);
    Cell mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) <= 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) == 0);
  }

  private Cell getCell(byte[] row, byte[] family, byte[] qualifier) {
    return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row)
      .setFamily(family).setQualifier(qualifier).setTimestamp(HConstants.LATEST_TIMESTAMP)
      .setType(KeyValue.Type.Maximum.getCode()).setValue(HConstants.EMPTY_BYTE_ARRAY).build();
  }

  @Test
  public void testGetShortMidpoint() {
    Cell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    Cell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    Cell mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) <= 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) <= 0);

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("b"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) <= 0);

    left = CellUtil.createCell(Bytes.toBytes("g"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("i"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) <= 0);

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("bbbbbbb"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) < 0);
    assertEquals(1, mid.getRowLength());

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("a"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) <= 0);

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("aaaaaaaa"), Bytes.toBytes("b"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) < 0);
    assertEquals(2, mid.getFamilyLength());

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("aaaaaaaaa"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) < 0);
    assertEquals(2, mid.getQualifierLength());

    left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("b"));
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) <= 0);
    assertEquals(1, mid.getQualifierLength());

    // Verify boundary conditions
    left = getCell(Bytes.toBytes("a"), Bytes.toBytes("a"), new byte[] { 0x00, (byte) 0xFE });
    right = getCell(Bytes.toBytes("a"), Bytes.toBytes("a"), new byte[] { 0x00, (byte) 0xFF });
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) == 0);
    left = getCell(Bytes.toBytes("a"), Bytes.toBytes("a"), new byte[] { 0x00, 0x12 });
    right = getCell(Bytes.toBytes("a"), Bytes.toBytes("a"), new byte[] { 0x00, 0x12, 0x00 });
    mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) == 0);

    // Assert that with the meta comparator the right cell itself is returned, i.e. no
    // optimization is done.
    left = CellUtil.createCell(Bytes.toBytes("g"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    right = CellUtil.createCell(Bytes.toBytes("i"), Bytes.toBytes("a"), Bytes.toBytes("a"));
    mid = HFileWriterImpl.getMidpoint(MetaCellComparator.META_COMPARATOR, left, right);
    assertTrue(PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, left, mid) < 0);
    assertTrue(
      PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, mid, right) == 0);

    // See HBASE-7845
    byte[] rowA = Bytes.toBytes("rowA");
    byte[] rowB = Bytes.toBytes("rowB");

    byte[] family = Bytes.toBytes("family");
    byte[] qualA = Bytes.toBytes("qfA");
    byte[] qualB = Bytes.toBytes("qfB");
    final CellComparatorImpl keyComparator = CellComparatorImpl.COMPARATOR;
    // verify that a faked shorter rowkey can be generated
    long ts = 5;
    KeyValue kv1 = new KeyValue(Bytes.toBytes("the quick brown fox"), family, qualA, ts, Type.Put);
    KeyValue kv2 = new KeyValue(Bytes.toBytes("the who test text"), family, qualA, ts, Type.Put);
    Cell newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) > 0);
    byte[] expectedArray = Bytes.toBytes("the r");
    assertTrue(Bytes.equals(newKey.getRowArray(), newKey.getRowOffset(), newKey.getRowLength(),
      expectedArray, 0, expectedArray.length));

    // verify: when row + family + qualifier are identical, the right key is returned directly
    kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 5, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 0, Type.Put);
    assertTrue(keyComparator.compare(kv1, kv2) < 0);
    newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) == 0);
    kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, -5, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, -10, Type.Put);
    assertTrue(keyComparator.compare(kv1, kv2) < 0);
    newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) == 0);

    // verify: same row, different qualifier
    kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 5, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualB, 5, Type.Put);
    assertTrue(keyComparator.compare(kv1, kv2) < 0);
    newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) > 0);
    assertTrue(Arrays.equals(CellUtil.cloneFamily(newKey), family));
    assertTrue(Arrays.equals(CellUtil.cloneQualifier(newKey), qualB));
    assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP);
    assertTrue(newKey.getTypeByte() == Type.Maximum.getCode());

    // verify metaKeyComparator's getShortMidpointKey output
    final CellComparatorImpl metaKeyComparator = MetaCellComparator.META_COMPARATOR;
    kv1 = new KeyValue(Bytes.toBytes("ilovehbase123"), family, qualA, 5, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("ilovehbase234"), family, qualA, 0, Type.Put);
    newKey = HFileWriterImpl.getMidpoint(metaKeyComparator, kv1, kv2);
    assertTrue(metaKeyComparator.compare(kv1, newKey) < 0);
    assertTrue((metaKeyComparator.compare(kv2, newKey) == 0));

    // verify the common-prefix scenario: the midpoint keeps the shared prefix plus one byte
    kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, ts, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("ilovehbaseandhdfs"), family, qualA, ts, Type.Put);
    assertTrue(keyComparator.compare(kv1, kv2) < 0);
    newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) > 0);
    expectedArray = Bytes.toBytes("ilovehbasea");
    assertTrue(Bytes.equals(newKey.getRowArray(), newKey.getRowOffset(), newKey.getRowLength(),
      expectedArray, 0, expectedArray.length));
    // verify the case where the rows differ at a single byte
    kv1 = new KeyValue(Bytes.toBytes("100abcdefg"), family, qualA, ts, Type.Put);
    kv2 = new KeyValue(Bytes.toBytes("101abcdefg"), family, qualA, ts, Type.Put);
    assertTrue(keyComparator.compare(kv1, kv2) < 0);
    newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
    assertTrue(keyComparator.compare(kv1, newKey) < 0);
    assertTrue((keyComparator.compare(kv2, newKey)) > 0);
    expectedArray = Bytes.toBytes("101");
    assertTrue(Bytes.equals(newKey.getRowArray(), newKey.getRowOffset(), newKey.getRowLength(),
      expectedArray, 0, expectedArray.length));
  }

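  /**
   * Write cells through every pluggable DataBlockEncoder and verify that beforeShipped() performs
   * a deep copy, so that corrupting a cell's original backing ByteBuffer cannot corrupt the
   * writer's internal state.
   */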
  @Test
  public void testDBEShipped() throws IOException {
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      DataBlockEncoder encoder = encoding.getEncoder();
      if (encoder == null) {
        continue;
      }
      Path f = new Path(ROOT_DIR, testName.getMethodName() + "_" + encoding);
      HFileContext context =
        new HFileContextBuilder().withIncludesTags(false).withDataBlockEncoding(encoding).build();
      HFileWriterImpl writer = (HFileWriterImpl) HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, f).withFileContext(context).create();

      KeyValue kv = new KeyValue(Bytes.toBytes("testkey1"), Bytes.toBytes("family"),
        Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
      KeyValue kv2 = new KeyValue(Bytes.toBytes("testkey2"), Bytes.toBytes("family"),
        Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
      KeyValue kv3 = new KeyValue(Bytes.toBytes("testkey3"), Bytes.toBytes("family"),
        Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));

      ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer());
      ByteBuffer buffer2 = ByteBuffer.wrap(kv2.getBuffer());
      ByteBuffer buffer3 = ByteBuffer.wrap(kv3.getBuffer());

      writer.append(new ByteBufferKeyValue(buffer, 0, buffer.remaining()));
      writer.beforeShipped();

      // pollute the first cell's backing ByteBuffer
      ByteBufferUtils.copyFromBufferToBuffer(buffer3, buffer);

      // write another cell; if the DBE did not deep-copy on beforeShipped(), the test will fail
      writer.append(new ByteBufferKeyValue(buffer2, 0, buffer2.remaining()));
      writer.close();
    }
  }

  /**
   * Test case for CombinedBlockCache with TinyLfu as L1 cache.
   */
  @Test
  public void testReaderWithTinyLfuCombinedBlockCache() throws Exception {
    testReaderCombinedCache("TinyLfu");
  }

  /**
   * Test case for CombinedBlockCache with AdaptiveLRU as L1 cache.
   */
  @Test
  public void testReaderWithAdaptiveLruCombinedBlockCache() throws Exception {
    testReaderCombinedCache("AdaptiveLRU");
  }

  /**
   * Test case for CombinedBlockCache with LRU as L1 cache.
   */
  @Test
  public void testReaderWithLruCombinedBlockCache() throws Exception {
    testReaderCombinedCache("LRU");
  }

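  /**
   * Read a store file through a CombinedBlockCache built with the given L1 policy and check block
   * placement: data blocks must be off-heap in the BucketCache, while non-data blocks must be
   * on-heap unless the L1 policy is TinyLfu.
   */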
  private void testReaderCombinedCache(final String l1CachePolicy) throws Exception {
    int bufCount = 1024;
    int blockSize = 64 * 1024;
    ByteBuffAllocator alloc = initAllocator(true, blockSize, bufCount, 0);
    fillByteBuffAllocator(alloc, bufCount);
    Path storeFilePath = writeStoreFile();
    // Open the file reader with CombinedBlockCache
    BlockCache combined = initCombinedBlockCache(l1CachePolicy);
    conf.setBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, true);
    CacheConfig cacheConfig = new CacheConfig(conf, null, combined, alloc);
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConfig, true, conf);
    long offset = 0;
    Cacheable cachedBlock = null;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      BlockCacheKey key = new BlockCacheKey(storeFilePath.getName(), offset);
      HFileBlock block = reader.readBlock(offset, -1, true, true, false, true, null, null);
      offset += block.getOnDiskSizeWithHeader();
      // Read the cached block.
      cachedBlock = combined.getBlock(key, false, false, true);
      try {
        Assert.assertNotNull(cachedBlock);
        Assert.assertTrue(cachedBlock instanceof HFileBlock);
        HFileBlock hfb = (HFileBlock) cachedBlock;
        // Data blocks are cached in the BucketCache, so they should be off-heap blocks.
        if (hfb.getBlockType().isData()) {
          Assert.assertTrue(hfb.isSharedMem());
        } else if (!l1CachePolicy.equals("TinyLfu")) {
          Assert.assertFalse(hfb.isSharedMem());
        }
      } finally {
        cachedBlock.release();
      }
      block.release(); // return the ByteBuffer back to the allocator.
    }
    reader.close();
    combined.shutdown();
    if (cachedBlock != null) {
      Assert.assertEquals(0, cachedBlock.refCnt());
    }
    Assert.assertEquals(bufCount, alloc.getFreeBufferCount());
    alloc.clean();
  }

}