/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({IOTests.class, MediumTests.class})
public class TestPrefetch {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestPrefetch.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

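  // KeyValue.Type.values() includes the Minimum and Maximum sentinels, which are not
  // valid on-disk key types; subtract the two of them when picking random types below.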
  private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
  // Deliberately small block size so the store file spans many blocks and gets index blocks.
  private static final int DATA_BLOCK_SIZE = 2048;
  private static final int NUM_KV = 1000;
  private static final Random RNG = new Random();

  private Configuration conf;
  private CacheConfig cacheConf;
  private FileSystem fs;
  private BlockCache blockCache;

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    // Enable prefetch-on-open globally so every reader created in these tests starts a prefetch.
    conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
    fs = HFileSystem.get(conf);
    blockCache = BlockCacheFactory.createBlockCache(conf);
    cacheConf = new CacheConfig(conf, blockCache);
  }

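  // Prefetch enabled on the column family descriptor should take effect even when the
  // global prefetch key is left at its default (off).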
  @Test
  public void testPrefetchSetInHCDWorks() {
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("f")).setPrefetchBlocksOnOpen(true).build();
    Configuration c = HBaseConfiguration.create();
    assertFalse(c.getBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, false));
    CacheConfig cc =
        new CacheConfig(c, columnFamilyDescriptor, blockCache, ByteBuffAllocator.HEAP);
    assertTrue(cc.shouldPrefetchOnOpen());
  }

  @Test
  public void testPrefetch() throws Exception {
    Path storeFile = writeStoreFile("TestPrefetch");
    readStoreFile(storeFile);
  }

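  // Repeat several times so that the scanner-style reads and the background prefetch
  // thread get a chance to interleave and race on the same blocks.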
  @Test
  public void testPrefetchRace() throws Exception {
    for (int i = 0; i < 10; i++) {
      Path storeFile = writeStoreFile("TestPrefetchRace-" + i);
      readStoreFileLikeScanner(storeFile);
    }
  }

  /**
   * Reads a store file in the same manner as a scanner: sequential, non-positional reads,
   * without waiting for the background prefetch to complete first.
   */
  private void readStoreFileLikeScanner(Path storeFilePath) throws Exception {
    // Opening the reader kicks off the background prefetch, since prefetch-on-open is enabled.
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
    do {
      long offset = 0;
      while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
        HFileBlock block = reader.readBlock(offset, -1, /*cacheBlock=*/false, /*pread=*/false,
            /*isCompaction=*/false, /*updateCacheMetrics=*/true, null, null);
        offset += block.getOnDiskSizeWithHeader();
      }
    } while (!reader.prefetchComplete());
  }

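  /**
   * Waits for the prefetch started on open to finish, then verifies that the data and
   * index blocks of the file were loaded into the block cache.
   */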
  private void readStoreFile(Path storeFilePath) throws Exception {
    // Opening the reader kicks off the background prefetch.
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);

    while (!reader.prefetchComplete()) {
      // Sleep for a bit
      Thread.sleep(1000);
    }

    // Check that all of the data and index blocks were preloaded into the block cache.
    BlockCache cache = cacheConf.getBlockCache().get();
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      HFileBlock block = reader.readBlock(offset, -1, /*cacheBlock=*/false, /*pread=*/true,
          /*isCompaction=*/false, /*updateCacheMetrics=*/true, null, null);
      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
      boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;
      if (block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX
          || block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {
        assertTrue(isCached);
      }
      offset += block.getOnDiskSizeWithHeader();
    }
  }

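  /**
   * Writes a store file of NUM_KV random KeyValues, giving prefetch a multi-block file
   * (including index blocks) to load.
   */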
  private Path writeStoreFile(String fname) throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
    HFileContext meta = new HFileContextBuilder()
      .withBlockSize(DATA_BLOCK_SIZE)
      .build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(storeFileParentDir)
      .withFileContext(meta)
      .build();

    final int rowLen = 32;
    for (int i = 0; i < NUM_KV; ++i) {
      byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);
      byte[] v = RandomKeyValueUtil.randomValue(RNG);
      int cfLen = RNG.nextInt(k.length - rowLen + 1);
      // Carve the random key into row (fixed length), family, and qualifier slices.
      KeyValue kv = new KeyValue(
          k, 0, rowLen,
          k, rowLen, cfLen,
          k, rowLen + cfLen, k.length - rowLen - cfLen,
          RNG.nextLong(),
          generateKeyType(RNG),
          v, 0, v.length);
      sfw.append(kv);
    }

    sfw.close();
    return sfw.getPath();
  }

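  /**
   * Returns a random valid key type: Put half of the time, otherwise one of the other
   * non-sentinel types (never Minimum or Maximum).
   */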
  public static KeyValue.Type generateKeyType(Random rand) {
    if (rand.nextBoolean()) {
      // Let's make half of KVs puts.
      return KeyValue.Type.Put;
    } else {
      KeyValue.Type keyType =
          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
        throw new RuntimeException("Generated an invalid key type: " + keyType
            + ". Probably the layout of KeyValue.Type has changed.");
      }
      return keyType;
    }
  }

}