/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

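/**
 * Tests that opening an HFile with {@link CacheConfig#PREFETCH_BLOCKS_ON_OPEN_KEY} enabled
 * prefetches the file's blocks into the block cache, and that a reader scanning the file
 * concurrently does not race with the prefetch thread.
 */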
@Category({IOTests.class, SmallTests.class})
public class TestPrefetch {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestPrefetch.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  // All KeyValue.Type values except Minimum and Maximum, which are never written to HFiles.
  private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
  private static final int DATA_BLOCK_SIZE = 2048;
  private static final int NUM_KV = 1000;
  private static final Random RNG = new Random();

  private Configuration conf;
  private CacheConfig cacheConf;
  private FileSystem fs;

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    // Enable prefetch-on-open globally so every file opened by this test is prefetched.
    conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
    fs = HFileSystem.get(conf);
    // Ensure the global block cache is instantiated so prefetched blocks have somewhere to go.
    CacheConfig.blockCacheDisabled = false;
    CacheConfig.instantiateBlockCache(conf);
    cacheConf = new CacheConfig(conf);
  }

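  /**
   * Prefetch can also be enabled per column family. Verify that the flag set on the
   * HColumnDescriptor takes effect even when the global configuration leaves it disabled.
   */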
  @Test
  public void testPrefetchSetInHCDWorks() {
    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("f"));
    hcd.setPrefetchBlocksOnOpen(true);
    Configuration c = HBaseConfiguration.create();
    assertFalse(c.getBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, false));
    CacheConfig cc = new CacheConfig(c, hcd);
    assertTrue(cc.shouldPrefetchOnOpen());
  }

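  /**
   * Write a store file, wait for the prefetch triggered on open to finish, and verify that
   * the data and index blocks ended up in the block cache.
   */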
  @Test
  public void testPrefetch() throws Exception {
    Path storeFile = writeStoreFile("TestPrefetch");
    readStoreFile(storeFile);
  }

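  /**
   * Scan freshly written store files while their prefetch is still in flight, several times
   * over, to surface races between the prefetch thread and a concurrent reader.
   */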
  @Test
  public void testPrefetchRace() throws Exception {
    for (int i = 0; i < 10; i++) {
      Path storeFile = writeStoreFile("TestPrefetchRace-" + i);
      readStoreFileLikeScanner(storeFile);
    }
  }

  /**
   * Read a storefile in the same manner as a scanner -- using non-positional reads and
   * without waiting for prefetch to complete.
   */
  private void readStoreFileLikeScanner(Path storeFilePath) throws Exception {
    // Open the file
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
    try {
      do {
        long offset = 0;
        while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
          HFileBlock block = reader.readBlock(offset, -1, false, /*pread=*/false,
              false, true, null, null);
          offset += block.getOnDiskSizeWithHeader();
        }
      } while (!reader.prefetchComplete());
    } finally {
      reader.close();
    }
  }

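  /**
   * Wait for the prefetch started by opening the file to complete, then walk every block up
   * to the load-on-open section and assert that data and index blocks are already cached.
   */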
  private void readStoreFile(Path storeFilePath) throws Exception {
    // Open the file
    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);

    while (!reader.prefetchComplete()) {
      // Sleep for a bit
      Thread.sleep(1000);
    }

    // Check that all of the data and index blocks were preloaded into the block cache
    BlockCache blockCache = cacheConf.getBlockCache();
    long offset = 0;
    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);
      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
      boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;
      if (block.getBlockType() == BlockType.DATA ||
          block.getBlockType() == BlockType.ROOT_INDEX ||
          block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {
        assertTrue(isCached);
      }
      offset += block.getOnDiskSizeWithHeader();
    }
    reader.close();
  }

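  /**
   * Write a store file of NUM_KV random KeyValues under the test data directory. Each random
   * key is sliced into a fixed-length row plus variable-length family and qualifier parts.
   */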
  private Path writeStoreFile(String fname) throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
    HFileContext meta = new HFileContextBuilder()
      .withBlockSize(DATA_BLOCK_SIZE)
      .build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(storeFileParentDir)
      .withComparator(CellComparatorImpl.COMPARATOR)
      .withFileContext(meta)
      .build();

    final int rowLen = 32;
    for (int i = 0; i < NUM_KV; ++i) {
      byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);
      byte[] v = RandomKeyValueUtil.randomValue(RNG);
      int cfLen = RNG.nextInt(k.length - rowLen + 1);
      // Slice the random key into row, family, and qualifier portions.
      KeyValue kv = new KeyValue(
          k, 0, rowLen,
          k, rowLen, cfLen,
          k, rowLen + cfLen, k.length - rowLen - cfLen,
          RNG.nextLong(),
          generateKeyType(RNG),
          v, 0, v.length);
      sfw.append(kv);
    }

    sfw.close();
    return sfw.getPath();
  }

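  /**
   * Pick a random KeyValue.Type: Put half of the time, otherwise one of the remaining valid
   * types (never Minimum or Maximum, which are reserved for internal use).
   */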
  public static KeyValue.Type generateKeyType(Random rand) {
    if (rand.nextBoolean()) {
      // Let's make half of KVs puts.
      return KeyValue.Type.Put;
    } else {
      KeyValue.Type keyType =
          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
        throw new RuntimeException("Generated an invalid key type: " + keyType
            + ". Probably the layout of KeyValue.Type has changed.");
      }
      return keyType;
    }
  }

}