/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, SmallTests.class })
public class TestStoreFileScannerWithTagCompression {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestStoreFileScannerWithTagCompression.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
  private static Path ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression");
  private static FileSystem fs = null;

  @BeforeClass
  public static void setUp() throws IOException {
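    // Tags are only persisted to HFiles in format version 3 or later.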
    conf.setInt("hfile.format.version", 3);
    fs = FileSystem.get(conf);
  }

  @Test
  public void testReseek() throws Exception {
    // write the file
    if (!fs.exists(ROOT_DIR)) {
      fs.mkdirs(ROOT_DIR);
    }
    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
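    // Tag compression on top of PREFIX data block encoding is the combination being exercised.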
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
      .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
      .withFileContext(meta).build();
    try {
      writeStoreFile(writer);
    } finally {
      writer.close();
    }

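    // Open the file for reading: the StoreFileInfo and its HFile metadata/index must be
    // initialized before a scanner can be obtained.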
    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
    storeFileInfo.initHFileInfo(context);
    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
    // Get a scanner that does not cache blocks and is not part of a compaction.
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
      // Reseek to the first possible cell on row "k2". That row was never written, so the
      // scanner positions itself at the next existing row, "k3".
      KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2"));
      s.reseek(k);
      // Step forward: the three next() calls return the cells for k3, k4 and then k5.
      Cell kv = s.next();
      kv = s.next();
      kv = s.next();
      byte[] key5 = Bytes.toBytes("k5");
      assertTrue(
        Bytes.equals(key5, 0, key5.length, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
      // The tag written for k5 must survive the tag compression round trip intact.
      List<Tag> tags = PrivateCellUtil.getTags(kv);
      assertEquals(1, tags.size());
      assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0))));
    } finally {
      s.close();
    }
  }

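  /**
   * Writes rows k1, k3, k4 and k5, each value carrying one or more tags. Row k2 is intentionally
   * absent so that the test can reseek to a missing row. The caller owns the writer and is
   * responsible for closing it.
   */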
  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
    byte[] fam = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");
    long now = EnvironmentEdgeManager.currentTime();
    byte[] b = Bytes.toBytes("k1");
    Tag t1 = new ArrayBackedTag((byte) 1, "tag1");
    Tag t2 = new ArrayBackedTag((byte) 2, "tag2");
    Tag t3 = new ArrayBackedTag((byte) 3, "tag3");
    writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t1 }));
    b = Bytes.toBytes("k3");
    writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t2, t1 }));
    b = Bytes.toBytes("k4");
    writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
    b = Bytes.toBytes("k5");
    writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
  }
}