/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

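/**
 * Verifies that cell tags survive a write/read round trip through a store file when the HFile
 * (version 3) is written with tag compression and PREFIX data block encoding, and that a
 * StoreFileScanner can reseek correctly over such a file.
 */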
@Category({RegionServerTests.class, SmallTests.class})
public class TestStoreFileScannerWithTagCompression {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestStoreFileScannerWithTagCompression.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
  private static String ROOT_DIR = TEST_UTIL.getDataTestDir(
      "TestStoreFileScannerWithTagCompression").toString();
  private static FileSystem fs = null;

  @BeforeClass
  public static void setUp() throws IOException {
    // Tags are only supported by the version 3 HFile format.
    conf.setInt("hfile.format.version", 3);
    fs = FileSystem.get(conf);
  }

  @Test
  public void testReseek() throws Exception {
    Path f = new Path(ROOT_DIR, "testReseek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
        .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    // Make a store file with compressed tags and PREFIX encoding, and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
        .withFileContext(meta).build();

    writeStoreFile(writer);
    // writeStoreFile() already closes the writer in its finally block; this close is redundant.
    writer.close();

    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader =
        new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
    // Load the file info and block indexes before opening a scanner on the reader.
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    // Args: cacheBlocks, pread, isCompaction, readPt, scannerOrder, canOptimizeForNonNullColumn.
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
      // Reseek to the first possible cell on row "k2". That row does not exist in the file, so
      // the scanner positions itself at the next existing row, "k3".
      KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2"));
      s.reseek(k);
      // Three next() calls step through k3 and k4 and return the cell on row "k5".
      Cell kv = s.next();
      kv = s.next();
      kv = s.next();
      byte[] key5 = Bytes.toBytes("k5");
      assertTrue(Bytes.equals(key5, 0, key5.length, kv.getRowArray(), kv.getRowOffset(),
          kv.getRowLength()));
      List<Tag> tags = PrivateCellUtil.getTags(kv);
      assertEquals(1, tags.size());
      assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0))));
    } finally {
      s.close();
    }
  }

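  /**
   * A minimal companion sketch of seek() (as opposed to reseek()), using only the APIs already
   * exercised in testReseek(): verifies that a forward seek on the same kind of tag-compressed,
   * PREFIX-encoded file lands on the requested row and that its tag decodes correctly. The test
   * name and the choice of row "k1" are this sketch's own.
   */
  @Test
  public void testSeek() throws Exception {
    Path f = new Path(ROOT_DIR, "testSeek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
        .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
        .withFileContext(meta).build();
    // writeStoreFile() closes the writer when it is done.
    writeStoreFile(writer);

    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader =
        new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
      // createFirstOnRow builds a key sorting before any real cell on "k1", so seek() lands on
      // the single cell of that row.
      s.seek(KeyValueUtil.createFirstOnRow(Bytes.toBytes("k1")));
      Cell kv = s.next();
      byte[] key1 = Bytes.toBytes("k1");
      assertTrue(Bytes.equals(key1, 0, key1.length, kv.getRowArray(), kv.getRowOffset(),
          kv.getRowLength()));
      List<Tag> tags = PrivateCellUtil.getTags(kv);
      assertEquals(1, tags.size());
      assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
    } finally {
      s.close();
    }
  }
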
  /** Writes rows k1, k3, k4, k5, each with a small tagged value, and closes the writer. */
  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
    byte[] fam = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");
    long now = System.currentTimeMillis();
    byte[] b = Bytes.toBytes("k1");
    Tag t1 = new ArrayBackedTag((byte) 1, "tag1");
    Tag t2 = new ArrayBackedTag((byte) 2, "tag2");
    Tag t3 = new ArrayBackedTag((byte) 3, "tag3");
    try {
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t1 }));
      b = Bytes.toBytes("k3");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t2, t1 }));
      b = Bytes.toBytes("k4");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
      b = Bytes.toBytes("k5");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
    } finally {
      writer.close();
    }
  }
}