/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

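/**
 * Verifies that a StoreFileScanner can reseek within a store file written with tag
 * compression and PREFIX data block encoding, and that the tags survive the round trip.
 */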
@Category({ RegionServerTests.class, SmallTests.class })
public class TestStoreFileScannerWithTagCompression {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestStoreFileScannerWithTagCompression.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
  private static String ROOT_DIR =
    TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression").toString();
  private static FileSystem fs = null;

  @BeforeClass
  public static void setUp() throws IOException {
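    // Tags are only persisted with HFile format version 3 or later.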
    conf.setInt("hfile.format.version", 3);
    fs = FileSystem.get(conf);
  }

  @Test
  public void testReseek() throws Exception {
    Path f = new Path(ROOT_DIR, "testReseek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
      .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    // Make a store file with tag compression enabled and write test data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
      .withFileContext(meta).build();
    // writeStoreFile closes the writer in its finally block.
    writeStoreFile(writer);

    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader =
      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
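    // Load the file meta and block index before creating scanners on the reader.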
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
      // Reseek to the first cell on row "k2". No such row was written, so the
      // scanner is positioned at the next existing row, "k3".
      KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2"));
      s.reseek(k);
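      // Each next() returns the current cell and then advances: k3, k4, then k5.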
      Cell kv = s.next();
      kv = s.next();
      kv = s.next();
      byte[] key5 = Bytes.toBytes("k5");
      assertTrue(
        Bytes.equals(key5, 0, key5.length, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
      List<Tag> tags = PrivateCellUtil.getTags(kv);
      assertEquals(1, tags.size());
      assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0))));
    } finally {
      s.close();
    }
  }

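  /**
   * Writes four rows (k1, k3, k4, k5), each cell carrying one or two tags, and closes the
   * writer. Row k2 is intentionally absent so the seek/reseek path has to handle a miss.
   */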
  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
    byte[] fam = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");
    long now = EnvironmentEdgeManager.currentTime();
    byte[] b = Bytes.toBytes("k1");
    Tag t1 = new ArrayBackedTag((byte) 1, "tag1");
    Tag t2 = new ArrayBackedTag((byte) 2, "tag2");
    Tag t3 = new ArrayBackedTag((byte) 3, "tag3");
    try {
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t1 }));
      b = Bytes.toBytes("k3");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t2, t1 }));
      b = Bytes.toBytes("k4");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
      b = Bytes.toBytes("k5");
      writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t3 }));
    } finally {
      writer.close();
    }
  }
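
  /**
   * A minimal companion sketch, assuming the same writer/reader plumbing as testReseek: a
   * plain seek() to the absent row "k2" should position the scanner on the next existing
   * row, "k3", with its two compressed tags decoded intact.
   */
  @Test
  public void testSeek() throws Exception {
    Path f = new Path(ROOT_DIR, "testSeek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
      .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
      .withFileContext(meta).build();
    writeStoreFile(writer);

    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader =
      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
      KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2"));
      assertTrue(s.seek(k));
      Cell kv = s.peek();
      byte[] key3 = Bytes.toBytes("k3");
      assertTrue(
        Bytes.equals(key3, 0, key3.length, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
      // k3 was written with two tags ({ t2, t1 }); both should survive tag compression.
      assertEquals(2, PrivateCellUtil.getTags(kv).size());
    } finally {
      s.close();
    }
  }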
}