/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Test StoreFileScanner, in particular that {@link StoreFileScanner#getFilesRead()} is empty
 * while the scanner is open and reports the single qualified store file path once the scanner
 * has been closed.
 */
@Category({ RegionServerTests.class, SmallTests.class })
public class TestStoreFileScanner {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestStoreFileScanner.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static final String TEST_FAMILY = "cf";

  /** Gives each test method its own data directory, keyed by method name. */
  @Rule
  public TestName name = new TestName();

  private Configuration conf;
  private Path testDir;
  private FileSystem fs;
  private CacheConfig cacheConf;

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
    fs = testDir.getFileSystem(conf);
    cacheConf = new CacheConfig(conf);
  }

  /**
   * Populates the given writer with 26*26 cells: one per two-letter row key "aa".."zz",
   * all in family {@link #TEST_FAMILY}, qualifier "col", with the row bytes as the value.
   * The caller is responsible for closing the writer.
   */
  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
    long now = EnvironmentEdgeManager.currentTime();
    byte[] family = Bytes.toBytes(TEST_FAMILY);
    byte[] qualifier = Bytes.toBytes("col");
    for (char d = 'a'; d <= 'z'; d++) {
      for (char e = 'a'; e <= 'z'; e++) {
        byte[] row = new byte[] { (byte) d, (byte) e };
        writer.append(new KeyValue(row, family, qualifier, now, row));
      }
    }
  }

  @Test
  public void testGetFilesRead() throws Exception {
    // Setup: region info, region fs, and HFile context; create store file and write data.
    final RegionInfo hri =
      RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
      new Path(testDir, hri.getTable().getNameAsString()), hri);
    HFileContext hFileContext = new HFileContextBuilder().withBlockSize(8 * 1024).build();

    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withFilePath(regionFs.createTempName()).withFileContext(hFileContext).build();
    writeStoreFile(writer);
    // Close BEFORE committing: close() finalizes the HFile (flushes remaining blocks and
    // writes the trailer/metadata). Committing the still-open temp file would move an
    // incomplete file into the store directory.
    writer.close();
    Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());

    // Open HStoreFile and reader; get qualified path and create StoreFileScanner.
    StoreFileTracker sft = StoreFileTrackerFactory.create(conf, false,
      StoreContext.getBuilder()
        .withFamilyStoreDirectoryPath(new Path(regionFs.getRegionDir(), TEST_FAMILY))
        .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
        .withRegionFileSystem(regionFs).build());
    HStoreFile file = new HStoreFile(fs, hsfPath, conf, cacheConf, BloomType.NONE, true, sft);
    file.initReader();
    StoreFileReader r = file.getReader();
    assertNotNull(r);
    Path qualifiedPath = fs.makeQualified(hsfPath);
    StoreFileScanner scanner = r.getStoreFileScanner(false, false, false, 0, 0, false);

    // Before close: getFilesRead must be empty.
    Set<Path> filesRead = scanner.getFilesRead();
    assertTrue("Should return empty set before closing scanner", filesRead.isEmpty());

    scanner.close();

    // After close: set must contain the single qualified store file path.
    filesRead = scanner.getFilesRead();
    assertEquals("Should return set with one file path after closing", 1, filesRead.size());
    assertTrue("Should contain the qualified file path", filesRead.contains(qualifiedPath));

    // Release the underlying reader so the test does not leak an open store file.
    file.closeStoreFile(true);
  }
}