/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

/**
 * Test StoreFileScanner
 */
@Tag(RegionServerTests.TAG)
@Tag(SmallTests.TAG)
public class TestStoreFileScanner {
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static final String TEST_FAMILY = "cf";

  private String methodName;

  private Configuration conf;
  private Path testDir;
  private FileSystem fs;
  private CacheConfig cacheConf;

  @BeforeEach
  public void setUp(TestInfo testInfo) throws IOException {
    this.methodName = testInfo.getTestMethod().get().getName();
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir(methodName);
    fs = testDir.getFileSystem(conf);
    cacheConf = new CacheConfig(conf);
  }

  private void writeStoreFile(final StoreFileWriter writer) throws IOException {
    long now = EnvironmentEdgeManager.currentTime();
    byte[] family = Bytes.toBytes(TEST_FAMILY);
    byte[] qualifier = Bytes.toBytes("col");
    for (char d = 'a'; d <= 'z'; d++) {
      for (char e = 'a'; e <= 'z'; e++) {
        byte[] row = new byte[] { (byte) d, (byte) e };
        writer.append(new KeyValue(row, family, qualifier, now, row));
      }
    }
  }

  @Test
  public void testGetFilesRead() throws Exception {
    // Setup: region info, region fs, and HFile context; create store file and write data.
    final RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(methodName)).build();
    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
      new Path(testDir, hri.getTable().getNameAsString()), hri);
    HFileContext hFileContext = new HFileContextBuilder().withBlockSize(8 * 1024).build();

    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withFilePath(regionFs.createTempName()).withFileContext(hFileContext).build();
    writeStoreFile(writer);
    // Finish the file (writes the HFile trailer) before committing it to the store directory.
    writer.close();
    Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());

    // Open HStoreFile and reader; get qualified path and create StoreFileScanner.
    StoreFileTracker sft = StoreFileTrackerFactory.create(conf, false,
      StoreContext.getBuilder()
        .withFamilyStoreDirectoryPath(new Path(regionFs.getRegionDir(), TEST_FAMILY))
        .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
        .withRegionFileSystem(regionFs).build());
    HStoreFile file = new HStoreFile(fs, hsfPath, conf, cacheConf, BloomType.NONE, true, sft);
    file.initReader();
    StoreFileReader r = file.getReader();
    assertNotNull(r);
    Path qualifiedPath = fs.makeQualified(hsfPath);
    StoreFileScanner scanner = r.getStoreFileScanner(false, false, false, 0, 0, false);

    // Before close: getFilesRead must be empty.
    Set<Path> filesRead = scanner.getFilesRead();
    assertTrue(filesRead.isEmpty(), "Should return empty set before closing scanner");

    scanner.close();

    // After close: set must contain the single qualified store file path.
    filesRead = scanner.getFilesRead();
    assertEquals(1, filesRead.size(), "Should return set with one file path after closing");
    assertTrue(filesRead.contains(qualifiedPath), "Should contain the qualified file path");
  }
}