001/*
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.master.cleaner;
019
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.MockServer;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
057
058/**
 * Test the HFileLink Cleaner. HFiles with links cannot be deleted until the links are removed.
060 */
061@Tag(MasterTests.TAG)
062@Tag(MediumTests.TAG)
063public class TestHFileLinkCleaner {
064
065  private Configuration conf;
066  private Path rootDir;
067  private FileSystem fs;
068  private TableName tableName;
069  private TableName tableLinkName;
070  private String hfileName;
071  private String familyName;
072  private RegionInfo hri;
073  private RegionInfo hriLink;
074  private Path archiveDir;
075  private Path archiveStoreDir;
076  private Path familyPath;
077  private Path hfilePath;
078  private Path familyLinkPath;
079  private String hfileLinkName;
080  private Path linkBackRefDir;
081  private Path linkBackRef;
082  private FileStatus[] backRefs;
083  private HFileCleaner cleaner;
084  private StoreFileTracker sft;
085  private HFileLink hfileLink;
086  private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
087  private static DirScanPool POOL;
088  private static final long TTL = 1000;
089
090  @BeforeAll
091  public static void setUp() {
092    POOL = DirScanPool.getHFileCleanerScanPool(TEST_UTIL.getConfiguration());
093  }
094
095  @AfterAll
096  public static void tearDown() {
097    POOL.shutdownNow();
098  }
099
100  @BeforeEach
101  public void configureDirectoriesAndLinks(TestInfo testInfo) throws IOException {
102    conf = TEST_UTIL.getConfiguration();
103    CommonFSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
104    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, HFileLinkCleaner.class.getName());
105    rootDir = CommonFSUtils.getRootDir(conf);
106    fs = FileSystem.get(conf);
107
108    tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
109    tableLinkName = TableName.valueOf(testInfo.getTestMethod().get().getName() + "-link");
110    hfileName = "1234567890";
111    familyName = "cf";
112
113    hri = RegionInfoBuilder.newBuilder(tableName).build();
114    hriLink = RegionInfoBuilder.newBuilder(tableLinkName).build();
115
116    archiveDir = HFileArchiveUtil.getArchivePath(conf);
117    archiveStoreDir =
118      HFileArchiveUtil.getStoreArchivePath(conf, tableName, hri.getEncodedName(), familyName);
119
120    // Create hfile /hbase/table-link/region/cf/getEncodedName.HFILE(conf);
121    familyPath = getFamilyDirPath(archiveDir, tableName, hri.getEncodedName(), familyName);
122    fs.mkdirs(familyPath);
123    hfilePath = new Path(familyPath, hfileName);
124    fs.createNewFile(hfilePath);
125
126    HRegionFileSystem regionFS = HRegionFileSystem.create(conf, fs,
127      CommonFSUtils.getTableDir(rootDir, tableLinkName), hriLink);
128    sft = StoreFileTrackerFactory.create(conf, true,
129      StoreContext.getBuilder()
130        .withFamilyStoreDirectoryPath(new Path(regionFS.getRegionDir(), familyName))
131        .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName))
132        .withRegionFileSystem(regionFS).build());
133    createLink(sft, true);
134
135    // Initialize cleaner
136    conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, TTL);
137    Server server = new DummyServer();
138    cleaner = new HFileCleaner(1000, server, conf, fs, archiveDir, POOL);
139  }
140
141  private void createLink(StoreFileTracker sft, boolean createBackReference) throws IOException {
142    // Create link to hfile
143    familyLinkPath = getFamilyDirPath(rootDir, tableLinkName, hriLink.getEncodedName(), familyName);
144    fs.mkdirs(familyLinkPath);
145    hfileLink =
146      sft.createHFileLink(hri.getTable(), hri.getEncodedName(), hfileName, createBackReference);
147    hfileLinkName = hfileName;
148    linkBackRefDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName);
149    assertTrue(fs.exists(linkBackRefDir));
150    backRefs = fs.listStatus(linkBackRefDir);
151    assertEquals(1, backRefs.length);
152    linkBackRef = backRefs[0].getPath();
153  }
154
155  @AfterEach
156  public void cleanup() throws IOException, InterruptedException {
157    // HFile can be removed
158    Thread.sleep(TTL * 2);
159    cleaner.chore();
160    assertFalse(fs.exists(hfilePath), "HFile should be deleted");
161    // Remove everything
162    for (int i = 0; i < 4; ++i) {
163      Thread.sleep(TTL * 2);
164      cleaner.chore();
165    }
166    assertFalse(fs.exists(CommonFSUtils.getTableDir(archiveDir, tableName)),
167      "HFile should be deleted");
168    assertFalse(fs.exists(CommonFSUtils.getTableDir(archiveDir, tableLinkName)),
169      "Link should be deleted");
170  }
171
172  @Test
173  public void testHFileLinkCleaning() throws Exception {
174    // Link backref cannot be removed
175    cleaner.chore();
176    // CommonFSUtils.
177    assertTrue(fs.exists(linkBackRef));
178    assertTrue(fs.exists(hfilePath));
179
180    // Link backref can be removed
181    fs.rename(CommonFSUtils.getTableDir(rootDir, tableLinkName),
182      CommonFSUtils.getTableDir(archiveDir, tableLinkName));
183    cleaner.chore();
184    assertFalse(fs.exists(linkBackRef), "Link should be deleted");
185  }
186
187  @Test
188  public void testHFileLinkByRemovingReference() throws Exception {
189    // Link backref cannot be removed
190    cleaner.chore();
191    assertTrue(fs.exists(linkBackRef));
192    assertTrue(fs.exists(hfilePath));
193
194    // simulate after removing the reference in data directory, the Link backref can be removed
195    Path linkPath = new Path(familyLinkPath,
196      HFileLink.createHFileLinkName(hri.getTable(), hri.getEncodedName(), hfileName));
197    HStoreFile storeFile =
198      new HStoreFile(new StoreFileInfo(conf, fs, linkPath, hfileLink), BloomType.NONE, null);
199    sft.removeStoreFiles(Collections.singletonList(storeFile));
200
201    cleaner.chore();
202    assertFalse(fs.exists(linkBackRef), "Link should be deleted");
203  }
204
205  @Test
206  public void testHFileLinkEmptyBackReferenceDirectory() throws Exception {
207    // simulate and remove the back reference
208    fs.delete(linkBackRef, false);
209    assertTrue(fs.exists(linkBackRefDir), "back reference directory still exists");
210    cleaner.chore();
211    assertFalse(fs.exists(linkBackRefDir), "back reference directory should be deleted");
212  }
213
214  private static Path getFamilyDirPath(final Path rootDir, final TableName table,
215    final String region, final String family) {
216    return new Path(new Path(CommonFSUtils.getTableDir(rootDir, table), region), family);
217  }
218
219  static class DummyServer extends MockServer {
220
221    @Override
222    public Configuration getConfiguration() {
223      return TEST_UTIL.getConfiguration();
224    }
225
226    @Override
227    public ZKWatcher getZooKeeper() {
228      try {
229        return new ZKWatcher(getConfiguration(), "dummy server", this);
230      } catch (IOException e) {
231        e.printStackTrace();
232      }
233      return null;
234    }
235  }
236}