/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.cleaner;

import static org.apache.hadoop.hbase.master.HMaster.HBASE_MASTER_CLEANER_INTERVAL;
import static org.apache.hadoop.hbase.master.cleaner.HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS;

import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

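/**
 * Verifies that the master cleaner chores remove archived HFiles both for a table whose archive
 * directory is routed to a custom-path HFileCleaner (table1) and for a table handled by the
 * default cleaner chain (table2).
 */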
@Category(LargeTests.class)
public class TestCleanerClearHFiles {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestCleanerClearHFiles.class);

  @Rule
  public TestName name = new TestName();

  private static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static Admin admin = null;

  private static final byte[] COLUMN_FAMILY = Bytes.toBytes("CF");

  private static final String TABLE1 = "table1";
  private static final String TABLE2 = "table2";
  private static final String DEFAULT_ARCHIVE_SUBDIRS_PREFIX = "data/default/";

  @BeforeClass
  public static void setupBeforeClass() throws Exception {
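    // Assign table1's archive directory to a dedicated custom-path HFileCleaner backed by
    // HFileLinkCleaner; table2's archive directory stays with the default cleaner chain.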
    conf.setStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS,
      DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE1);
    conf.setStrings(HFILE_CLEANER_CUSTOM_PATHS_PLUGINS, HFileLinkCleaner.class.getName());

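    // Expire archived HFiles almost immediately (TTL in ms) and run the cleaner chore every 20s.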
    conf.setInt(TimeToLiveHFileCleaner.TTL_CONF_KEY, 10);
    conf.setInt(HBASE_MASTER_CLEANER_INTERVAL, 20000);

    TEST_UTIL.startMiniCluster();
    admin = TEST_UTIL.getAdmin();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testClearArchive() throws Exception {
    DistributedFileSystem fs = TEST_UTIL.getDFSCluster().getFileSystem();
    Table table1 = createTable(TEST_UTIL, TableName.valueOf(TABLE1));
    Table table2 = createTable(TEST_UTIL, TableName.valueOf(TABLE2));

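    // Disabling and deleting the tables moves their store files into the archive directory
    // rather than removing them outright, which gives the cleaners something to clear.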
    admin.disableTable(table1.getName());
    admin.deleteTable(table1.getName());
    admin.disableTable(table2.getName());
    admin.deleteTable(table2.getName());

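    // Resolve each table's location under the archive directory (archive/data/default/<table>).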
    Path archiveDir = HFileArchiveUtil.getArchivePath(conf);
    Path archiveTable1Path = new Path(archiveDir, DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE1);
    Path archiveTable2Path = new Path(archiveDir, DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE2);

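    // First make sure the archived files actually showed up for both tables...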
    TEST_UTIL.waitFor(10000, () -> !notExistOrEmptyDir(archiveTable1Path, fs)
      && !notExistOrEmptyDir(archiveTable2Path, fs));

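    // ...then wait for the cleaner chores to empty both archive directories.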
    TEST_UTIL.waitFor(30000,
      () -> notExistOrEmptyDir(archiveTable1Path, fs) && notExistOrEmptyDir(archiveTable2Path, fs));
  }

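  /**
   * Returns true if {@code dir} does not exist or contains no entries.
   */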
  private boolean notExistOrEmptyDir(Path dir, DistributedFileSystem fs) {
    try {
      return fs.listStatus(dir).length == 0;
    } catch (Exception e) {
      return e instanceof FileNotFoundException;
    }
  }

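  /**
   * Creates a table with a single column family and no pre-split regions.
   */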
  private Table createTable(HBaseTestingUtil util, TableName tableName) throws IOException {
    TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(COLUMN_FAMILY).build()).build();
    return util.createTable(td, null);
  }
}