/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.backup;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

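/**
 * Test for {@link BackupHFileCleaner}: files registered as bulk loaded in the backup system
 * table must not be reported as deletable, while unreferenced files must be.
 */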
@Category({ MasterTests.class, SmallTests.class })
public class TestBackupHFileCleaner {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBackupHFileCleaner.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestBackupHFileCleaner.class);
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static Configuration conf = TEST_UTIL.getConfiguration();
  private static TableName tableName = TableName.valueOf("backup.hfile.cleaner");
  private static String famName = "fam";
  static FileSystem fs = null;
  Path root;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
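    // The cleaner consults the backup system table, so the backup subsystem must be enabled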
    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
    TEST_UTIL.startMiniZKCluster();
    TEST_UTIL.startMiniCluster(1);
    fs = FileSystem.get(conf);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    if (fs != null) {
      fs.close();
    }
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setup() throws IOException {
    root = TEST_UTIL.getDataTestDirOnTestFS();
  }

  @After
  public void cleanup() {
    try {
      fs.delete(root, true);
    } catch (IOException e) {
      LOG.warn("Failed to delete files recursively from path " + root, e);
    }
  }

  @Test
  public void testGetDeletableFiles() throws IOException {
    // 1. Create a file
    Path file = new Path(root, "testIsFileDeletableWithNoHFileRefs");
    fs.createNewFile(file);
    // 2. Assert file is successfully created
    assertTrue("Test file not created!", fs.exists(file));
    BackupHFileCleaner cleaner = new BackupHFileCleaner();
    cleaner.setConf(conf);
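    // Skip the check for fully backed up tables; this test only exercises bulk load references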
    cleaner.setCheckForFullyBackedUpTables(false);
    List<FileStatus> stats = new ArrayList<>();
    // Prime the cleaner with an initial pass over the backup system table
    cleaner.getDeletableFiles(stats);
    // 3. Assert that the file is deletable since no HFile reference exists for it
    FileStatus stat = fs.getFileStatus(file);
    stats.add(stat);
    Iterable<FileStatus> deletable = cleaner.getDeletableFiles(stats);
    boolean found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
        break;
      }
    }
    assertTrue("Cleaner should allow deleting this file as there is no HFile reference for it.",
      found);

    // 4. Register the file as a bulk loaded HFile
    List<Path> list = new ArrayList<>(1);
    list.add(file);
    try (Connection conn = ConnectionFactory.createConnection(conf);
      BackupSystemTable sysTbl = new BackupSystemTable(conn)) {
      List<TableName> sTableList = new ArrayList<>();
      sTableList.add(tableName);
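      // Record the file in the backup system table as bulk loaded for this table and family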
      @SuppressWarnings("unchecked")
      Map<byte[], List<Path>>[] maps = new Map[1];
      maps[0] = new HashMap<>();
      maps[0].put(Bytes.toBytes(famName), list);
      sysTbl.writeBulkLoadedFiles(sTableList, maps, "1");
    }

    // 5. Assert the file is no longer deletable now that it is referenced
    deletable = cleaner.getDeletableFiles(stats);
    found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
        break;
      }
    }
    assertFalse(
      "Cleaner should not allow deleting this file as an HFile reference exists for it.", found);
  }
}