/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.backup;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

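/**
 * Verifies that {@link BackupHFileCleaner} reports an HFile as deletable only while no
 * bulk-load reference for it exists in the backup system table.
 */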
@Category({ MasterTests.class, SmallTests.class })
public class TestBackupHFileCleaner {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBackupHFileCleaner.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestBackupHFileCleaner.class);
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static final Configuration conf = TEST_UTIL.getConfiguration();
  private static final TableName tableName = TableName.valueOf("backup.hfile.cleaner");
  private static final String famName = "fam";
  static FileSystem fs = null;
  Path root;

  /**
   * @throws Exception if starting the mini cluster or getting the filesystem fails
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
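    // Enable the backup feature so the backup system table and backup-related
    // cleaner machinery exercised below are available on the mini cluster.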
    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
    TEST_UTIL.startMiniZKCluster();
    TEST_UTIL.startMiniCluster(1);
    fs = FileSystem.get(conf);
  }

  /**
   * @throws Exception if closing the filesystem or shutting down the mini cluster fails
   */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    if (fs != null) {
      fs.close();
    }
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setup() throws IOException {
    root = TEST_UTIL.getDataTestDirOnTestFS();
  }

  @After
  public void cleanup() {
    try {
      fs.delete(root, true);
    } catch (IOException e) {
      LOG.warn("Failed to delete files recursively from path {}", root, e);
    }
  }

  @Test
  public void testGetDeletableFiles() throws IOException {
    // 1. Create a file
    Path file = new Path(root, "testGetDeletableFiles");
    fs.createNewFile(file);
    // 2. Assert that the file was successfully created
    assertTrue("Test file not created!", fs.exists(file));
    BackupHFileCleaner cleaner = new BackupHFileCleaner();
    cleaner.setConf(conf);
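    // Skip the check for fully-backed-up tables so the test exercises only the
    // bulk-load HFile reference lookup.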
    cleaner.setCheckForFullyBackedUpTables(false);
    // 3. Assert that the file, as is, is deletable
    List<FileStatus> stats = new ArrayList<>();
    FileStatus stat = fs.getFileStatus(file);
    stats.add(stat);
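    // getDeletableFiles() is deliberately called twice: the cleaner is conservative
    // about recently modified files and only considers a file deletable once it is
    // older than the cleaner's second-most-recent read of the backup system table,
    // so the first call merely advances that read timestamp.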
    Iterable<FileStatus> deletable = cleaner.getDeletableFiles(stats);
    deletable = cleaner.getDeletableFiles(stats);
    boolean found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
      }
    }
    assertTrue("Cleaner should allow deleting this file, as there is no HFile reference for it.",
      found);

    // 4. Register the file as a bulk-loaded HFile in the backup system table
    List<Path> list = new ArrayList<>(1);
    list.add(file);
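    // writeBulkLoadedFiles records the file under the given table and family; the
    // cleaner treats any file listed there as referenced and thus not deletable.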
    try (Connection conn = ConnectionFactory.createConnection(conf);
      BackupSystemTable sysTbl = new BackupSystemTable(conn)) {
      List<TableName> sTableList = new ArrayList<>();
      sTableList.add(tableName);
      @SuppressWarnings("unchecked")
      Map<byte[], List<Path>>[] maps = new Map[1];
      maps[0] = new HashMap<>();
      maps[0].put(Bytes.toBytes(famName), list);
      sysTbl.writeBulkLoadedFiles(sTableList, maps, "1");
    }

    // 5. Assert that the file is no longer deletable
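    // Called twice again, mirroring step 3, so the cleaner's read timestamps and its
    // view of the backup system table are refreshed before the deletability check.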
    deletable = cleaner.getDeletableFiles(stats);
    deletable = cleaner.getDeletableFiles(stats);
    found = false;
    for (FileStatus stat1 : deletable) {
      if (stat.equals(stat1)) {
        found = true;
      }
    }
    assertFalse(
      "Cleaner should not allow deleting this file, as there is an HFile reference for it.",
      found);
  }
}