/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

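/**
 * Verifies that {@link RSSnapshotVerifier} reports a snapshot as corrupted when a store file
 * referenced by the snapshot manifest has gone missing from the file system.
 */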
@Category({ RegionServerTests.class, MediumTests.class })
public class TestRSSnapshotVerifier {
  private static final Logger LOG = LoggerFactory.getLogger(TestRSSnapshotVerifier.class);

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRSSnapshotVerifier.class);

  private HBaseTestingUtil TEST_UTIL;
  private final TableName tableName = TableName.valueOf("TestRSSnapshotVerifier");
  private final byte[] cf = Bytes.toBytes("cf");
  private final SnapshotDescription snapshot =
    new SnapshotDescription("test-snapshot", tableName, SnapshotType.FLUSH);
  private SnapshotProtos.SnapshotDescription snapshotProto =
    ProtobufUtil.createHBaseProtosSnapshotDesc(snapshot);

  @Before
  public void setup() throws Exception {
    TEST_UTIL = new HBaseTestingUtil();
    TEST_UTIL.startMiniCluster(3);
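    // create a pre-split table, load data into the 'cf' family, and flush it to disk as store files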
    final byte[][] splitKeys = new RegionSplitter.HexStringSplit().split(10);
    Table table = TEST_UTIL.createTable(tableName, cf, splitKeys);
    TEST_UTIL.loadTable(table, cf, false);
    TEST_UTIL.getAdmin().flush(tableName);

    // prepare unverified snapshot
    Configuration conf = TEST_UTIL.getConfiguration();
    snapshotProto = SnapshotDescriptionUtils.validate(snapshotProto, conf);
    Path rootDir = CommonFSUtils.getRootDir(conf);
    Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshotProto, rootDir, conf);
    FileSystem workingDirFs = workingDir.getFileSystem(conf);
    if (!workingDirFs.exists(workingDir)) {
      workingDirFs.mkdirs(workingDir);
    }
    ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(snapshot.getName());
    SnapshotManifest manifest =
      SnapshotManifest.create(conf, workingDirFs, workingDir, snapshotProto, monitor);
    manifest.addTableDescriptor(
      TEST_UTIL.getHBaseCluster().getMaster().getTableDescriptors().get(tableName));
    SnapshotDescriptionUtils.writeSnapshotInfo(snapshotProto, workingDir, workingDirFs);
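    // record each online region's store files in the snapshot manifest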
    TEST_UTIL.getHBaseCluster().getRegions(tableName).forEach(r -> {
      try {
        r.addRegionToSnapshot(snapshotProto, monitor);
      } catch (IOException e) {
        LOG.warn("Failed to add region {} to snapshot", r.getRegionInfo(), e);
      }
    });
    manifest.consolidate();
  }

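  /**
   * Deletes a store file that the snapshot manifest references and expects region verification
   * to fail with a {@link org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException}.
   */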
  @Test(expected = org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException.class)
  public void testVerifyStoreFile() throws Exception {
    RSSnapshotVerifier verifier =
      TEST_UTIL.getHBaseCluster().getRegionServer(0).getRsSnapshotVerifier();
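    // pick a region that has store files and delete one of them, so the snapshot manifest
    // now references a file that no longer exists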
    HRegion region = TEST_UTIL.getHBaseCluster().getRegions(tableName).stream()
      .filter(r -> !r.getStore(cf).getStorefiles().isEmpty()).findFirst().get();
    Path filePath = new ArrayList<>(region.getStore(cf).getStorefiles()).get(0).getPath();
    TEST_UTIL.getDFSCluster().getFileSystem().delete(filePath, true);
    LOG.info("Deleted store file {}", filePath);
    verifier.verifyRegion(snapshotProto, region.getRegionInfo());
  }

  @After
  public void teardown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
}