/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseZKTestingUtil;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.cleaner.DirScanPool;
import org.apache.hadoop.hbase.master.region.MasterRegion;
import org.apache.hadoop.hbase.master.region.MasterRegionFactory;
import org.apache.hadoop.hbase.master.region.MasterRegionParams;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;

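/**
 * Base class for master state store tests. Creates a {@link MasterRegion} backed by the local
 * filesystem, together with the mocked {@link Server}, chore service, cleaner pools and mini
 * ZooKeeper cluster that the state store implementations need.
 */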
public abstract class MasterStateStoreTestBase {

  protected static HBaseZKTestingUtil UTIL = new HBaseZKTestingUtil();

  protected static MasterRegion REGION;

  protected static ChoreService CHORE_SERVICE;

  protected static DirScanPool HFILE_CLEANER_POOL;

  protected static DirScanPool LOG_CLEANER_POOL;

  protected static TableDescriptor TD =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("test:local"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(MasterRegionFactory.STATE_FAMILY)).build();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    Configuration conf = UTIL.getConfiguration();
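    // Plain heap memstores are fine here; a small test region does not need MSLAB chunk
    // allocation.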
    conf.setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
    // Runs on local filesystem. Test does not need sync. Turn off checks.
    conf.setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, false);
    CHORE_SERVICE = new ChoreService("TestMasterStateStore");
    HFILE_CLEANER_POOL = DirScanPool.getHFileCleanerScanPool(conf);
    LOG_CLEANER_POOL = DirScanPool.getLogCleanerScanPool(conf);
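    // Mock just enough of a Server for MasterRegion: configuration, server name and chore service.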
    Server server = mock(Server.class);
    when(server.getConfiguration()).thenReturn(conf);
    when(server.getServerName())
      .thenReturn(ServerName.valueOf("localhost", 12345, EnvironmentEdgeManager.currentTime()));
    when(server.getChoreService()).thenReturn(CHORE_SERVICE);
    Path testDir = UTIL.getDataTestDir();
    CommonFSUtils.setRootDir(conf, testDir);
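    // Create the backing local region, honoring any store file tracker configured for the test.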
    MasterRegionParams params = new MasterRegionParams();
    TableDescriptor td = TableDescriptorBuilder
      .newBuilder(TD).setValue(StoreFileTrackerFactory.TRACKER_IMPL, conf
        .get(StoreFileTrackerFactory.TRACKER_IMPL, StoreFileTrackerFactory.Trackers.DEFAULT.name()))
      .build();
    params.server(server).regionDirName("local").tableDescriptor(td)
      .flushSize(TableDescriptorBuilder.DEFAULT_MEMSTORE_FLUSH_SIZE).flushPerChanges(1_000_000)
      .flushIntervalMs(TimeUnit.MINUTES.toMillis(15)).compactMin(4).maxWals(32).useHsync(false)
      .ringBufferSlotCount(16).rollPeriodMs(TimeUnit.MINUTES.toMillis(15))
      .archivedWalSuffix(MasterRegionFactory.ARCHIVED_WAL_SUFFIX)
      .archivedHFileSuffix(MasterRegionFactory.ARCHIVED_HFILE_SUFFIX);
    REGION = MasterRegion.create(params);
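    // Subclasses also interact with ZooKeeper (hence HBaseZKTestingUtil), so start a mini cluster.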
    UTIL.startMiniZKCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws IOException {
    REGION.close(true);
    HFILE_CLEANER_POOL.shutdownNow();
    LOG_CLEANER_POOL.shutdownNow();
    CHORE_SERVICE.shutdown();
    UTIL.shutdownMiniZKCluster();
    UTIL.cleanupTestDir();
  }

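  /**
   * Deletes every row in the backing region so each test starts from an empty state store.
   */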
  protected static void cleanup() throws IOException {
    try (ResultScanner scanner = REGION.getScanner(new Scan())) {
      for (;;) {
        Result result = scanner.next();
        if (result == null) {
          break;
        }
        REGION.update(r -> r.delete(new Delete(result.getRow())));
      }
    }
  }
}