001/*
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.master.balancer;
019
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Size;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
054
055@Tag(LargeTests.TAG)
056public class TestCacheAwareLoadBalancer extends BalancerTestBase {
057
058  private static final Logger LOG = LoggerFactory.getLogger(TestCacheAwareLoadBalancer.class);
059
060  private static CacheAwareLoadBalancer loadBalancer;
061
062  static List<ServerName> servers;
063
064  static List<TableDescriptor> tableDescs;
065
066  static TableName[] tables = new TableName[] { TableName.valueOf("dt1"), TableName.valueOf("dt2"),
067    TableName.valueOf("dt3"), TableName.valueOf("dt4") };
068
069  private static List<ServerName> generateServers(int numServers) {
070    List<ServerName> servers = new ArrayList<>(numServers);
071    Random rand = ThreadLocalRandom.current();
072    for (int i = 0; i < numServers; i++) {
073      String host = "server" + rand.nextInt(100000);
074      int port = rand.nextInt(60000);
075      servers.add(ServerName.valueOf(host, port, -1));
076    }
077    return servers;
078  }
079
080  private static List<TableDescriptor> constructTableDesc(boolean hasBogusTable) {
081    List<TableDescriptor> tds = Lists.newArrayList();
082    for (int i = 0; i < tables.length; i++) {
083      TableDescriptor htd = TableDescriptorBuilder.newBuilder(tables[i]).build();
084      tds.add(htd);
085    }
086    return tds;
087  }
088
089  private ServerMetrics mockServerMetricsWithRegionCacheInfo(ServerName server,
090    List<RegionInfo> regionsOnServer, float currentCacheRatio, List<RegionInfo> oldRegionCacheInfo,
091    int oldRegionCachedSize, int regionSize) {
092    ServerMetrics serverMetrics = mock(ServerMetrics.class);
093    Map<byte[], RegionMetrics> regionLoadMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
094    for (RegionInfo info : regionsOnServer) {
095      RegionMetrics rl = mock(RegionMetrics.class);
096      when(rl.getReadRequestCount()).thenReturn(0L);
097      when(rl.getWriteRequestCount()).thenReturn(0L);
098      when(rl.getMemStoreSize()).thenReturn(Size.ZERO);
099      when(rl.getStoreFileSize()).thenReturn(Size.ZERO);
100      when(rl.getCurrentRegionCachedRatio()).thenReturn(currentCacheRatio);
101      when(rl.getRegionSizeMB()).thenReturn(new Size(regionSize, Size.Unit.MEGABYTE));
102      regionLoadMap.put(info.getRegionName(), rl);
103    }
104    when(serverMetrics.getRegionMetrics()).thenReturn(regionLoadMap);
105    Map<String, Integer> oldCacheRatioMap = new HashMap<>();
106    for (RegionInfo info : oldRegionCacheInfo) {
107      oldCacheRatioMap.put(info.getEncodedName(), oldRegionCachedSize);
108    }
109    when(serverMetrics.getRegionCachedInfo()).thenReturn(oldCacheRatioMap);
110    return serverMetrics;
111  }
112
113  @BeforeAll
114  public static void beforeAllTests() throws Exception {
115    servers = generateServers(3);
116    tableDescs = constructTableDesc(false);
117    Configuration conf = HBaseConfiguration.create();
118    conf.set(HConstants.BUCKET_CACHE_PERSISTENT_PATH_KEY, "prefetch_file_list");
119    loadBalancer = new CacheAwareLoadBalancer();
120    loadBalancer.setClusterInfoProvider(new DummyClusterInfoProvider(conf));
121    loadBalancer.loadConf(conf);
122  }
123
124  @Test
125  public void testBalancerNotThrowNPEWhenBalancerPlansIsNull() throws Exception {
126    Map<ServerName, List<RegionInfo>> clusterState = new HashMap<>();
127    ServerName server0 = servers.get(0);
128    ServerName server1 = servers.get(1);
129    ServerName server2 = servers.get(2);
130
131    List<RegionInfo> regionsOnServer0 = randomRegions(5);
132    List<RegionInfo> regionsOnServer1 = randomRegions(5);
133    List<RegionInfo> regionsOnServer2 = randomRegions(5);
134
135    clusterState.put(server0, regionsOnServer0);
136    clusterState.put(server1, regionsOnServer1);
137    clusterState.put(server2, regionsOnServer2);
138
139    // Mock cluster metrics
140    Map<ServerName, ServerMetrics> serverMetricsMap = new TreeMap<>();
141    serverMetricsMap.put(server0, mockServerMetricsWithRegionCacheInfo(server0, regionsOnServer0,
142      0.0f, new ArrayList<>(), 0, 10));
143    serverMetricsMap.put(server1, mockServerMetricsWithRegionCacheInfo(server1, regionsOnServer1,
144      0.0f, new ArrayList<>(), 0, 10));
145    serverMetricsMap.put(server2, mockServerMetricsWithRegionCacheInfo(server2, regionsOnServer2,
146      0.0f, new ArrayList<>(), 0, 10));
147
148    ClusterMetrics clusterMetrics = mock(ClusterMetrics.class);
149    when(clusterMetrics.getLiveServerMetrics()).thenReturn(serverMetricsMap);
150    loadBalancer.updateClusterMetrics(clusterMetrics);
151
152    Map<TableName, Map<ServerName, List<RegionInfo>>> LoadOfAllTable =
153      (Map) mockClusterServersWithTables(clusterState);
154    try {
155      List<RegionPlan> plans = loadBalancer.balanceCluster(LoadOfAllTable);
156      assertNull(plans);
157    } catch (NullPointerException npe) {
158      fail("NPE should not be thrown");
159    }
160  }
161}