/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.DNS;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ MiscTests.class, SmallTests.class })
public class TestHDFSBlocksDistribution {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHDFSBlocksDistribution.class);

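  /**
   * addHostsAndBlockWeight should ignore null and empty host arrays, accumulate weight per host
   * across calls, and track SSD weight separately from the overall host weight.
   */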
  @Test
  public void testAddHostsAndBlockWeight() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    // Null or empty host arrays must be ignored outright.
    distribution.addHostsAndBlockWeight(null, 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[0], 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    // Repeated additions for the same host accumulate: 101 + 202 = 303.
    distribution.addHostsAndBlockWeight(new String[] { "test" }, 101);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[] { "test" }, 202);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("test host should have weight 303", 303,
      distribution.getHostAndWeights().get("test").getWeight());
    distribution.addHostsAndBlockWeight(new String[] { "testTwo" }, 222);
    assertEquals("Should be two hosts", 2, distribution.getHostAndWeights().size());
    // The unique block weight sums every accepted addition: 101 + 202 + 222 = 525.
    assertEquals("Total weight should be 525", 525, distribution.getUniqueBlocksTotalWeight());
    // Weight tagged with StorageType.SSD counts toward both the host weight and the SSD weight.
    distribution.addHostsAndBlockWeight(new String[] { "test" }, 100,
      new StorageType[] { StorageType.SSD });
    assertEquals("test host should have weight 403", 403,
      distribution.getHostAndWeights().get("test").getWeight());
    assertEquals("test host should have weight for ssd 100", 100,
      distribution.getHostAndWeights().get("test").getWeightForSsd());
  }

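  /**
   * A distribution that always reports a single HostAndWeight whose host is null; used by
   * testAdd to check that merging such entries has no effect.
   */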
  public class MockHDFSBlocksDistribution extends HDFSBlocksDistribution {
    @Override
    public Map<String, HostAndWeight> getHostAndWeights() {
      HashMap<String, HostAndWeight> map = new HashMap<>();
      map.put("test", new HostAndWeight(null, 100, 0));
      return map;
    }
  }

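  /**
   * Merging a distribution that only reports a host-less entry should change neither the host
   * map nor the unique block weight.
   */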
  @Test
  public void testAdd() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    // The mock only reports a null host, so merging it should leave the distribution empty.
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[] { "test" }, 10);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    // Merging the mock again must not disturb the existing host or the unique block weight.
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("Total weight should be 10", 10, distribution.getUniqueBlocksTotalWeight());
  }

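  /**
   * Blocks recorded under "localhost" should count as local for the machine's real hostname, so
   * getBlockLocalityIndex(currentHost) becomes non-zero while unrelated hosts stay at 0.0.
   */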
  @Test
  public void testLocalHostCompatibility() throws Exception {
    String currentHost = DNS.getDefaultHost("default", "default");
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    // An empty distribution has no local blocks for any host.
    assertEquals("Locality should be 0.0", 0.0, distribution.getBlockLocalityIndex(currentHost),
      0.01);
    // Blocks recorded under "localhost" should be attributed to the machine's real hostname.
    distribution.addHostsAndBlockWeight(new String[] { "localhost" }, 10);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("Locality should be 0.0", 0.0, distribution.getBlockLocalityIndex("test"), 0.01);
    assertNotEquals("Locality should not be 0.0", 0.0,
      distribution.getBlockLocalityIndex(currentHost), 0.01);
  }
}