/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.DNS;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

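/**
 * Unit tests for {@link HDFSBlocksDistribution}: per-host block weight
 * accumulation, merging of distributions, and localhost/hostname
 * compatibility of the block locality index.
 */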
@Category({MiscTests.class, SmallTests.class})
public class TestHDFSBlocksDistribution {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHDFSBlocksDistribution.class);

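  /**
   * Null or empty host arrays are ignored, repeated additions for the same host
   * accumulate its weight, and the {@link StorageType} overload tracks the SSD
   * portion of a host's weight separately.
   */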
  @Test
  public void testAddHostsAndBlockWeight() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    distribution.addHostsAndBlockWeight(null, 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[0], 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[] {"test"}, 101);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[] {"test"}, 202);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("test host should have weight 303", 303,
        distribution.getHostAndWeights().get("test").getWeight());
    distribution.addHostsAndBlockWeight(new String[] {"testTwo"}, 222);
    assertEquals("Should be two hosts", 2, distribution.getHostAndWeights().size());
    assertEquals("Total weight should be 525", 525, distribution.getUniqueBlocksTotalWeight());
    distribution.addHostsAndBlockWeight(new String[] {"test"}, 100,
        new StorageType[] { StorageType.SSD });
    assertEquals("test host should have weight 403", 403,
        distribution.getHostAndWeights().get("test").getWeight());
    assertEquals("test host should have weight for ssd 100", 100,
        distribution.getHostAndWeights().get("test").getWeightForSsd());
  }

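  /**
   * Stub distribution that reports a single entry with a {@code null} host and
   * weight 100, used to exercise {@link HDFSBlocksDistribution#add(HDFSBlocksDistribution)}.
   */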
  public class MockHDFSBlocksDistribution extends HDFSBlocksDistribution {
    @Override
    public Map<String, HostAndWeight> getHostAndWeights() {
      HashMap<String, HostAndWeight> map = new HashMap<>();
      map.put("test", new HostAndWeight(null, 100, 0));
      return map;
    }
  }

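  /**
   * Merging the mock distribution, whose only entry has a {@code null} host, should
   * leave both the host count and the unique-block total weight unchanged.
   */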
  @Test
  public void testAdd() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
    distribution.addHostsAndBlockWeight(new String[]{"test"}, 10);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("Total weight should be 10", 10, distribution.getUniqueBlocksTotalWeight());
  }

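  /**
   * Blocks recorded against "localhost" should count toward the locality index of
   * the machine's resolved hostname, but not toward an unrelated host name.
   */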
  @Test
  public void testLocalHostCompatibility() throws Exception {
    String currentHost = DNS.getDefaultHost("default", "default");
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    assertEquals("Locality should be 0.0", 0.0,
      distribution.getBlockLocalityIndex(currentHost), 0.01);
    distribution.addHostsAndBlockWeight(new String[] { "localhost" }, 10);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("Locality should be 0.0", 0.0,
      distribution.getBlockLocalityIndex("test"), 0.01);
    assertNotEquals("Locality should not be 0.0", 0.0,
      distribution.getBlockLocalityIndex(currentHost), 0.01);
  }

}