/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

// JUnit 4 assertion import (org.junit.Assert), replacing the deprecated
// JUnit 3 junit.framework.Assert that was imported before.
import static org.junit.Assert.assertEquals;

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Unit tests for {@link HDFSBlocksDistribution}: host/weight bookkeeping via
 * {@code addHostsAndBlockWeight} and merging via {@code add}.
 */
@Category({ MiscTests.class, SmallTests.class })
public class TestHDFSBlocksDistribution {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHDFSBlocksDistribution.class);

  /**
   * Verifies that {@code addHostsAndBlockWeight} ignores null and empty host
   * arrays, accumulates weight for a repeated host, and tracks distinct hosts
   * plus the unique-blocks total weight.
   */
  @Test
  public void testAddHostsAndBlockWeight() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();

    // A null host array must be silently ignored.
    distribution.addHostsAndBlockWeight(null, 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());

    // An empty host array must also be ignored.
    distribution.addHostsAndBlockWeight(new String[0], 100);
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());

    distribution.addHostsAndBlockWeight(new String[] { "test" }, 101);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());

    // Adding weight for an existing host accumulates (101 + 202 = 303)
    // rather than replacing the previous weight.
    distribution.addHostsAndBlockWeight(new String[] { "test" }, 202);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("test host should have weight 303", 303,
        distribution.getHostAndWeights().get("test").getWeight());

    distribution.addHostsAndBlockWeight(new String[] { "testTwo" }, 222);
    assertEquals("Should be two hosts", 2, distribution.getHostAndWeights().size());
    // Unique-blocks total = 101 + 202 + 222 = 525 across both hosts.
    assertEquals("Total weight should be 525", 525, distribution.getUniqueBlocksTotalWeight());
  }

  /**
   * Stub distribution whose {@code getHostAndWeights} always reports a single
   * "test" host with weight 100; used by {@link #testAdd()} to exercise
   * {@code add}. Declared {@code static}: it never touches the enclosing test
   * instance, so it should not carry a hidden reference to it.
   */
  public static class MockHDFSBlocksDistribution extends HDFSBlocksDistribution {
    @Override
    public Map<String, HostAndWeight> getHostAndWeights() {
      HashMap<String, HostAndWeight> map = new HashMap<>();
      map.put("test", new HostAndWeight(null, 100));
      return map;
    }
  }

  /**
   * Verifies {@code add} behavior when merging another distribution.
   * NOTE(review): per the original assertions, merging the mock does not
   * create hosts or change the unique-blocks total weight — presumably
   * {@code add} reads the argument's internal state rather than the
   * overridden {@code getHostAndWeights}; confirm against
   * {@code HDFSBlocksDistribution#add}.
   */
  @Test
  public void testAdd() throws Exception {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();

    // Merging the mock into an empty distribution yields no host entries.
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());

    distribution.addHostsAndBlockWeight(new String[] { "test" }, 10);
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());

    // Merging the mock again must not add hosts or alter the total weight.
    distribution.add(new MockHDFSBlocksDistribution());
    assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
    assertEquals("Total weight should be 10", 10, distribution.getUniqueBlocksTotalWeight());
  }
}