/**
 * Copyright 2011 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * Data structure to describe the distribution of HDFS blocks among hosts.
 *
 * Adding erroneous data will be ignored silently.
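 *
 * <p>A minimal usage sketch (the host names and block size below are
 * hypothetical, purely for illustration):
 *
 * <pre>
 *   HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
 *   // one 128MB block replicated on three hosts
 *   distribution.addHostsAndBlockWeight(
 *     new String[] {"host1", "host2", "host3"}, 128 * 1024 * 1024L);
 *   distribution.getBlockLocalityIndex("host1"); // 1.0f, all blocks local
 * </pre>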
 */
public class HDFSBlocksDistribution {
  private Map<String,HostAndWeight> hostAndWeights = null;
  private long uniqueBlocksTotalWeight = 0;

  /**
   * Stores a hostname and the total weight attributed to that host.
   *
   * This is used when determining the physical locations of the blocks making
   * up a region.
   *
   * To build a prioritized list of the hosts holding the most data of a
   * region, this class accumulates the total weight for each host.  The
   * weight is currently just the size of the file.
   */
  public static class HostAndWeight {

    private String host;
    private long weight;

    /**
     * Constructor
     * @param host the host name
     * @param weight the weight
     */
    public HostAndWeight(String host, long weight) {
      this.host = host;
      this.weight = weight;
    }

    /**
     * add weight
     * @param weight the weight
     */
    public void addWeight(long weight) {
      this.weight += weight;
    }

    /**
     * @return the host name
     */
    public String getHost() {
      return host;
    }

    /**
     * @return the weight
     */
    public long getWeight() {
      return weight;
    }

    /**
     * Comparator used to sort hosts based on weight; ties are broken by
     * hostname so that the ordering is total and stable.
     */
    public static class WeightComparator implements Comparator<HostAndWeight> {
      @Override
      public int compare(HostAndWeight l, HostAndWeight r) {
        if (l.getWeight() == r.getWeight()) {
          return l.getHost().compareTo(r.getHost());
        }
        return l.getWeight() < r.getWeight() ? -1 : 1;
      }
    }
  }

  /**
   * Constructor
   */
  public HDFSBlocksDistribution() {
    this.hostAndWeights = new TreeMap<String,HostAndWeight>();
  }

  /**
   * @see java.lang.Object#toString()
   */
  @Override
  public synchronized String toString() {
    return "number of unique hosts in the distribution=" +
      this.hostAndWeights.size();
  }

  /**
   * Add some weight to a list of hosts and update the total unique block
   * weight.
   * @param hosts the list of hosts
   * @param weight the weight
   */
  public void addHostsAndBlockWeight(String[] hosts, long weight) {
    if (hosts == null || hosts.length == 0) {
      // erroneous data
      return;
    }

    addUniqueWeight(weight);
    for (String hostname : hosts) {
      addHostAndBlockWeight(hostname, weight);
    }
  }
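
  // A hedged sketch of how callers typically feed this method: walk the
  // block locations of an HDFS file and add each block's hosts and length.
  // The fs/status/distribution names below are illustrative assumptions,
  // not part of this class:
  //
  //   BlockLocation[] locations =
  //     fs.getFileBlockLocations(status, 0, status.getLen());
  //   for (BlockLocation location : locations) {
  //     distribution.addHostsAndBlockWeight(
  //       location.getHosts(), location.getLength());
  //   }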

  /**
   * add some weight to the total unique weight
   * @param weight the weight
   */
  private void addUniqueWeight(long weight) {
    uniqueBlocksTotalWeight += weight;
  }

  /**
   * add some weight to a specific host
   * @param host the host name
   * @param weight the weight
   */
  private void addHostAndBlockWeight(String host, long weight) {
    if (host == null) {
      // erroneous data
      return;
    }

    HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
    if (hostAndWeight == null) {
      hostAndWeight = new HostAndWeight(host, weight);
      this.hostAndWeights.put(host, hostAndWeight);
    } else {
      hostAndWeight.addWeight(weight);
    }
  }

  /**
   * @return the hosts and their weights
   */
  public Map<String,HostAndWeight> getHostAndWeights() {
    return this.hostAndWeights;
  }

  /**
   * Return the weight for a specific host, which is the total bytes of all
   * blocks on that host.
   * @param host the host name
   * @return the weight of the given host
   */
  public long getWeight(String host) {
    long weight = 0;
    if (host != null) {
      HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
      if (hostAndWeight != null) {
        weight = hostAndWeight.getWeight();
      }
    }
    return weight;
  }

  /**
   * @return the sum of all unique blocks' weight
   */
  public long getUniqueBlocksTotalWeight() {
    return uniqueBlocksTotalWeight;
  }

  /**
   * Return the locality index of a given host, i.e. the fraction of the
   * total unique block weight that is local to that host.
   * @param host the host name
   * @return the locality index of the given host, in the range [0, 1]
   */
  public float getBlockLocalityIndex(String host) {
    float localityIndex = 0;
    HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
    if (hostAndWeight != null && uniqueBlocksTotalWeight != 0) {
      localityIndex = (float) hostAndWeight.weight / (float) uniqueBlocksTotalWeight;
    }
    return localityIndex;
  }
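
  // Worked example (hostA/hostB are hypothetical): two 100-byte blocks,
  // the first replicated on both hosts, the second only on hostA.
  //
  //   distribution.addHostsAndBlockWeight(new String[] {"hostA", "hostB"}, 100);
  //   distribution.addHostsAndBlockWeight(new String[] {"hostA"}, 100);
  //   distribution.getBlockLocalityIndex("hostA"); // 200/200 = 1.0f
  //   distribution.getBlockLocalityIndex("hostB"); // 100/200 = 0.5f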

  /**
   * Add the distribution of another HDFSBlocksDistribution to this one.
   * @param otherBlocksDistribution the other hdfs blocks distribution
   */
  public void add(HDFSBlocksDistribution otherBlocksDistribution) {
    Map<String,HostAndWeight> otherHostAndWeights =
      otherBlocksDistribution.getHostAndWeights();
    for (Map.Entry<String, HostAndWeight> otherHostAndWeight :
      otherHostAndWeights.entrySet()) {
      addHostAndBlockWeight(otherHostAndWeight.getValue().host,
        otherHostAndWeight.getValue().weight);
    }
    addUniqueWeight(otherBlocksDistribution.getUniqueBlocksTotalWeight());
  }
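
  // A hedged aggregation sketch: per-file distributions merged into a single
  // per-region view. regionDistribution and fileDistributions are
  // hypothetical names, not part of this class:
  //
  //   HDFSBlocksDistribution regionDistribution = new HDFSBlocksDistribution();
  //   for (HDFSBlocksDistribution fileDistribution : fileDistributions) {
  //     regionDistribution.add(fileDistribution);
  //   }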

  /**
   * Return the list of hosts, sorted by weight in descending order.
   */
  public List<String> getTopHosts() {
    HostAndWeight[] hostAndWeights = getTopHostsWithWeights();
    List<String> topHosts = new ArrayList<String>(hostAndWeights.length);
    for (HostAndWeight haw : hostAndWeights) {
      topHosts.add(haw.getHost());
    }
    return topHosts;
  }

  /**
   * Return the hosts and their weights, sorted by weight in descending order.
   */
  public HostAndWeight[] getTopHostsWithWeights() {
    NavigableSet<HostAndWeight> orderedHosts = new TreeSet<HostAndWeight>(
      new HostAndWeight.WeightComparator());
    orderedHosts.addAll(this.hostAndWeights.values());
    return orderedHosts.descendingSet().toArray(new HostAndWeight[orderedHosts.size()]);
  }
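
  // Usage sketch: the heaviest host comes first, so the head of the list is
  // a natural candidate for data-local placement (topHosts is a hypothetical
  // local variable):
  //
  //   List<String> topHosts = distribution.getTopHosts();
  //   String bestHost = topHosts.isEmpty() ? null : topHosts.get(0);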

}