/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.hadoop.hbase.classification.InterfaceAudience;

/**
 * Data structure to describe the distribution of HDFS blocks among hosts.
 *
 * Attempts to add erroneous data (such as a null or empty host list, or a
 * null host name) are silently ignored.
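 *
 * <p>A minimal usage sketch (the host names and block sizes below are made
 * up for illustration): each block's weight counts once toward the unique
 * total but once per replica host.
 * <pre>{@code
 * HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
 * // one 128 MB block with replicas on three hypothetical hosts
 * distribution.addHostsAndBlockWeight(
 *     new String[] { "host1", "host2", "host3" }, 134217728L);
 * // one 64 MB block with replicas on two of those hosts
 * distribution.addHostsAndBlockWeight(
 *     new String[] { "host1", "host2" }, 67108864L);
 * distribution.getUniqueBlocksTotalWeight();   // 201326592
 * distribution.getWeight("host1");             // 201326592
 * distribution.getWeight("host3");             // 134217728
 * distribution.getBlockLocalityIndex("host3"); // ~0.67f
 * distribution.getTopHosts();                  // [host2, host1, host3]
 * }</pre>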
 */
@InterfaceAudience.Private
public class HDFSBlocksDistribution {
  private Map<String,HostAndWeight> hostAndWeights = null;
  private long uniqueBlocksTotalWeight = 0;

  /**
   * Stores a host name and the total weight accumulated for that host.
   *
   * This is used when determining the physical locations of the blocks making
   * up a region.
   *
   * To make a prioritized list of the hosts holding the most data of a region,
   * this class is used to count the total weight for each host.  The weight is
   * currently just the size of the file, in bytes.
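   *
   * <p>For example, if two blocks of 100 and 50 bytes each have a replica on
   * the same host, that host's accumulated weight is 150.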
   */
  public static class HostAndWeight {

    private final String host;
    private long weight;

    /**
     * Constructor
     * @param host the host name
     * @param weight the weight
     */
    public HostAndWeight(String host, long weight) {
      this.host = host;
      this.weight = weight;
    }

    /**
     * Adds the given weight to this host's running total.
     * @param weight the weight to add
     */
    public void addWeight(long weight) {
      this.weight += weight;
    }

    /**
     * @return the host name
     */
    public String getHost() {
      return host;
    }

    /**
     * @return the weight
     */
    public long getWeight() {
      return weight;
    }

    /**
     * Comparator used to sort hosts based on weight; hosts with equal weight
     * are ordered by host name.
     */
    public static class WeightComparator implements Comparator<HostAndWeight> {
      @Override
      public int compare(HostAndWeight l, HostAndWeight r) {
        if (l.getWeight() == r.getWeight()) {
          return l.getHost().compareTo(r.getHost());
        }
        return l.getWeight() < r.getWeight() ? -1 : 1;
      }
    }
  }

  /**
   * Constructor
   */
  public HDFSBlocksDistribution() {
    this.hostAndWeights = new TreeMap<String,HostAndWeight>();
  }

  /**
   * @see java.lang.Object#toString()
   */
  @Override
  public synchronized String toString() {
    return "number of unique hosts in the distribution=" +
      this.hostAndWeights.size();
  }

  /**
   * Adds some weight to a list of hosts and updates the total unique block
   * weight. The weight is counted once toward the unique total but once per
   * host, since each host in the list holds a replica of the same block.
   * @param hosts the list of hosts
   * @param weight the weight
   */
  public void addHostsAndBlockWeight(String[] hosts, long weight) {
    if (hosts == null || hosts.length == 0) {
      // erroneous data
      return;
    }

    addUniqueWeight(weight);
    for (String hostname : hosts) {
      addHostAndBlockWeight(hostname, weight);
    }
  }

  /**
   * Adds some weight to the total unique block weight.
   * @param weight the weight
   */
  private void addUniqueWeight(long weight) {
    uniqueBlocksTotalWeight += weight;
  }

  /**
   * Adds some weight to a specific host.
   * @param host the host name
   * @param weight the weight
   */
  private void addHostAndBlockWeight(String host, long weight) {
    if (host == null) {
      // erroneous data
      return;
    }

    HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
    if (hostAndWeight == null) {
      hostAndWeight = new HostAndWeight(host, weight);
      this.hostAndWeights.put(host, hostAndWeight);
    } else {
      hostAndWeight.addWeight(weight);
    }
  }

  /**
   * @return the hosts and their weights
   */
  public Map<String,HostAndWeight> getHostAndWeights() {
    return this.hostAndWeights;
  }

  /**
   * Returns the weight for a specific host, that is, the total bytes of all
   * blocks on the host.
   * @param host the host name
   * @return the weight of the given host
   */
  public long getWeight(String host) {
    long weight = 0;
    if (host != null) {
      HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
      if (hostAndWeight != null) {
        weight = hostAndWeight.getWeight();
      }
    }
    return weight;
  }

  /**
   * @return the sum of all unique blocks' weight
   */
  public long getUniqueBlocksTotalWeight() {
    return uniqueBlocksTotalWeight;
  }

  /**
   * Returns the locality index of a given host: the fraction of the total
   * unique block weight that is held on that host, or 0 if no blocks have
   * been added.
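   *
   * <p>A worked example with hypothetical numbers: if 300 bytes of unique
   * blocks have been added in total and 120 of those bytes have a replica on
   * {@code hostA}, then {@code getBlockLocalityIndex("hostA")} returns
   * {@code 120f / 300f = 0.4f}.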
   * @param host the host name
   * @return the locality index of the given host
   */
  public float getBlockLocalityIndex(String host) {
    float localityIndex = 0;
    HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
    if (hostAndWeight != null && uniqueBlocksTotalWeight != 0) {
      localityIndex = (float) hostAndWeight.weight / (float) uniqueBlocksTotalWeight;
    }
    return localityIndex;
  }

  /**
   * Merges the given distribution into this one: each host's weight and the
   * unique blocks total weight are added together.
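   *
   * <p>A small sketch with hypothetical hosts: merging a distribution that
   * holds 100 bytes on {@code hostA} into one that already holds 50 bytes on
   * {@code hostA} yields 150 bytes on {@code hostA}, and the two unique
   * totals are likewise summed.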
   * @param otherBlocksDistribution the other HDFS blocks distribution
   */
  public void add(HDFSBlocksDistribution otherBlocksDistribution) {
    Map<String,HostAndWeight> otherHostAndWeights =
      otherBlocksDistribution.getHostAndWeights();
    for (Map.Entry<String, HostAndWeight> otherHostAndWeight:
      otherHostAndWeights.entrySet()) {
      addHostAndBlockWeight(otherHostAndWeight.getValue().host,
        otherHostAndWeight.getValue().weight);
    }
    addUniqueWeight(otherBlocksDistribution.getUniqueBlocksTotalWeight());
  }

  /**
   * Returns the list of hosts, sorted from highest to lowest weight.
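   *
   * <p>For example, with hypothetical hosts {@code hostA} at weight 200 and
   * {@code hostB} at weight 100, this returns {@code [hostA, hostB]}. Hosts
   * with equal weight come out in descending host-name order, because the
   * entire comparator ordering is reversed.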
   */
  public List<String> getTopHosts() {
    HostAndWeight[] hostAndWeights = getTopHostsWithWeights();
    List<String> topHosts = new ArrayList<String>(hostAndWeights.length);
    for (HostAndWeight haw : hostAndWeights) {
      topHosts.add(haw.getHost());
    }
    return topHosts;
  }

  /**
   * Returns the hosts and their weights, sorted from highest to lowest weight.
   */
  public HostAndWeight[] getTopHostsWithWeights() {
    NavigableSet<HostAndWeight> orderedHosts = new TreeSet<HostAndWeight>(
      new HostAndWeight.WeightComparator());
    orderedHosts.addAll(this.hostAndWeights.values());
    return orderedHosts.descendingSet().toArray(new HostAndWeight[orderedHosts.size()]);
  }

}