001/* 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.mapreduce; 019 020import java.io.IOException; 021import org.apache.hadoop.conf.Configuration; 022import org.apache.hadoop.conf.Configured; 023import org.apache.hadoop.fs.Path; 024import org.apache.hadoop.hbase.Cell; 025import org.apache.hadoop.hbase.CellUtil; 026import org.apache.hadoop.hbase.CompareOperator; 027import org.apache.hadoop.hbase.HBaseConfiguration; 028import org.apache.hadoop.hbase.HConstants; 029import org.apache.hadoop.hbase.client.Result; 030import org.apache.hadoop.hbase.client.Scan; 031import org.apache.hadoop.hbase.filter.Filter; 032import org.apache.hadoop.hbase.filter.PrefixFilter; 033import org.apache.hadoop.hbase.filter.RegexStringComparator; 034import org.apache.hadoop.hbase.filter.RowFilter; 035import org.apache.hadoop.hbase.io.ImmutableBytesWritable; 036import org.apache.hadoop.hbase.util.Bytes; 037import org.apache.hadoop.io.LongWritable; 038import org.apache.hadoop.io.Text; 039import org.apache.hadoop.mapreduce.Job; 040import org.apache.hadoop.mapreduce.Reducer; 041import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; 042import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; 043import org.apache.hadoop.util.Tool; 044import org.apache.hadoop.util.ToolRunner; 045import org.apache.yetus.audience.InterfaceAudience; 046import org.slf4j.Logger; 047import org.slf4j.LoggerFactory; 048 049import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; 050 051/** 052 * A job with a a map and reduce phase to count cells in a table. The counter lists the following 053 * stats for a given table: 054 * 055 * <pre> 056 * 1. Total number of rows in the table 057 * 2. Total number of CFs across all rows 058 * 3. Total qualifiers across all rows 059 * 4. Total occurrence of each CF 060 * 5. Total occurrence of each qualifier 061 * 6. Total number of versions of each qualifier. 062 * 7. Total size of serialized cells of each CF. 063 * 8. Total size of serialized cells of each qualifier. 064 * 9. Total size of serialized cells across all rows. 065 * </pre> 066 * 067 * The cellcounter can take optional parameters to use a user supplied row/family/qualifier string 068 * to use in the report and second a regex based or prefix based row filter to restrict the count 069 * operation to a limited subset of rows from the table or a start time and/or end time to limit the 070 * count to a time range. 071 */ 072@InterfaceAudience.Public 073public class CellCounter extends Configured implements Tool { 074 private static final Logger LOG = LoggerFactory.getLogger(CellCounter.class.getName()); 075 076 /** 077 * Name of this 'program'. 078 */ 079 static final String NAME = "CellCounter"; 080 081 private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name"; 082 083 /** 084 * Mapper that runs the count. 085 */ 086 static class CellCounterMapper extends TableMapper<Text, LongWritable> { 087 /** 088 * Counter enumeration to count the actual rows. 089 */ 090 public static enum Counters { 091 ROWS, 092 CELLS, 093 SIZE 094 } 095 096 private Configuration conf; 097 private String separator; 098 099 // state of current row, family, column needs to persist across map() invocations 100 // in order to properly handle scanner batching, where a single qualifier may have too 101 // many versions for a single map() call 102 private byte[] lastRow; 103 private String currentRowKey; 104 byte[] currentFamily = null; 105 String currentFamilyName = null; 106 byte[] currentQualifier = null; 107 // family + qualifier 108 String currentQualifierName = null; 109 // rowkey + family + qualifier 110 String currentRowQualifierName = null; 111 112 @Override 113 protected void setup(Context context) throws IOException, InterruptedException { 114 conf = context.getConfiguration(); 115 separator = conf.get("ReportSeparator", ":"); 116 } 117 118 /** 119 * Maps the data. 120 * @param row The current table row key. 121 * @param values The columns. 122 * @param context The current context. 123 * @throws IOException When something is broken with the data. 124 */ 125 126 @Override 127 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "NP_NULL_ON_SOME_PATH", 128 justification = "Findbugs is blind to the Precondition null check") 129 public void map(ImmutableBytesWritable row, Result values, Context context) throws IOException { 130 Preconditions.checkState(values != null, "values passed to the map is null"); 131 132 try { 133 byte[] currentRow = values.getRow(); 134 if (lastRow == null || !Bytes.equals(lastRow, currentRow)) { 135 lastRow = currentRow; 136 currentRowKey = Bytes.toStringBinary(currentRow); 137 currentFamily = null; 138 currentQualifier = null; 139 context.getCounter(Counters.ROWS).increment(1); 140 context.write(new Text("Total ROWS"), new LongWritable(1)); 141 } 142 if (!values.isEmpty()) { 143 int cellCount = 0; 144 for (Cell value : values.listCells()) { 145 cellCount++; 146 long size = value.getSerializedSize(); 147 if (currentFamily == null || !CellUtil.matchingFamily(value, currentFamily)) { 148 currentFamily = CellUtil.cloneFamily(value); 149 currentFamilyName = Bytes.toStringBinary(currentFamily); 150 currentQualifier = null; 151 context.getCounter("CF", currentFamilyName).increment(1); 152 if (1 == context.getCounter("CF", currentFamilyName).getValue()) { 153 context.write(new Text("Total Families Across all Rows"), new LongWritable(1)); 154 context.write(new Text(currentFamily), new LongWritable(1)); 155 } 156 context.getCounter(Counters.SIZE).increment(size); 157 context.write(new Text("Total SIZE"), new LongWritable(size)); 158 context.getCounter("CF", currentFamilyName + "_Size").increment(size); 159 context.write(new Text(currentFamilyName + "_Size"), new LongWritable(size)); 160 } 161 if (currentQualifier == null || !CellUtil.matchingQualifier(value, currentQualifier)) { 162 currentQualifier = CellUtil.cloneQualifier(value); 163 currentQualifierName = 164 currentFamilyName + separator + Bytes.toStringBinary(currentQualifier); 165 currentRowQualifierName = currentRowKey + separator + currentQualifierName; 166 167 context.write(new Text("Total Qualifiers across all Rows"), new LongWritable(1)); 168 context.write(new Text(currentQualifierName), new LongWritable(1)); 169 context.getCounter("Q", currentQualifierName + "_Size").increment(size); 170 context.write(new Text(currentQualifierName + "_Size"), new LongWritable(size)); 171 } 172 // Increment versions 173 context.write(new Text(currentRowQualifierName + "_Versions"), new LongWritable(1)); 174 } 175 context.getCounter(Counters.CELLS).increment(cellCount); 176 } 177 } catch (InterruptedException e) { 178 LOG.error("Interrupted while writing cellCount", e); 179 Thread.currentThread().interrupt(); 180 } 181 } 182 } 183 184 static class LongSumReducer<Key> extends Reducer<Key, LongWritable, Key, LongWritable> { 185 186 private LongWritable result = new LongWritable(); 187 188 public void reduce(Key key, Iterable<LongWritable> values, Context context) 189 throws IOException, InterruptedException { 190 long sum = 0; 191 for (LongWritable val : values) { 192 sum += val.get(); 193 } 194 result.set(sum); 195 context.write(key, result); 196 } 197 198 } 199 200 /** 201 * Sets up the actual job. 202 * @param conf The current configuration. 203 * @param args The command line parameters. 204 * @return The newly created job. 205 * @throws IOException When setting up the job fails. 206 */ 207 public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException { 208 String tableName = args[0]; 209 Path outputDir = new Path(args[1]); 210 String reportSeparatorString = (args.length > 2) ? args[2] : ":"; 211 conf.set("ReportSeparator", reportSeparatorString); 212 Job job = Job.getInstance(conf, conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName)); 213 job.setJarByClass(CellCounter.class); 214 Scan scan = getConfiguredScanForJob(conf, args); 215 TableMapReduceUtil.initTableMapperJob(tableName, scan, CellCounterMapper.class, 216 ImmutableBytesWritable.class, Result.class, job); 217 job.setMapOutputKeyClass(Text.class); 218 job.setMapOutputValueClass(LongWritable.class); 219 job.setOutputFormatClass(TextOutputFormat.class); 220 job.setOutputKeyClass(Text.class); 221 job.setOutputValueClass(LongWritable.class); 222 FileOutputFormat.setOutputPath(job, outputDir); 223 job.setReducerClass(LongSumReducer.class); 224 job.setCombinerClass(LongSumReducer.class); 225 return job; 226 } 227 228 private static Scan getConfiguredScanForJob(Configuration conf, String[] args) 229 throws IOException { 230 // create scan with any properties set from TableInputFormat 231 Scan s = TableInputFormat.createScanFromConfiguration(conf); 232 // Set Scan Versions 233 if (conf.get(TableInputFormat.SCAN_MAXVERSIONS) == null) { 234 // default to all versions unless explicitly set 235 s.readVersions(Integer.MAX_VALUE); 236 } 237 s.setCacheBlocks(false); 238 // Set RowFilter or Prefix Filter if applicable. 239 Filter rowFilter = getRowFilter(args); 240 if (rowFilter != null) { 241 LOG.info("Setting Row Filter for counter."); 242 s.setFilter(rowFilter); 243 } 244 // Set TimeRange if defined 245 long timeRange[] = getTimeRange(args); 246 if (timeRange != null) { 247 LOG.info("Setting TimeRange for counter."); 248 s.setTimeRange(timeRange[0], timeRange[1]); 249 } 250 return s; 251 } 252 253 private static Filter getRowFilter(String[] args) { 254 Filter rowFilter = null; 255 String filterCriteria = (args.length > 3) ? args[3] : null; 256 if (filterCriteria == null) return null; 257 if (filterCriteria.startsWith("^")) { 258 String regexPattern = filterCriteria.substring(1, filterCriteria.length()); 259 rowFilter = new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(regexPattern)); 260 } else { 261 rowFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria)); 262 } 263 return rowFilter; 264 } 265 266 private static long[] getTimeRange(String[] args) throws IOException { 267 final String startTimeArgKey = "--starttime="; 268 final String endTimeArgKey = "--endtime="; 269 long startTime = 0L; 270 long endTime = 0L; 271 272 for (int i = 1; i < args.length; i++) { 273 System.out.println("i:" + i + "arg[i]" + args[i]); 274 if (args[i].startsWith(startTimeArgKey)) { 275 startTime = Long.parseLong(args[i].substring(startTimeArgKey.length())); 276 } 277 if (args[i].startsWith(endTimeArgKey)) { 278 endTime = Long.parseLong(args[i].substring(endTimeArgKey.length())); 279 } 280 } 281 282 if (startTime == 0 && endTime == 0) return null; 283 284 endTime = endTime == 0 ? HConstants.LATEST_TIMESTAMP : endTime; 285 return new long[] { startTime, endTime }; 286 } 287 288 @Override 289 public int run(String[] args) throws Exception { 290 if (args.length < 2) { 291 printUsage(args.length); 292 return -1; 293 } 294 Job job = createSubmittableJob(getConf(), args); 295 return (job.waitForCompletion(true) ? 0 : 1); 296 } 297 298 private void printUsage(int parameterCount) { 299 System.err.println("ERROR: Wrong number of parameters: " + parameterCount); 300 System.err.println("Usage: hbase cellcounter <tablename> <outputDir> [reportSeparator] " 301 + "[^[regex pattern] or [Prefix]] [--starttime=<starttime> --endtime=<endtime>]"); 302 System.err.println(" Note: -D properties will be applied to the conf used."); 303 System.err.println(" Additionally, all of the SCAN properties from TableInputFormat can be " 304 + "specified to get fine grained control on what is counted."); 305 System.err.println(" -D" + TableInputFormat.SCAN_ROW_START + "=<rowkey>"); 306 System.err.println(" -D" + TableInputFormat.SCAN_ROW_STOP + "=<rowkey>"); 307 System.err.println(" -D" + TableInputFormat.SCAN_COLUMNS + "=\"<col1> <col2>...\""); 308 System.err.println(" -D" + TableInputFormat.SCAN_COLUMN_FAMILY + "=<family1>,<family2>, ..."); 309 System.err.println(" -D" + TableInputFormat.SCAN_TIMESTAMP + "=<timestamp>"); 310 System.err.println(" -D" + TableInputFormat.SCAN_TIMERANGE_START + "=<timestamp>"); 311 System.err.println(" -D" + TableInputFormat.SCAN_TIMERANGE_END + "=<timestamp>"); 312 System.err.println(" -D" + TableInputFormat.SCAN_MAXVERSIONS + "=<count>"); 313 System.err.println(" -D" + TableInputFormat.SCAN_CACHEDROWS + "=<count>"); 314 System.err.println(" -D" + TableInputFormat.SCAN_BATCHSIZE + "=<count>"); 315 System.err.println(" <reportSeparator> parameter can be used to override the default report " 316 + "separator string : used to separate the rowId/column family name and qualifier name."); 317 System.err.println(" [^[regex pattern] or [Prefix] parameter can be used to limit the cell " 318 + "counter count operation to a limited subset of rows from the table based on regex or " 319 + "prefix pattern."); 320 } 321 322 /** 323 * Main entry point. 324 * @param args The command line parameters. 325 * @throws Exception When running the job fails. 326 */ 327 public static void main(String[] args) throws Exception { 328 int errCode = ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args); 329 System.exit(errCode); 330 } 331 332}