/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static com.codahale.metrics.MetricRegistry.name;

import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.IOException;
import java.io.PrintStream;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
import org.apache.hadoop.hbase.util.BloomFilter;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.BloomFilterUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
import org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;

import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;

/**
 * Implements pretty-printing functionality for {@link HFile}s.
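 * <p>
 * Programmatic use mirrors {@link #main(String[])}; the file path below is illustrative only:
 * <pre>
 *   Configuration conf = HBaseConfiguration.create();
 *   HFilePrettyPrinter printer = new HFilePrettyPrinter(conf);
 *   // print key/values (-p), file metadata (-m) and statistics (-s) for one HFile (-f)
 *   int exitCode = printer.run(new String[] { "-p", "-m", "-s", "-f", "/hbase/path/to/hfile" });
 * </pre>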
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class HFilePrettyPrinter extends Configured implements Tool {

  private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class);

  private Options options = new Options();

  private boolean verbose;
  private boolean printValue;
  private boolean printKey;
  private boolean shouldPrintMeta;
  private boolean printBlockIndex;
  private boolean printBlockHeaders;
  private boolean printStats;
  private boolean checkRow;
  private boolean checkFamily;
  private boolean isSeekToRow = false;
  private boolean checkMobIntegrity = false;
  private Map<String, List<Path>> mobFileLocations;
  private static final int FOUND_MOB_FILES_CACHE_CAPACITY = 50;
  private static final int MISSING_MOB_FILES_CACHE_CAPACITY = 20;
  private PrintStream out = System.out;
  private PrintStream err = System.err;

  /**
   * The row for which the user wants to print all the KeyValues.
   */
  private byte[] row = null;

  private List<Path> files = new ArrayList<>();
  private int count;

  private static final String FOUR_SPACES = "    ";

  public HFilePrettyPrinter() {
    super();
    init();
  }

  public HFilePrettyPrinter(Configuration conf) {
    super(conf);
    init();
  }

  private void init() {
    options.addOption("v", "verbose", false,
        "Verbose output; emits file and meta data delimiters");
    options.addOption("p", "printkv", false, "Print key/value pairs");
    options.addOption("e", "printkey", false, "Print keys");
    options.addOption("m", "printmeta", false, "Print meta data of file");
    options.addOption("b", "printblocks", false, "Print block index meta data");
    options.addOption("h", "printblockheaders", false, "Print block headers for each block.");
    options.addOption("k", "checkrow", false,
        "Enable row order check; looks for out-of-order keys");
    options.addOption("a", "checkfamily", false, "Enable family check");
    options.addOption("w", "seekToRow", true,
      "Seek to this row and print all the kvs for this row only");
    options.addOption("s", "stats", false, "Print statistics");
    options.addOption("i", "checkMobIntegrity", false,
      "Print all cells whose mob files are missing");

    OptionGroup files = new OptionGroup();
    files.addOption(new Option("f", "file", true,
      "File to scan. Pass full-path; e.g. hdfs://a:9000/hbase/hbase:meta/12/34"));
    files.addOption(new Option("r", "region", true,
      "Region to scan. Pass region name; e.g. 'hbase:meta,,1'"));
    options.addOptionGroup(files);
  }

  public void setPrintStreams(PrintStream out, PrintStream err) {
    this.out = out;
    this.err = err;
  }

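  /**
   * Parses the command-line options. Prints usage and returns {@code false} when no arguments
   * are given; otherwise populates the option flags and file list and returns {@code true}.
   */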
  public boolean parseOptions(String[] args) throws ParseException,
      IOException {
    if (args.length == 0) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("HFile", options, true);
      return false;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, args);

    verbose = cmd.hasOption("v");
    printValue = cmd.hasOption("p");
    printKey = cmd.hasOption("e") || printValue;
    shouldPrintMeta = cmd.hasOption("m");
    printBlockIndex = cmd.hasOption("b");
    printBlockHeaders = cmd.hasOption("h");
    printStats = cmd.hasOption("s");
    checkRow = cmd.hasOption("k");
    checkFamily = cmd.hasOption("a");
    checkMobIntegrity = cmd.hasOption("i");

    if (cmd.hasOption("f")) {
      files.add(new Path(cmd.getOptionValue("f")));
    }

    if (cmd.hasOption("w")) {
      String key = cmd.getOptionValue("w");
      if (key != null && key.length() != 0) {
        row = Bytes.toBytesBinary(key);
        isSeekToRow = true;
      } else {
        err.println("Invalid row specified.");
        System.exit(-1);
      }
    }

    if (cmd.hasOption("r")) {
      String regionName = cmd.getOptionValue("r");
      byte[] rn = Bytes.toBytes(regionName);
      byte[][] hri = HRegionInfo.parseRegionName(rn);
      Path rootDir = FSUtils.getRootDir(getConf());
      Path tableDir = FSUtils.getTableDir(rootDir, TableName.valueOf(hri[0]));
      String enc = HRegionInfo.encodeRegionName(rn);
      Path regionDir = new Path(tableDir, enc);
      if (verbose) {
        out.println("region dir -> " + regionDir);
      }
      List<Path> regionFiles = HFile.getStoreFiles(FileSystem.get(getConf()),
          regionDir);
      if (verbose) {
        out.println("Number of region files found -> " + regionFiles.size());
        int i = 1;
        for (Path p : regionFiles) {
          out.println("Found file[" + i++ + "] -> " + p);
        }
      }
      files.addAll(regionFiles);
    }

    if (checkMobIntegrity) {
      if (verbose) {
        out.println("checkMobIntegrity is enabled");
      }
      mobFileLocations = new HashMap<>();
    }

    cmd.getArgList().forEach((file) -> files.add(new Path(file)));

    return true;
  }

  /**
   * Runs the command-line pretty-printer, and returns the desired command
   * exit code (zero for success, non-zero for failure).
   */
  @Override
  public int run(String[] args) {
    if (getConf() == null) {
      throw new RuntimeException("A Configuration instance must be provided.");
    }
    try {
      FSUtils.setFsDefault(getConf(), FSUtils.getRootDir(getConf()));
      if (!parseOptions(args)) {
        return 1;
      }
    } catch (IOException ex) {
      LOG.error("Error parsing command-line options", ex);
      return 1;
    } catch (ParseException ex) {
      LOG.error("Error parsing command-line options", ex);
      return 1;
    }

    // iterate over all files found
    for (Path fileName : files) {
      try {
        int exitCode = processFile(fileName);
        if (exitCode != 0) {
          return exitCode;
        }
      } catch (IOException ex) {
        LOG.error("Error reading " + fileName, ex);
        return -2;
      }
    }

    if (verbose || printKey) {
      out.println("Scanned kv count -> " + count);
    }

    return 0;
  }

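  /**
   * Pretty-prints a single HFile according to the configured options. Returns 0 on success, or
   * -2 if the file does not exist or is not under the HBase root directory.
   */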
  public int processFile(Path file) throws IOException {
    if (verbose) {
      out.println("Scanning -> " + file);
    }

    Path rootPath = FSUtils.getRootDir(getConf());
    String rootString = rootPath + Path.SEPARATOR;
    if (!file.toString().startsWith(rootString)) {
      // First we see if fully-qualified URI matches the root dir. It might
      // also be an absolute path in the same filesystem, so we prepend the FS
      // of the root dir and see if that fully-qualified URI matches.
      FileSystem rootFS = rootPath.getFileSystem(getConf());
      String qualifiedFile = rootFS.getUri().toString() + file.toString();
      if (!qualifiedFile.startsWith(rootString)) {
        err.println("ERROR, file (" + file +
            ") is not in HBase's root directory (" + rootString + ")");
        return -2;
      }
    }

    FileSystem fs = file.getFileSystem(getConf());
    if (!fs.exists(file)) {
      err.println("ERROR, file doesn't exist: " + file);
      return -2;
    }

    HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, getConf());

    Map<byte[], byte[]> fileInfo = reader.loadFileInfo();

    KeyValueStatsCollector fileStats = null;

    if (verbose || printKey || checkRow || checkFamily || printStats || checkMobIntegrity) {
      // scan over file and read key/values and check if requested
      HFileScanner scanner = reader.getScanner(false, false, false);
      fileStats = new KeyValueStatsCollector();
      boolean shouldScanKeysValues = false;
      if (this.isSeekToRow) {
        // seek to the first kv on this row
        shouldScanKeysValues =
          (scanner.seekTo(PrivateCellUtil.createFirstOnRow(this.row)) != -1);
      } else {
        shouldScanKeysValues = scanner.seekTo();
      }
      if (shouldScanKeysValues) {
        scanKeysValues(file, fileStats, scanner, row);
      }
    }

    // print meta data
    if (shouldPrintMeta) {
      printMeta(reader, fileInfo);
    }

    if (printBlockIndex) {
      out.println("Block Index:");
      out.println(reader.getDataBlockIndexReader());
    }

    if (printBlockHeaders) {
      out.println("Block Headers:");
      /*
       * TODO: this same/similar block iteration logic is used in HFileBlock#blockRange and
       * TestLazyDataBlockDecompression. Refactor?
       */
      FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, file);
      long fileSize = fs.getFileStatus(file).getLen();
      FixedFileTrailer trailer =
        FixedFileTrailer.readFromStream(fsdis.getStream(false), fileSize);
      long offset = trailer.getFirstDataBlockOffset();
      long max = trailer.getLastDataBlockOffset();
      HFileBlock block;
      while (offset <= max) {
        block = reader.readBlock(offset, -1, /* cacheBlock */ false, /* pread */ false,
          /* isCompaction */ false, /* updateCacheMetrics */ false, null, null);
        offset += block.getOnDiskSizeWithHeader();
        out.println(block);
      }
    }

    if (printStats) {
      fileStats.finish();
      out.println("Stats:\n" + fileStats);
    }

    reader.close();
    return 0;
  }

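  /**
   * Iterates over the scanner, optionally restricted to a single row, printing keys/values,
   * collecting statistics, and running the row-order, family, and mob-integrity checks that
   * were requested on the command line.
   */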
  private void scanKeysValues(Path file, KeyValueStatsCollector fileStats,
      HFileScanner scanner, byte[] row) throws IOException {
    Cell pCell = null;
    FileSystem fs = FileSystem.get(getConf());
    Set<String> foundMobFiles = new LinkedHashSet<>(FOUND_MOB_FILES_CACHE_CAPACITY);
    Set<String> missingMobFiles = new LinkedHashSet<>(MISSING_MOB_FILES_CACHE_CAPACITY);
    do {
      Cell cell = scanner.getCell();
      if (row != null && row.length != 0) {
        int result = CellComparator.getInstance().compareRows(cell, row, 0, row.length);
        if (result > 0) {
          break;
        } else if (result < 0) {
          continue;
        }
      }
      // collect stats
      if (printStats) {
        fileStats.collect(cell);
      }
      // dump key value
      if (printKey) {
        out.print("K: " + cell);
        if (printValue) {
          out.print(" V: "
              + Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(),
                  cell.getValueLength()));
          int i = 0;
          List<Tag> tags = PrivateCellUtil.getTags(cell);
          for (Tag tag : tags) {
            out.print(String.format(" T[%d]: %s", i++, tag.toString()));
          }
        }
        out.println();
      }
      // check if rows are in order
      if (checkRow && pCell != null) {
        if (CellComparator.getInstance().compareRows(pCell, cell) > 0) {
          err.println("WARNING, previous row is greater than"
              + " current row\n\tfilename -> " + file + "\n\tprevious -> "
              + CellUtil.getCellKeyAsString(pCell) + "\n\tcurrent  -> "
              + CellUtil.getCellKeyAsString(cell));
        }
      }
      // check if families are consistent
      if (checkFamily) {
        String fam = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(),
            cell.getFamilyLength());
        if (!file.toString().contains(fam)) {
          err.println("WARNING, filename does not match kv family,"
              + "\n\tfilename -> " + file + "\n\tkeyvalue -> "
              + CellUtil.getCellKeyAsString(cell));
        }
        if (pCell != null && CellComparator.getInstance().compareFamilies(pCell, cell) != 0) {
          err.println("WARNING, previous kv has different family"
              + " compared to current key\n\tfilename -> " + file
              + "\n\tprevious -> " + CellUtil.getCellKeyAsString(pCell)
              + "\n\tcurrent  -> " + CellUtil.getCellKeyAsString(cell));
        }
      }
      // check if mob files are missing.
      if (checkMobIntegrity && MobUtils.isMobReferenceCell(cell)) {
        Tag tnTag = MobUtils.getTableNameTag(cell);
        if (tnTag == null) {
          err.println("ERROR, wrong tag format in mob reference cell "
            + CellUtil.getCellKeyAsString(cell));
        } else if (!MobUtils.hasValidMobRefCellValue(cell)) {
          err.println("ERROR, wrong value format in mob reference cell "
            + CellUtil.getCellKeyAsString(cell));
        } else {
          TableName tn = TableName.valueOf(Tag.cloneValue(tnTag));
          String mobFileName = MobUtils.getMobFileName(cell);
          boolean exist = mobFileExists(fs, tn, mobFileName,
            Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles);
          if (!exist) {
            // report error
            err.println("ERROR, the mob file [" + mobFileName
              + "] referenced by cell " + CellUtil.getCellKeyAsString(cell) + " is missing");
          }
        }
      }
      pCell = cell;
      ++count;
    } while (scanner.next());
  }

  /**
   * Checks whether the referenced mob file exists.
   */
  private boolean mobFileExists(FileSystem fs, TableName tn, String mobFileName, String family,
    Set<String> foundMobFiles, Set<String> missingMobFiles) throws IOException {
    if (foundMobFiles.contains(mobFileName)) {
      return true;
    }
    if (missingMobFiles.contains(mobFileName)) {
      return false;
    }
    String tableName = tn.getNameAsString();
    List<Path> locations = mobFileLocations.get(tableName);
    if (locations == null) {
      locations = new ArrayList<>(2);
      locations.add(MobUtils.getMobFamilyPath(getConf(), tn, family));
      locations.add(HFileArchiveUtil.getStoreArchivePath(getConf(), tn,
        MobUtils.getMobRegionInfo(tn).getEncodedName(), family));
      mobFileLocations.put(tableName, locations);
    }
    boolean exist = false;
    for (Path location : locations) {
      Path mobFilePath = new Path(location, mobFileName);
      if (fs.exists(mobFilePath)) {
        exist = true;
        break;
      }
    }
    if (exist) {
      evictMobFilesIfNecessary(foundMobFiles, FOUND_MOB_FILES_CACHE_CAPACITY);
      foundMobFiles.add(mobFileName);
    } else {
      evictMobFilesIfNecessary(missingMobFiles, MISSING_MOB_FILES_CACHE_CAPACITY);
      missingMobFiles.add(mobFileName);
    }
    return exist;
  }

  /**
   * Evicts the oldest half of the cached mob file names once the set reaches the given limit.
   */
  private void evictMobFilesIfNecessary(Set<String> mobFileNames, int limit) {
    if (mobFileNames.size() < limit) {
      return;
    }
    int index = 0;
    int evict = limit / 2;
    Iterator<String> fileNamesItr = mobFileNames.iterator();
    while (index < evict && fileNamesItr.hasNext()) {
      fileNamesItr.next();
      fileNamesItr.remove();
      index++;
    }
  }

  /**
   * Format a string of the form "k1=v1, k2=v2, ..." into separate lines
   * with a four-space indentation.
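   * For example, {@code "a=1, b=2"} is rendered as {@code "a=1,\n    b=2"}.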
   */
  private static String asSeparateLines(String keyValueStr) {
    return keyValueStr.replaceAll(", ([a-zA-Z]+=)",
                                  ",\n" + FOUR_SPACES + "$1");
  }

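  /**
   * Prints the trailer, file info entries, mid-key, and bloom filter metadata of the file.
   */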
  private void printMeta(HFile.Reader reader, Map<byte[], byte[]> fileInfo)
      throws IOException {
    out.println("Block index size as per heapsize: "
        + reader.indexSize());
    out.println(asSeparateLines(reader.toString()));
    out.println("Trailer:\n    "
        + asSeparateLines(reader.getTrailer().toString()));
    out.println("Fileinfo:");
    for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) {
      out.print(FOUR_SPACES + Bytes.toString(e.getKey()) + " = ");
      if (Bytes.equals(e.getKey(), HStoreFile.MAX_SEQ_ID_KEY)
          || Bytes.equals(e.getKey(), HStoreFile.DELETE_FAMILY_COUNT)
          || Bytes.equals(e.getKey(), HStoreFile.EARLIEST_PUT_TS)
          || Bytes.equals(e.getKey(), HFileWriterImpl.MAX_MEMSTORE_TS_KEY)
          || Bytes.equals(e.getKey(), FileInfo.CREATE_TIME_TS)
          || Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
        out.println(Bytes.toLong(e.getValue()));
      } else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
        TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(e.getValue());
        out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
      } else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
          || Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)
          || Bytes.equals(e.getKey(), HFileWriterImpl.KEY_VALUE_VERSION)
          || Bytes.equals(e.getKey(), FileInfo.MAX_TAGS_LEN)) {
        out.println(Bytes.toInt(e.getValue()));
      } else if (Bytes.equals(e.getKey(), HStoreFile.MAJOR_COMPACTION_KEY)
          || Bytes.equals(e.getKey(), FileInfo.TAGS_COMPRESSED)
          || Bytes.equals(e.getKey(), HStoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY)) {
        out.println(Bytes.toBoolean(e.getValue()));
      } else if (Bytes.equals(e.getKey(), FileInfo.LASTKEY)) {
        out.println(new KeyValue.KeyOnlyKeyValue(e.getValue()).toString());
      } else {
        out.println(Bytes.toStringBinary(e.getValue()));
      }
    }

    try {
      out.println("Mid-key: " + reader.midKey().map(CellUtil::getCellKeyAsString));
    } catch (Exception e) {
      out.println("Unable to retrieve the midkey");
    }

    // Printing general bloom information
    DataInput bloomMeta = reader.getGeneralBloomFilterMetadata();
    BloomFilter bloomFilter = null;
    if (bloomMeta != null) {
      bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader);
    }

    out.println("Bloom filter:");
    if (bloomFilter != null) {
      out.println(FOUR_SPACES + bloomFilter.toString().replaceAll(
          BloomFilterUtil.STATS_RECORD_SEP, "\n" + FOUR_SPACES));
    } else {
      out.println(FOUR_SPACES + "Not present");
    }

    // Printing delete bloom information
    bloomMeta = reader.getDeleteBloomFilterMetadata();
    bloomFilter = null;
    if (bloomMeta != null) {
      bloomFilter = BloomFilterFactory.createFromMeta(bloomMeta, reader);
    }

    out.println("Delete Family Bloom filter:");
    if (bloomFilter != null) {
      out.println(FOUR_SPACES
          + bloomFilter.toString().replaceAll(BloomFilterUtil.STATS_RECORD_SEP,
              "\n" + FOUR_SPACES));
    } else {
      out.println(FOUR_SPACES + "Not present");
    }
  }

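  /**
   * Collects simple statistics over the scanned cells (key/value lengths and per-row size in
   * bytes and columns) into Dropwizard histograms; {@link #toString()} reports them via
   * {@link SimpleReporter} along with the key of the biggest row.
   */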
  private static class KeyValueStatsCollector {
    private final MetricRegistry metricsRegistry = new MetricRegistry();
    private final ByteArrayOutputStream metricsOutput = new ByteArrayOutputStream();
    private final SimpleReporter simpleReporter = SimpleReporter.forRegistry(metricsRegistry).
        outputTo(new PrintStream(metricsOutput)).filter(MetricFilter.ALL).build();

    Histogram keyLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Key length"));
    Histogram valLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Val length"));
    Histogram rowSizeBytes = metricsRegistry.histogram(
      name(HFilePrettyPrinter.class, "Row size (bytes)"));
    Histogram rowSizeCols = metricsRegistry.histogram(
      name(HFilePrettyPrinter.class, "Row size (columns)"));

    long curRowBytes = 0;
    long curRowCols = 0;

    byte[] biggestRow = null;

    private Cell prevCell = null;
    private long maxRowBytes = 0;
    private long curRowKeyLength;

    public void collect(Cell cell) {
      valLen.update(cell.getValueLength());
      if (prevCell != null &&
          CellComparator.getInstance().compareRows(prevCell, cell) != 0) {
        // new row
        collectRow();
      }
      curRowBytes += KeyValueUtil.length(cell);
      curRowKeyLength = KeyValueUtil.keyLength(cell);
      curRowCols++;
      prevCell = cell;
    }

    private void collectRow() {
      rowSizeBytes.update(curRowBytes);
      rowSizeCols.update(curRowCols);
      keyLen.update(curRowKeyLength);

      if (curRowBytes > maxRowBytes && prevCell != null) {
        biggestRow = CellUtil.cloneRow(prevCell);
        maxRowBytes = curRowBytes;
      }

      curRowBytes = 0;
      curRowCols = 0;
    }

    public void finish() {
      if (curRowCols > 0) {
        collectRow();
      }
    }

    @Override
    public String toString() {
      if (prevCell == null) {
        return "no data available for statistics";
      }

      // Dump the metrics to the output stream
      simpleReporter.stop();
      simpleReporter.report();

      return metricsOutput.toString()
          + "Key of biggest row: " + Bytes.toStringBinary(biggestRow);
    }
  }

  /**
   * Almost identical to ConsoleReporter, but extends ScheduledReporter, since extending
   * ConsoleReporter is too much trouble with this version of Dropwizard.
   */
  private static class SimpleReporter extends ScheduledReporter {
    /**
     * Returns a new {@link Builder} for {@link SimpleReporter}.
     *
     * @param registry the registry to report
     * @return a {@link Builder} instance for a {@link SimpleReporter}
     */
    public static Builder forRegistry(MetricRegistry registry) {
      return new Builder(registry);
    }

    /**
     * A builder for {@link SimpleReporter} instances. Defaults to using the default locale and
     * time zone, writing to {@code System.out}, converting rates to events/second, converting
     * durations to milliseconds, and not filtering metrics.
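     * <p>
     * Typical usage mirrors how {@link KeyValueStatsCollector} builds its reporter:
     * <pre>
     *   SimpleReporter reporter = SimpleReporter.forRegistry(registry)
     *       .outputTo(new PrintStream(outputStream))
     *       .filter(MetricFilter.ALL)
     *       .build();
     * </pre>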
     */
    public static class Builder {
      private final MetricRegistry registry;
      private PrintStream output;
      private Locale locale;
      private TimeZone timeZone;
      private TimeUnit rateUnit;
      private TimeUnit durationUnit;
      private MetricFilter filter;

      private Builder(MetricRegistry registry) {
        this.registry = registry;
        this.output = System.out;
        this.locale = Locale.getDefault();
        this.timeZone = TimeZone.getDefault();
        this.rateUnit = TimeUnit.SECONDS;
        this.durationUnit = TimeUnit.MILLISECONDS;
        this.filter = MetricFilter.ALL;
      }

      /**
       * Write to the given {@link PrintStream}.
       *
       * @param output a {@link PrintStream} instance.
       * @return {@code this}
       */
      public Builder outputTo(PrintStream output) {
        this.output = output;
        return this;
      }

      /**
       * Only report metrics which match the given filter.
       *
       * @param filter a {@link MetricFilter}
       * @return {@code this}
       */
      public Builder filter(MetricFilter filter) {
        this.filter = filter;
        return this;
      }

      /**
       * Builds a {@link SimpleReporter} with the given properties.
       *
       * @return a {@link SimpleReporter}
       */
      public SimpleReporter build() {
        return new SimpleReporter(registry,
            output,
            locale,
            timeZone,
            rateUnit,
            durationUnit,
            filter);
      }
    }

    private final PrintStream output;
    private final Locale locale;
    private final DateFormat dateFormat;

    private SimpleReporter(MetricRegistry registry,
                           PrintStream output,
                           Locale locale,
                           TimeZone timeZone,
                           TimeUnit rateUnit,
                           TimeUnit durationUnit,
                           MetricFilter filter) {
      super(registry, "simple-reporter", filter, rateUnit, durationUnit);
      this.output = output;
      this.locale = locale;

      this.dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT,
          DateFormat.MEDIUM,
          locale);
      dateFormat.setTimeZone(timeZone);
    }

    @Override
    public void report(SortedMap<String, Gauge> gauges,
                       SortedMap<String, Counter> counters,
                       SortedMap<String, Histogram> histograms,
                       SortedMap<String, Meter> meters,
                       SortedMap<String, Timer> timers) {
      // we know we only have histograms
      if (!histograms.isEmpty()) {
        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
          output.print("   " + StringUtils.substringAfterLast(entry.getKey(), "."));
          output.println(':');
          printHistogram(entry.getValue());
        }
        output.println();
      }

      output.println();
      output.flush();
    }

    private void printHistogram(Histogram histogram) {
      Snapshot snapshot = histogram.getSnapshot();
      output.printf(locale, "               min = %d%n", snapshot.getMin());
      output.printf(locale, "               max = %d%n", snapshot.getMax());
      output.printf(locale, "              mean = %2.2f%n", snapshot.getMean());
      output.printf(locale, "            stddev = %2.2f%n", snapshot.getStdDev());
      output.printf(locale, "            median = %2.2f%n", snapshot.getMedian());
      output.printf(locale, "              75%% <= %2.2f%n", snapshot.get75thPercentile());
      output.printf(locale, "              95%% <= %2.2f%n", snapshot.get95thPercentile());
      output.printf(locale, "              98%% <= %2.2f%n", snapshot.get98thPercentile());
      output.printf(locale, "              99%% <= %2.2f%n", snapshot.get99thPercentile());
      output.printf(locale, "            99.9%% <= %2.2f%n", snapshot.get999thPercentile());
      output.printf(locale, "             count = %d%n", histogram.getCount());
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // no need for a block cache
    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);
    int ret = ToolRunner.run(conf, new HFilePrettyPrinter(), args);
    System.exit(ret);
  }
}