/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import org.apache.yetus.audience.InterfaceAudience;

/**
 * This BlockCompressedSizePredicator implementation adjusts the block size limit based on the
 * compression rate of the blocks written so far. For the first block, the adjusted size is zero,
 * so the current block contents are compressed in order to calculate the compression rate and the
 * adjusted size. For subsequent blocks, the decision on whether the block should be finished is
 * based on the compression rate calculated for the previous block.
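 * <p>
 * A minimal usage sketch (illustrative only; variable names such as <code>context</code>,
 * <code>uncompressedSize</code>, <code>compressedSize</code> and
 * <code>currentUncompressedSize</code> are hypothetical, not the actual HFile writer code):
 * </p>
 *
 * <pre>
 * BlockCompressedSizePredicator predicator = new PreviousBlockCompressionRatePredicator();
 * // after finishing a block, feed back its sizes so the limit can be adjusted
 * predicator.updateLatestBlockSizes(context, uncompressedSize, compressedSize);
 * // while appending to the next block, ask whether it should be finished
 * if (predicator.shouldFinishBlock(currentUncompressedSize)) {
 *   // close the current block and start a new one
 * }
 * </pre>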
 */
@InterfaceAudience.Private
public class PreviousBlockCompressionRatePredicator implements BlockCompressedSizePredicator {

  private int adjustedBlockSize;
  private int compressionRatio = 1;
  private int configuredMaxBlockSize;

  /**
   * Recalculates the compression rate for the last block written and adjusts the block size limit
   * as: BLOCK_SIZE * (uncompressed/compressed).
   * @param context      HFileContext containing the configured max block size.
   * @param uncompressed the uncompressed size of the last block written.
   * @param compressed   the compressed size of the last block written.
   */
  @Override
  public void updateLatestBlockSizes(HFileContext context, int uncompressed, int compressed) {
    configuredMaxBlockSize = context.getBlocksize();
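    // Integer division: e.g. a previous block of 256 KB uncompressed written as 64 KB compressed
    // yields a ratio of 4, so the uncompressed limit becomes 4x the configured block size.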
    compressionRatio = uncompressed / compressed;
    adjustedBlockSize = context.getBlocksize() * compressionRatio;
  }

  /**
   * Returns <b>true</b> if the passed uncompressed size has reached both the configured max block
   * size and the limit calculated by <code>updateLatestBlockSizes</code>.
   * @param uncompressed the uncompressed size of the block currently being written.
   * @return true if the block should be finished.
   */
  @Override
  public boolean shouldFinishBlock(int uncompressed) {
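    // Only consider finishing once the configured block size is reached; then finish only if the
    // compression-adjusted limit has also been reached.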
    if (uncompressed >= configuredMaxBlockSize) {
      return uncompressed >= adjustedBlockSize;
    }
    return false;
  }
}