/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.compress;

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Helper to decompress a ByteBuff that was created by a
 * {@link org.apache.hadoop.io.compress.BlockCompressorStream}, or is at least in the same format.
 * Parses the binary format and delegates actual decompression work to the provided
 * {@link RawDecompressor}. Note that the use of the word "block" here does not refer to an HFile
 * block.
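 * <p>
 * The format, as parsed by {@link #decompress(ByteBuff, ByteBuff, int, RawDecompressor)}, is a
 * sequence of blocks: each block starts with a big-endian int giving its decompressed size,
 * followed by one or more chunks, each of which is a big-endian int giving the chunk's
 * compressed size followed by that many bytes of compressed data.
 * <p>
 * A minimal usage sketch; {@code rawLz4Decompress} is hypothetical and stands in for a real
 * codec's raw block decompression routine:
 *
 * <pre>{@code
 * BlockDecompressorHelper.RawDecompressor raw =
 *   (out, in, len) -> rawLz4Decompress(out, in, len);
 * int decompressedBytes = BlockDecompressorHelper.decompress(outBuff, inBuff, inputSize, raw);
 * }</pre>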
 */
@InterfaceAudience.Private
public class BlockDecompressorHelper {

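  /**
   * Performs the raw decompression of a single compressed chunk: reads {@code inputLen} bytes
   * from {@code input}, writes the decompressed bytes to {@code output}, and returns the number
   * of bytes written.
   */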
  public interface RawDecompressor {
    int decompress(ByteBuff output, ByteBuff input, int inputLen) throws IOException;
  }

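  /**
   * Decompress {@code inputSize} bytes of block-compressed input from {@code input} into
   * {@code output}, delegating the decompression of each chunk to {@code rawDecompressor}.
   * @return the total number of decompressed bytes written to {@code output}
   */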
  public static int decompress(ByteBuff output, ByteBuff input, int inputSize,
    RawDecompressor rawDecompressor) throws IOException {
    int totalDecompressedBytes = 0;
    int compressedBytesConsumed = 0;

    while (compressedBytesConsumed < inputSize) {
      // Each block is preceded by a big-endian int giving its total decompressed size.
      int decompressedBlockSize = rawReadInt(input);
      compressedBytesConsumed += 4;
      int decompressedBytesInBlock = 0;

      // A block consists of one or more chunks, each preceded by its compressed size.
      while (decompressedBytesInBlock < decompressedBlockSize) {
        int compressedChunkSize = rawReadInt(input);
        compressedBytesConsumed += 4;
        int n = rawDecompressor.decompress(output, input, compressedChunkSize);
        if (n <= 0) {
          throw new IOException("Decompression failed. Compressed chunk size: "
            + compressedChunkSize + ", expected decompressed block size: "
            + decompressedBlockSize);
        }
        compressedBytesConsumed += compressedChunkSize;
        decompressedBytesInBlock += n;
        totalDecompressedBytes += n;
      }
    }
    return totalDecompressedBytes;
  }

  /**
   * Read an integer from the buffer in big-endian byte order. Note that
   * {@link ByteBuffer#getInt()} reads in the buffer's current byte order, which is not
   * guaranteed to be big-endian here, so we can't rely on it.
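   * <p>
   * For example, the bytes {@code 0x00, 0x01, 0x02, 0x03} decode to
   * {@code (0x00 << 24) + (0x01 << 16) + (0x02 << 8) + 0x03} = 66051.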
   */
  private static int rawReadInt(ByteBuff input) {
    int b1 = Byte.toUnsignedInt(input.get());
    int b2 = Byte.toUnsignedInt(input.get());
    int b3 = Byte.toUnsignedInt(input.get());
    int b4 = Byte.toUnsignedInt(input.get());
    return ((b1 << 24) + (b2 << 16) + (b3 << 8) + b4);
  }

}