/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * A default implementation of {@link HFileBlockDecodingContext}. It assumes the
 * block data section is compressed as a whole.
 *
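 * <p>
 * A rough usage sketch; the buffers and size variables below are illustrative placeholders
 * normally supplied by the block reader, not values defined by this class:
 *
 * <pre>
 * HFileContext fileContext = new HFileContextBuilder()
 *     .withCompression(Compression.Algorithm.GZ)
 *     .build();
 * HFileBlockDefaultDecodingContext decodingCtx =
 *     new HFileBlockDefaultDecodingContext(fileContext);
 * // dest must be heap backed and large enough for uncompressedSizeWithoutHeader bytes;
 * // onDiskBlock holds the (possibly compressed and/or encrypted) data section read from disk.
 * decodingCtx.prepareDecoding(onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,
 *     dest, onDiskBlock);
 * </pre>
 *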
 * @see HFileBlockDefaultEncodingContext for the default compression context
 */
@InterfaceAudience.Private
public class HFileBlockDefaultDecodingContext implements
    HFileBlockDecodingContext {
  private final HFileContext fileContext;
  // Dictionary state used to uncompress cell tags; set by the data block decoder when the
  // file stores compressed tags.
  private TagCompressionContext tagCompressionContext;

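  /**
   * @param fileContext the HFile meta data (compression, encryption and tag compression
   *          settings) to use when decoding blocks of this file
   */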
  public HFileBlockDefaultDecodingContext(HFileContext fileContext) {
    this.fileContext = fileContext;
  }

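  // Reads the on-disk data section, decrypting and/or decompressing it as dictated by the
  // HFileContext, and fills blockBufferWithoutHeader with the plain uncompressed bytes.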
  @Override
  public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
      ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
    final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
    InputStream dataInputStream = new DataInputStream(byteBuffInputStream);

    try {
      Encryption.Context cryptoContext = fileContext.getEncryptionContext();
      if (cryptoContext != Encryption.Context.NONE) {

        Cipher cipher = cryptoContext.getCipher();
        Decryptor decryptor = cipher.getDecryptor();
        decryptor.setKey(cryptoContext.getKey());

        // Encrypted block format:
        // +--------------------------+
        // | byte iv length           |
        // +--------------------------+
        // | iv data ...              |
        // +--------------------------+
        // | encrypted block data ... |
        // +--------------------------+

        // All encrypted blocks will have a nonzero IV length. If we see an IV length of zero,
        // the encoding context had 0 bytes of plaintext to encode, so there is nothing to
        // decrypt.
        int ivLength = dataInputStream.read();
        if (ivLength > 0) {
          byte[] iv = new byte[ivLength];
          IOUtils.readFully(dataInputStream, iv);
          decryptor.setIv(iv);
          decryptor.reset();
          dataInputStream = decryptor.createDecryptionStream(dataInputStream);
        }
        // The IV length byte and the IV itself are not part of the payload to decompress.
        onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
      }

      Compression.Algorithm compression = fileContext.getCompression();
      // The destination buffer is expected to be heap backed; decompress (or copy, if the
      // file is not compressed) the payload directly into its backing array.
      assert blockBufferWithoutHeader.hasArray();
      if (compression != Compression.Algorithm.NONE) {
        Compression.decompress(blockBufferWithoutHeader.array(),
            blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader,
            uncompressedSizeWithoutHeader, compression);
      } else {
        IOUtils.readFully(dataInputStream, blockBufferWithoutHeader.array(),
            blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
      }
    } finally {
      byteBuffInputStream.close();
      dataInputStream.close();
    }
  }

  @Override
  public HFileContext getHFileContext() {
    return this.fileContext;
  }

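  /**
   * @return the {@link TagCompressionContext} set on this decoding context, or {@code null} if
   *         none has been set (i.e. tags are not compressed for this file)
   */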
  public TagCompressionContext getTagCompressionContext() {
    return tagCompressionContext;
  }

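  /**
   * Sets the {@link TagCompressionContext} used to uncompress cell tags while decoding blocks
   * of this file.
   */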
  public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
    this.tagCompressionContext = tagCompressionContext;
  }
}