/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Just copy data, do not do any kind of compression. Use for comparison and
 * benchmarking.
 */
@InterfaceAudience.Private
public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {

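  /**
   * Per-block encoding state; it simply carries the {@link NoneEncoder} that writes the cells of
   * the current block.
   */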
  private static class CopyKeyEncodingState extends EncodingState {
    NoneEncoder encoder = null;
  }

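  // Set up per-block state: check that we were handed the default encoding context, prepare it
  // for a new block, and stash a NoneEncoder for internalEncode to reuse on every cell.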
  @Override
  public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
      DataOutputStream out) throws IOException {
    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
      throw new IOException(this.getClass().getName() + " only accepts "
          + HFileBlockDefaultEncodingContext.class.getName() + " as the "
          + "encoding context.");
    }

    HFileBlockDefaultEncodingContext encodingCtx = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
    encodingCtx.prepareEncoding(out);

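    // The NoneEncoder writes each cell verbatim: key length, value length, key and value bytes,
    // plus tags and the mvcc vlong when the block carries them -- the same layout the seeker's
    // decodeNext() reads back.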
    NoneEncoder encoder = new NoneEncoder(out, encodingCtx);
    CopyKeyEncodingState state = new CopyKeyEncodingState();
    state.encoder = encoder;
    blkEncodingCtx.setEncodingState(state);
  }

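  // Write a single cell through the block's NoneEncoder and return the encoded size it reports.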
  @Override
  public int internalEncode(Cell cell,
      HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out)
      throws IOException {
    CopyKeyEncodingState state = (CopyKeyEncodingState) encodingContext
        .getEncodingState();
    NoneEncoder encoder = state.encoder;
    return encoder.write(cell);
  }

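  // Each cell is stored as: key length (int), value length (int), key bytes, value bytes,
  // optional tags (2-byte length + tag bytes) and an optional mvcc vlong. The first key therefore
  // starts right after the block's leading int and the two length ints of the first cell.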
  @Override
  public Cell getFirstKeyCellInBlock(ByteBuff block) {
    int keyLength = block.getIntAfterPosition(Bytes.SIZEOF_INT);
    int pos = 3 * Bytes.SIZEOF_INT;
    ByteBuffer key = block.asSubByteBuffer(pos + keyLength).duplicate();
    return createFirstKeyCell(key, keyLength);
  }

  @Override
  public String toString() {
    return CopyKeyDataBlockEncoder.class.getSimpleName();
  }

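  // The seeker walks cells sequentially. Because nothing is compressed, a key never shares a
  // prefix with the previous one, so lastCommonPrefix stays 0.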
  @Override
  public EncodedSeeker createSeeker(CellComparator comparator,
      final HFileBlockDecodingContext decodingCtx) {
    return new BufferedEncodedSeeker<SeekerState>(comparator, decodingCtx) {
      @Override
      protected void decodeNext() {
        current.keyLength = currentBuffer.getInt();
        current.valueLength = currentBuffer.getInt();
        current.ensureSpaceForKey();
        currentBuffer.get(current.keyBuffer, 0, current.keyLength);
        current.valueOffset = currentBuffer.position();
        currentBuffer.skip(current.valueLength);
        if (includesTags()) {
          // Read short as unsigned, high byte first
          current.tagsLength = ((currentBuffer.get() & 0xff) << 8) ^ (currentBuffer.get() & 0xff);
          currentBuffer.skip(current.tagsLength);
        }
        if (includesMvcc()) {
          current.memstoreTS = ByteBuff.readVLong(currentBuffer);
        } else {
          current.memstoreTS = 0;
        }
        current.nextKvOffset = currentBuffer.position();
      }

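      // Skip the block's leading int; the first cell then decodes exactly like any other.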
      @Override
      protected void decodeFirst() {
        currentBuffer.skip(Bytes.SIZEOF_INT);
        current.lastCommonPrefix = 0;
        decodeNext();
      }
    };
  }

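  // Decoding is a plain copy as well: read the serialized size, then transfer that many bytes
  // from the stream into a newly allocated buffer, leaving allocateHeaderLength bytes of
  // headroom for the block header.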
  @Override
  protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength,
      int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
    int decompressedSize = source.readInt();
    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
        allocateHeaderLength);
    buffer.position(allocateHeaderLength);
    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, decompressedSize);

    return buffer;
  }
}