/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Stores cells flat, followed by an index of every row's start offset, so a seeker can binary
 * search to the first cell of a row.
 *
 * Format:
 * flat cells
 * integer: number of rows
 * integer: row0's offset
 * integer: row1's offset
 * ....
 * integer: dataSize
 *
 */
@InterfaceAudience.Private
public class RowIndexCodecV1 extends AbstractDataBlockEncoder {

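  /**
   * Per-block encoding state: holds the {@link RowIndexEncoderV1} created in
   * {@link #startBlockEncoding} so that {@link #encode} and {@link #endBlockEncoding} can reuse
   * it.
   */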
  private static class RowIndexEncodingState extends EncodingState {
    RowIndexEncoderV1 encoder = null;
  }

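  /**
   * Sets up encoding for a new block: verifies the context type, prepares the underlying output
   * stream, and stashes a fresh {@link RowIndexEncoderV1} in the block's encoding state.
   */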
  @Override
  public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
      DataOutputStream out) throws IOException {
    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
      throw new IOException(this.getClass().getName() + " only accepts "
          + HFileBlockDefaultEncodingContext.class.getName() + " as the "
          + "encoding context.");
    }

    HFileBlockDefaultEncodingContext encodingCtx =
        (HFileBlockDefaultEncodingContext) blkEncodingCtx;
    encodingCtx.prepareEncoding(out);

    RowIndexEncoderV1 encoder = new RowIndexEncoderV1(out, encodingCtx);
    RowIndexEncodingState state = new RowIndexEncodingState();
    state.encoder = encoder;
    blkEncodingCtx.setEncodingState(state);
  }

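  /**
   * Writes a single cell through the block's {@link RowIndexEncoderV1}; the return value is the
   * encoded size reported by the encoder.
   */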
  @Override
  public int encode(Cell cell, HFileBlockEncodingContext encodingCtx,
      DataOutputStream out) throws IOException {
    RowIndexEncodingState state = (RowIndexEncodingState) encodingCtx
        .getEncodingState();
    RowIndexEncoderV1 encoder = state.encoder;
    return encoder.write(cell);
  }

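  /**
   * Finishes the block by flushing the encoder and performing the standard post-encoding
   * bookkeeping.
   */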
  @Override
  public void endBlockEncoding(HFileBlockEncodingContext encodingCtx,
      DataOutputStream out, byte[] uncompressedBytesWithHeader)
      throws IOException {
    RowIndexEncodingState state = (RowIndexEncodingState) encodingCtx
        .getEncodingState();
    RowIndexEncoderV1 encoder = state.encoder;
    encoder.flush();
    postEncoding(encodingCtx);
  }

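  /**
   * Decodes a block back into a flat run of KeyValue bytes. Without tags the cell data is
   * already stored in that layout, so a view over the data portion is returned directly; with
   * tags the block is walked with a seeker and every cell is copied out (plus its mvcc sequence
   * id when included).
   */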
  @Override
  public ByteBuffer decodeKeyValues(DataInputStream source,
      HFileBlockDecodingContext decodingCtx) throws IOException {
    // Drains the whole stream into a buffer up front; wasteful, but keeps the logic simple.
    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);
    sourceAsBuffer.mark();
    if (!decodingCtx.getHFileContext().isIncludesTags()) {
      // The trailing int records the size of the flat cell data, so return a view over just
      // that region, excluding the row-offset index.
      sourceAsBuffer.position(sourceAsBuffer.limit() - Bytes.SIZEOF_INT);
      int onDiskSize = sourceAsBuffer.getInt();
      sourceAsBuffer.reset();
      ByteBuffer dup = sourceAsBuffer.duplicate();
      dup.position(sourceAsBuffer.position());
      dup.limit(sourceAsBuffer.position() + onDiskSize);
      return dup.slice();
    } else {
      // When tags are present, walk the block with a seeker and re-serialize each cell as a
      // KeyValue, appending the mvcc sequence id when the block carries one.
      RowIndexSeekerV1 seeker = new RowIndexSeekerV1(CellComparatorImpl.COMPARATOR,
          decodingCtx);
      seeker.setCurrentBuffer(new SingleByteBuff(sourceAsBuffer));
      List<Cell> kvs = new ArrayList<>();
      kvs.add(seeker.getCell());
      while (seeker.next()) {
        kvs.add(seeker.getCell());
      }
      boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(baos);
      for (Cell cell : kvs) {
        KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(cell);
        out.write(currentCell.getBuffer(), currentCell.getOffset(),
            currentCell.getLength());
        if (includesMvcc) {
          WritableUtils.writeVLong(out, cell.getSequenceId());
        }
      }
      out.flush();
      return ByteBuffer.wrap(baos.getBuffer(), 0, baos.size());
    }
  }

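  /**
   * Reads the first cell's key from the start of an encoded block without disturbing the
   * buffer's position.
   */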
  @Override
  public Cell getFirstKeyCellInBlock(ByteBuff block) {
    block.mark();
    int keyLength = block.getInt();
    // Skip the value length; only the key is needed here.
    block.getInt();
    ByteBuffer key = block.asSubByteBuffer(keyLength).duplicate();
    block.reset();
    return createFirstKeyCell(key, keyLength);
  }

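  /**
   * Creates a {@link RowIndexSeekerV1} for seeking within blocks written by this codec.
   */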
  @Override
  public EncodedSeeker createSeeker(CellComparator comparator,
      HFileBlockDecodingContext decodingCtx) {
    return new RowIndexSeekerV1(comparator, decodingCtx);
  }

}