/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Iterate over an HBase table's data, returning (ImmutableBytesWritable, Result)
 * pairs.
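 *
 * <p>Instances are normally created and driven by {@code TableInputFormat} and
 * the MapReduce framework rather than by user code. A minimal sketch of the
 * read loop, assuming {@code table}, {@code split} and {@code context} are
 * supplied by the caller:
 * <pre>{@code
 * TableRecordReader reader = new TableRecordReader();
 * reader.setTable(table);       // the Table to read from
 * reader.setScan(new Scan());   // scan defining columns, row ranges, etc.
 * reader.initialize(split, context);
 * while (reader.nextKeyValue()) {
 *   ImmutableBytesWritable key = reader.getCurrentKey();
 *   Result value = reader.getCurrentValue();
 *   // process the row here
 * }
 * reader.close();
 * }</pre>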
 */
@InterfaceAudience.Public
public class TableRecordReader
    extends RecordReader<ImmutableBytesWritable, Result> {

  private TableRecordReaderImpl recordReaderImpl = new TableRecordReaderImpl();

  /**
   * Restart from survivable exceptions by creating a new scanner.
   *
   * @param firstRow  The first row to start at.
   * @throws IOException When restarting fails.
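   *
   * <p>A sketch of caller-side recovery; the caller is assumed to remember the
   * last row key it saw, here called {@code lastSeenRow}:
   * <pre>{@code
   * try {
   *   reader.nextKeyValue();
   * } catch (IOException e) {
   *   reader.restart(lastSeenRow);  // resume scanning from the saved row
   * }
   * }</pre>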
   */
  public void restart(byte[] firstRow) throws IOException {
    this.recordReaderImpl.restart(firstRow);
  }

  /**
   * Sets the table to scan.
   *
   * @param table the {@link Table} to scan.
   */
  public void setTable(Table table) {
    this.recordReaderImpl.setHTable(table);
  }

  /**
   * Sets the scan defining the actual details of the read, such as which
   * columns and row ranges to include.
   *
   * @param scan  The scan to set.
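   *
   * <p>A minimal sketch; the family name {@code "cf"} and the caching value
   * are illustrative assumptions:
   * <pre>{@code
   * Scan scan = new Scan();
   * scan.addFamily(Bytes.toBytes("cf"));  // restrict to one column family
   * scan.setCaching(500);                 // rows fetched per scanner RPC
   * reader.setScan(scan);
   * }</pre>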
   */
  public void setScan(Scan scan) {
    this.recordReaderImpl.setScan(scan);
  }

  /**
   * Closes the split.
   *
   * @see org.apache.hadoop.mapreduce.RecordReader#close()
   */
  @Override
  public void close() {
    this.recordReaderImpl.close();
  }

  /**
   * Returns the current key.
   *
   * @return The current key.
   * @throws IOException When reading the key fails.
   * @throws InterruptedException When the job is aborted.
   * @see org.apache.hadoop.mapreduce.RecordReader#getCurrentKey()
   */
  @Override
  public ImmutableBytesWritable getCurrentKey() throws IOException,
      InterruptedException {
    return this.recordReaderImpl.getCurrentKey();
  }

  /**
   * Returns the current value.
   *
   * @return The current value.
   * @throws IOException When reading the value fails.
   * @throws InterruptedException When the job is aborted.
   * @see org.apache.hadoop.mapreduce.RecordReader#getCurrentValue()
   */
  @Override
  public Result getCurrentValue() throws IOException, InterruptedException {
    return this.recordReaderImpl.getCurrentValue();
  }

  /**
   * Initializes the reader.
   *
   * @param inputsplit  The split to work with.
   * @param context  The current task context.
   * @throws IOException When setting up the reader fails.
   * @throws InterruptedException When the job is aborted.
   * @see org.apache.hadoop.mapreduce.RecordReader#initialize(
   *   org.apache.hadoop.mapreduce.InputSplit,
   *   org.apache.hadoop.mapreduce.TaskAttemptContext)
   */
  @Override
  public void initialize(InputSplit inputsplit,
      TaskAttemptContext context) throws IOException,
      InterruptedException {
    this.recordReaderImpl.initialize(inputsplit, context);
  }

  /**
   * Positions the record reader to the next record.
   *
   * @return <code>true</code> if there was another record.
   * @throws IOException When reading the record fails.
   * @throws InterruptedException When the job is aborted.
   * @see org.apache.hadoop.mapreduce.RecordReader#nextKeyValue()
   */
  @Override
  public boolean nextKeyValue() throws IOException, InterruptedException {
    return this.recordReaderImpl.nextKeyValue();
  }

  /**
   * The current progress of the record reader through its data.
   *
   * @return A number between 0.0 and 1.0, the fraction of the data read.
   * @see org.apache.hadoop.mapreduce.RecordReader#getProgress()
   */
  @Override
  public float getProgress() {
    return this.recordReaderImpl.getProgress();
  }

}