@InterfaceAudience.Public public class TableRecordReaderImpl extends Object
Modifier and Type | Field and Description |
---|---|
static String | LOG_PER_ROW_COUNT |
Constructor and Description |
---|
TableRecordReaderImpl() |
Modifier and Type | Method and Description |
---|---|
void | close() Closes the split. |
ImmutableBytesWritable | getCurrentKey() Returns the current key. |
Result | getCurrentValue() Returns the current value. |
float | getProgress() The current progress of the record reader through its data. |
void | initialize(org.apache.hadoop.mapreduce.InputSplit inputsplit, org.apache.hadoop.mapreduce.TaskAttemptContext context) Build the scanner. |
boolean | nextKeyValue() Positions the record reader to the next record. |
void | restart(byte[] firstRow) Restart from survivable exceptions by creating a new scanner. |
protected static Method | retrieveGetCounterWithStringsParams(org.apache.hadoop.mapreduce.TaskAttemptContext context) Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0 |
void | setHTable(Table htable) Sets the HBase table. |
void | setScan(Scan scan) Sets the scan defining the actual details like columns etc. |
protected static void | updateCounters(org.apache.hadoop.hbase.client.metrics.ScanMetrics scanMetrics, long numScannerRestarts, Method getCounter, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale) Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0. Use updateCounters(ScanMetrics, long, TaskAttemptContext, long) instead. |
protected static void | updateCounters(org.apache.hadoop.hbase.client.metrics.ScanMetrics scanMetrics, long numScannerRestarts, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale) |
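Taken together, these methods follow the standard Hadoop RecordReader lifecycle: the table and scan are supplied first, initialize(...) builds the scanner, and the caller then loops over nextKeyValue(), pulling the row key and Result from getCurrentKey() and getCurrentValue(). In a real job this wiring is normally done by the table input format rather than by hand; the sketch below only illustrates the call order. The connection, split, context, and the "mytable" table name are placeholders supplied by the caller.

```java
import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class RecordReaderSketch {

  // connection, split, and context are assumed to be supplied by the caller /
  // the MapReduce framework; "mytable" is a placeholder table name.
  static void readSplit(Connection connection, InputSplit split, TaskAttemptContext context)
      throws IOException, InterruptedException {
    Table table = connection.getTable(TableName.valueOf("mytable"));
    TableRecordReaderImpl reader = new TableRecordReaderImpl();
    try {
      reader.setHTable(table);           // the HBase table to read from
      reader.setScan(new Scan());        // scan details: columns, ranges, caching, ...
      reader.initialize(split, context); // builds the underlying scanner
      while (reader.nextKeyValue()) {    // advance to the next row
        ImmutableBytesWritable rowKey = reader.getCurrentKey();
        Result row = reader.getCurrentValue();
        // process rowKey / row here
      }
    } finally {
      reader.close(); // closes the scanner
      table.close();
    }
  }
}
```

In practice the Scan passed to setScan(Scan) would typically also be restricted to the split's key range; the sketch leaves the scan unrestricted for brevity.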
public static final String LOG_PER_ROW_COUNT
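The page gives only the constant's name, not its string value or exactly how it is consumed. Assuming it names an integer Hadoop configuration key that controls how many rows are read between progress log messages (an assumption, not something stated here), it could be set on the job configuration like this:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;

public class LogPerRowCountSketch {
  static Configuration configure() {
    Configuration conf = HBaseConfiguration.create();
    // Assumption: LOG_PER_ROW_COUNT names an integer configuration key that
    // controls how many rows are scanned between progress log messages.
    conf.setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 1000);
    return conf;
  }
}
```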
public TableRecordReaderImpl()
public void restart(byte[] firstRow) throws IOException
Restart from survivable exceptions by creating a new scanner.
Parameters:
firstRow - The first row to start at.
Throws:
IOException - When restarting fails.

@Deprecated protected static Method retrieveGetCounterWithStringsParams(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException
Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0
Throws:
IOException
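Since restart(byte[]) above is public, a caller or subclass that catches a recoverable scanner failure can reopen the scan at a known row instead of failing the task. A minimal sketch, where reader is assumed to be already initialized and lastSeenRow is a hypothetical row key tracked by the caller:

```java
import java.io.IOException;

import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;

public class RestartSketch {
  // reader is assumed to be set up via setHTable/setScan/initialize already;
  // lastSeenRow is a hypothetical row key tracked by the caller.
  static boolean advanceWithRetry(TableRecordReaderImpl reader, byte[] lastSeenRow)
      throws IOException, InterruptedException {
    try {
      return reader.nextKeyValue();
    } catch (IOException e) {
      // On a survivable failure, rebuild the scanner from the last known row
      // and try once more. Real code would be more selective about which
      // exceptions it treats as survivable.
      reader.restart(lastSeenRow);
      return reader.nextKeyValue();
    }
  }
}
```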
public void setHTable(Table htable)
Sets the HBase table.
Parameters:
htable - The Table to scan.

public void setScan(Scan scan)
Sets the scan defining the actual details like columns etc.
Parameters:
scan - The scan to set.

public void initialize(org.apache.hadoop.mapreduce.InputSplit inputsplit, org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException, InterruptedException
Build the scanner.
Throws:
IOException
InterruptedException
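Because the Scan handed to setScan(Scan) above defines everything the reader will return, column selection and scan tuning happen there rather than in the reader itself. A brief sketch of building such a scan; the "cf"/"col1" names and the tuning values are placeholders, not values taken from this page:

```java
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanSketch {
  static Scan buildScan() {
    Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("col1")); // placeholder family/qualifier
    scan.setCaching(500);       // rows fetched per RPC round trip
    scan.setCacheBlocks(false); // commonly disabled for full-table MapReduce scans
    return scan;
  }
}
```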
public void close()
Closes the split.
public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException
Returns the current key.
Throws:
IOException
InterruptedException - When the job is aborted.
public Result getCurrentValue() throws IOException, InterruptedException
Returns the current value.
Throws:
IOException - When the value is faulty.
InterruptedException - When the job is aborted.

public boolean nextKeyValue() throws IOException, InterruptedException
Positions the record reader to the next record.
Returns:
true if there was another record.
Throws:
IOException - When reading the record failed.
InterruptedException - When the job was aborted.

@Deprecated protected static void updateCounters(org.apache.hadoop.hbase.client.metrics.ScanMetrics scanMetrics, long numScannerRestarts, Method getCounter, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale)
Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0. Use updateCounters(ScanMetrics, long, TaskAttemptContext, long) instead.

protected static void updateCounters(org.apache.hadoop.hbase.client.metrics.ScanMetrics scanMetrics, long numScannerRestarts, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale)
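Each pair produced inside the nextKeyValue() loop shown earlier is a raw row key (ImmutableBytesWritable) plus a Result holding the matched cells. A short sketch of decoding them, assuming string-encoded data under a placeholder "cf":"col1" column:

```java
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;

public class DecodeSketch {
  static String describe(ImmutableBytesWritable key, Result value) {
    String rowKey = Bytes.toString(key.copyBytes());
    // Placeholder family/qualifier; getValue returns null if the cell is absent.
    byte[] cell = value.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col1"));
    return rowKey + " -> " + (cell == null ? "<missing>" : Bytes.toString(cell));
  }
}
```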
public float getProgress()
The current progress of the record reader through its data.