@InterfaceAudience.Public public class TableRecordReaderImpl extends Object
Modifier and Type | Field and Description |
---|---|
private org.apache.hadoop.mapreduce.TaskAttemptContext | context |
private Scan | currentScan |
(package private) static String | HBASE_COUNTER_GROUP_NAME |
private Table | htable |
private ImmutableBytesWritable | key |
private byte[] | lastSuccessfulRow |
private static org.slf4j.Logger | LOG |
static String | LOG_PER_ROW_COUNT |
private int | logPerRowCount |
private boolean | logScannerActivity |
private long | numRestarts |
private long | numStale |
private int | rowcount |
private Scan | scan |
private ResultScanner | scanner |
private long | timestamp |
private Result | value |
Constructor and Description |
---|
TableRecordReaderImpl() |
Modifier and Type | Method and Description |
---|---|
void | close() Closes the split. |
ImmutableBytesWritable | getCurrentKey() Returns the current key. |
Result | getCurrentValue() Returns the current value. |
float | getProgress() The current progress of the record reader through its data. |
void | initialize(org.apache.hadoop.mapreduce.InputSplit inputsplit, org.apache.hadoop.mapreduce.TaskAttemptContext context) Build the scanner. |
boolean | nextKeyValue() Positions the record reader to the next record. |
void | restart(byte[] firstRow) Restart from survivable exceptions by creating a new scanner. |
protected static Method | retrieveGetCounterWithStringsParams(org.apache.hadoop.mapreduce.TaskAttemptContext context) Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0 |
void | setHTable(Table htable) Sets the HBase table. |
void | setScan(Scan scan) Sets the scan defining the actual details like columns etc. |
private void | updateCounters() If hbase runs on new version of mapreduce, RecordReader has access to counters thus can update counters based on scanMetrics. |
protected static void | updateCounters(ScanMetrics scanMetrics, long numScannerRestarts, Method getCounter, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale) Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0 Use updateCounters(ScanMetrics, long, TaskAttemptContext, long) instead. |
protected static void | updateCounters(ScanMetrics scanMetrics, long numScannerRestarts, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale) |
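Taken together, the summary above implies the call order a RecordReader wrapper (such as the one created by TableInputFormat) follows when it drives this class. The sketch below is illustrative only: the readSplit helper and the caller-supplied Table, Scan, InputSplit and TaskAttemptContext are assumptions, not part of this API.

```java
import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ReaderLifecycleSketch {
  /** Hypothetical helper: drives one split the way a RecordReader wrapper would. */
  static long readSplit(Table table, Scan scan, InputSplit split, TaskAttemptContext context)
      throws IOException, InterruptedException {
    TableRecordReaderImpl reader = new TableRecordReaderImpl();
    reader.setHTable(table);            // the HBase table to scan
    reader.setScan(scan);               // columns, row range, caching, ...
    reader.initialize(split, context);  // build the scanner
    long rows = 0;
    try {
      while (reader.nextKeyValue()) {   // position at the next record
        ImmutableBytesWritable key = reader.getCurrentKey();  // current row key
        Result value = reader.getCurrentValue();              // current row
        rows++;                         // a real mapper would process key/value here
      }
    } finally {
      reader.close();                   // close the split
    }
    return rows;
  }
}
```

In an actual job this sequence is normally performed by the input format and the MapReduce framework rather than by user code.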
public static final String LOG_PER_ROW_COUNT
private static final org.slf4j.Logger LOG
@InterfaceAudience.Private static final String HBASE_COUNTER_GROUP_NAME
private ResultScanner scanner
private Scan scan
private Scan currentScan
private Table htable
private byte[] lastSuccessfulRow
private ImmutableBytesWritable key
private Result value
private org.apache.hadoop.mapreduce.TaskAttemptContext context
private long numRestarts
private long numStale
private long timestamp
private int rowcount
private boolean logScannerActivity
private int logPerRowCount
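The logScannerActivity and logPerRowCount fields suggest that scanner-activity logging is driven by configuration read from the table's Configuration. Assuming LOG_PER_ROW_COUNT is the configuration key behind logPerRowCount (and that scanner-activity logging itself is enabled elsewhere in the client configuration), a caller could tune the logging interval roughly like this:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;

public class LoggingConfigSketch {
  // Assumption: LOG_PER_ROW_COUNT names the key that feeds logPerRowCount, i.e. how many
  // rows are read between "scanner activity" log lines; 1000 is an illustrative value.
  static Configuration withRowCountLogging() {
    Configuration conf = HBaseConfiguration.create();
    conf.setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 1000);
    return conf;
  }
}
```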
public TableRecordReaderImpl()
public void restart(byte[] firstRow) throws IOException
Restart from survivable exceptions by creating a new scanner.
Parameters:
firstRow - The first row to start at.
Throws:
IOException - When restarting fails.

@Deprecated
protected static Method retrieveGetCounterWithStringsParams(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException
Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0
Throws:
IOException
public void setHTable(Table htable)
Sets the HBase table.
Parameters:
htable - The Table to scan.

public void setScan(Scan scan)
Sets the scan defining the actual details like columns etc.
Parameters:
scan - The scan to set.

public void initialize(org.apache.hadoop.mapreduce.InputSplit inputsplit, org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException, InterruptedException
Build the scanner.
Throws:
IOException
InterruptedException
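setScan(Scan) expects a fully configured Scan: everything about what is read (columns, row range, caching) is decided there. Below is a small sketch of the kind of scan a caller might hand over; the family, qualifier, row range and caching value are illustrative assumptions.

```java
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanSetupSketch {
  static void configure(TableRecordReaderImpl reader) {
    Scan scan = new Scan()
        .withStartRow(Bytes.toBytes("row-0000"))             // illustrative row range
        .withStopRow(Bytes.toBytes("row-9999"))
        .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"))  // illustrative column
        .setCaching(500);                                     // rows fetched per scanner RPC
    reader.setScan(scan);  // the scan the reader uses when it builds its scanner
  }
}
```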
public void close()
Closes the split.
public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException
Returns the current key.
Throws:
IOException
InterruptedException - When the job is aborted.
public Result getCurrentValue() throws IOException, InterruptedException
Returns the current value.
Throws:
IOException - When the value is faulty.
InterruptedException - When the job is aborted.

public boolean nextKeyValue() throws IOException, InterruptedException
Positions the record reader to the next record.
Returns:
true if there was another record.
Throws:
IOException - When reading the record failed.
InterruptedException - When the job was aborted.

private void updateCounters()
If hbase runs on new version of mapreduce, RecordReader has access to counters thus can update counters based on scanMetrics.
@Deprecated
protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts, Method getCounter, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale)
Deprecated. since 2.4.0 and 2.3.2, will be removed in 4.0.0 Use updateCounters(ScanMetrics, long, TaskAttemptContext, long) instead.

protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts, org.apache.hadoop.mapreduce.TaskAttemptContext context, long numStale)
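After a job finishes, the values published by updateCounters(ScanMetrics, long, TaskAttemptContext, long) are ordinary MapReduce counters. The hedged sketch below simply dumps every counter group of a completed Job; the scan-related counters appear under the group named by HBASE_COUNTER_GROUP_NAME, whose exact value is not repeated here.

```java
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Job;

public class ScanCountersSketch {
  // Prints every counter of a finished job, including the scan metrics and the
  // restart/stale-result counts this reader publishes via updateCounters(...).
  static void dumpCounters(Job job) throws Exception {
    for (CounterGroup group : job.getCounters()) {
      for (Counter counter : group) {
        System.out.println(group.getName() + "::" + counter.getName() + " = " + counter.getValue());
      }
    }
  }
}
```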
public float getProgress()
The current progress of the record reader through its data.