@InterfaceAudience.LimitedPrivate(value="Coprocesssor") @InterfaceStability.Evolving public class Export extends org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService implements RegionCoprocessor
Export an HBase table to sequence files in HDFS. Use
Import
to read it back in again. It is implemented by the endpoint
technique.

Export
Modifier and Type | Class and Description |
---|---|
private static class |
Export.PrivilegedWriter |
private static class |
Export.RegionOp |
static class |
Export.Response |
private static class |
Export.ScanCoprocessor |
private static class |
Export.SecureWriter |
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.BlockingInterface, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.Interface, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.Stub
Coprocessor.State
Modifier and Type | Field and Description |
---|---|
private static Class<? extends org.apache.hadoop.io.compress.CompressionCodec> |
DEFAULT_CODEC |
private static org.apache.hadoop.io.SequenceFile.CompressionType |
DEFAULT_TYPE |
private RegionCoprocessorEnvironment |
env |
private static org.slf4j.Logger |
LOG |
private UserProvider |
userProvider |
PRIORITY_HIGHEST, PRIORITY_LOWEST, PRIORITY_SYSTEM, PRIORITY_USER, VERSION
Constructor and Description |
---|
Export() |
Modifier and Type | Method and Description |
---|---|
private static void |
checkDir(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path dir) |
void |
export(com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done)
rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse); |
private static boolean |
getCompression(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
private static org.apache.hadoop.io.compress.CompressionCodec |
getCompressionCodec(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
private static org.apache.hadoop.io.SequenceFile.CompressionType |
getCompressionType(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest |
getConfiguredRequest(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.fs.Path dir,
Scan scan,
org.apache.hadoop.security.token.Token<?> userToken) |
private static org.apache.hadoop.io.SequenceFile.Writer.Option |
getOutputPath(org.apache.hadoop.conf.Configuration conf,
RegionInfo info,
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
Iterable<com.google.protobuf.Service> |
getServices()
Coprocessor endpoints providing protobuf services should override this method.
|
private static List<org.apache.hadoop.io.SequenceFile.Writer.Option> |
getWriterOptions(org.apache.hadoop.conf.Configuration conf,
RegionInfo info,
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
static void |
main(String[] args) |
private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse |
processData(Region region,
org.apache.hadoop.conf.Configuration conf,
UserProvider userProvider,
Scan scan,
org.apache.hadoop.security.token.Token userToken,
List<org.apache.hadoop.io.SequenceFile.Writer.Option> opts) |
(package private) static Map<byte[],Export.Response> |
run(org.apache.hadoop.conf.Configuration conf,
String[] args) |
static Map<byte[],Export.Response> |
run(org.apache.hadoop.conf.Configuration conf,
TableName tableName,
Scan scan,
org.apache.hadoop.fs.Path dir) |
void |
start(CoprocessorEnvironment environment)
Called by the
CoprocessorEnvironment during its own startup to initialize the
coprocessor. |
void |
stop(CoprocessorEnvironment env)
Called by the
CoprocessorEnvironment during its own shutdown to stop the
coprocessor. |
private Scan |
validateKey(RegionInfo region,
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
callMethod, getDescriptor, getDescriptorForType, getRequestPrototype, getResponsePrototype, newBlockingStub, newReflectiveBlockingService, newReflectiveService, newStub
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
getBulkLoadObserver, getEndpointObserver, getRegionObserver
private static final org.slf4j.Logger LOG
private static final Class<? extends org.apache.hadoop.io.compress.CompressionCodec> DEFAULT_CODEC
private static final org.apache.hadoop.io.SequenceFile.CompressionType DEFAULT_TYPE
private RegionCoprocessorEnvironment env
private UserProvider userProvider
public Export()
static Map<byte[],Export.Response> run(org.apache.hadoop.conf.Configuration conf, String[] args) throws Throwable
Throwable
public static Map<byte[],Export.Response> run(org.apache.hadoop.conf.Configuration conf, TableName tableName, Scan scan, org.apache.hadoop.fs.Path dir) throws Throwable
Throwable
private static boolean getCompression(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
private static org.apache.hadoop.io.SequenceFile.CompressionType getCompressionType(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
private static org.apache.hadoop.io.compress.CompressionCodec getCompressionCodec(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
private static org.apache.hadoop.io.SequenceFile.Writer.Option getOutputPath(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
IOException
private static List<org.apache.hadoop.io.SequenceFile.Writer.Option> getWriterOptions(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
IOException
private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse processData(Region region, org.apache.hadoop.conf.Configuration conf, UserProvider userProvider, Scan scan, org.apache.hadoop.security.token.Token userToken, List<org.apache.hadoop.io.SequenceFile.Writer.Option> opts) throws IOException
IOException
private static void checkDir(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path dir) throws IOException
IOException
private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest getConfiguredRequest(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.Path dir, Scan scan, org.apache.hadoop.security.token.Token<?> userToken) throws IOException
IOException
public void start(CoprocessorEnvironment environment) throws IOException
Coprocessor
CoprocessorEnvironment
during its own startup to initialize the
coprocessor.

start
in interface Coprocessor
IOException
public void stop(CoprocessorEnvironment env) throws IOException
Coprocessor
CoprocessorEnvironment
during its own shutdown to stop the
coprocessor.

stop
in interface Coprocessor
IOException
public Iterable<com.google.protobuf.Service> getServices()
Coprocessor
getServices
in interface Coprocessor
Service
s or empty collection. Implementations should never
return null.

public void export(com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done)
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService
rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);
export
in class org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService
private Scan validateKey(RegionInfo region, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
IOException
Copyright © 2007–2020 The Apache Software Foundation. All rights reserved.