@InterfaceAudience.LimitedPrivate(value="Coprocesssor")
@InterfaceStability.Evolving
public class Export
extends org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService
implements RegionCoprocessor

Export an HBase table. Writes content to sequence files up in HDFS. Use Import to read it back in again. It is implemented by the endpoint technique.

**Nested Classes**

| Modifier and Type | Class and Description |
|---|---|
| private static class  | Export.PrivilegedWriter | 
| private static class  | Export.RegionOp | 
| static class  | Export.Response | 
| private static class  | Export.ScanCoprocessor | 
| private static class  | Export.SecureWriter | 

Nested classes/interfaces inherited from class org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService: org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.BlockingInterface, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.Interface, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService.Stub

Nested classes/interfaces inherited from interface org.apache.hadoop.hbase.Coprocessor: Coprocessor.State

**Fields**

| Modifier and Type | Field and Description |
|---|---|
| private static Class<? extends org.apache.hadoop.io.compress.CompressionCodec> | DEFAULT_CODEC | 
| private static org.apache.hadoop.io.SequenceFile.CompressionType | DEFAULT_TYPE | 
| private RegionCoprocessorEnvironment | env | 
| private static org.slf4j.Logger | LOG | 
| private UserProvider | userProvider | 

Fields inherited from interface org.apache.hadoop.hbase.Coprocessor: PRIORITY_HIGHEST, PRIORITY_LOWEST, PRIORITY_SYSTEM, PRIORITY_USER, VERSION

**Constructors**

| Constructor and Description |
|---|
| Export() | 

**Methods**

| Modifier and Type | Method and Description |
|---|---|
| private static void | checkDir(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path dir) |
| void | export(com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done): rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse); |
| private static boolean | getCompression(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
| private static org.apache.hadoop.io.compress.CompressionCodec | getCompressionCodec(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
| private static org.apache.hadoop.io.SequenceFile.CompressionType | getCompressionType(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
| private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest | getConfiguredRequest(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.Path dir, Scan scan, org.apache.hadoop.security.token.Token<?> userToken) |
| private static org.apache.hadoop.io.SequenceFile.Writer.Option | getOutputPath(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
| Iterable<com.google.protobuf.Service> | getServices(): Coprocessor endpoints providing protobuf services should override this method. |
| private static List<org.apache.hadoop.io.SequenceFile.Writer.Option> | getWriterOptions(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |
| static void | main(String[] args) |
| private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse | processData(Region region, org.apache.hadoop.conf.Configuration conf, UserProvider userProvider, Scan scan, org.apache.hadoop.security.token.Token userToken, List<org.apache.hadoop.io.SequenceFile.Writer.Option> opts) |
| (package private) static Map<byte[],Export.Response> | run(org.apache.hadoop.conf.Configuration conf, String[] args) |
| static Map<byte[],Export.Response> | run(org.apache.hadoop.conf.Configuration conf, TableName tableName, Scan scan, org.apache.hadoop.fs.Path dir) |
| void | start(CoprocessorEnvironment environment): Called by the CoprocessorEnvironment during its own startup to initialize the coprocessor. |
| void | stop(CoprocessorEnvironment env): Called by the CoprocessorEnvironment during its own shutdown to stop the coprocessor. |
| private Scan | validateKey(RegionInfo region, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) |

Methods inherited from class org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService: callMethod, getDescriptor, getDescriptorForType, getRequestPrototype, getResponsePrototype, newBlockingStub, newReflectiveBlockingService, newReflectiveService, newStub

Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface org.apache.hadoop.hbase.coprocessor.RegionCoprocessor: getBulkLoadObserver, getEndpointObserver, getRegionObserver

**Field Detail**

private static final org.slf4j.Logger LOG
private static final Class<? extends org.apache.hadoop.io.compress.CompressionCodec> DEFAULT_CODEC
private static final org.apache.hadoop.io.SequenceFile.CompressionType DEFAULT_TYPE
private RegionCoprocessorEnvironment env
private UserProvider userProvider

**Constructor Detail**

public Export()

**Method Detail**
static Map<byte[],Export.Response> run(org.apache.hadoop.conf.Configuration conf, String[] args) throws Throwable
Throws: Throwable

public static Map<byte[],Export.Response> run(org.apache.hadoop.conf.Configuration conf, TableName tableName, Scan scan, org.apache.hadoop.fs.Path dir) throws Throwable
Throws: Throwable
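For callers, the four-argument `run` overload is the natural entry point: it invokes the `export` endpoint on each region of the table and collects one `Export.Response` per region. Below is a minimal client sketch, assuming the Export coprocessor is already loaded on the target table's region servers; the table name, column family, and output path are placeholders:

```java
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.Export;
import org.apache.hadoop.hbase.util.Bytes;

public class ExportRunner {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();

    // Limit the export to one column family ("cf" is a placeholder).
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));

    // Each region writes its matching rows to a sequence file under the
    // output directory; the returned map has one entry per region.
    Map<byte[], Export.Response> results = Export.run(
        conf,
        TableName.valueOf("my_table"),            // placeholder table name
        scan,
        new Path("hdfs:///tmp/my_table_export")); // placeholder output dir

    results.forEach((region, response) ->
        System.out.println(Bytes.toStringBinary(region) + " -> " + response));
  }
}
```

`main(String[] args)` forwards to the `run(conf, args)` overload, so the same export can also be launched as a command-line tool; the HBase reference guide shows the form `hbase org.apache.hadoop.hbase.coprocessor.Export <tableName> <outputDir>`.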
private static boolean getCompression(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)

private static org.apache.hadoop.io.SequenceFile.CompressionType getCompressionType(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)

private static org.apache.hadoop.io.compress.CompressionCodec getCompressionCodec(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)

private static org.apache.hadoop.io.SequenceFile.Writer.Option getOutputPath(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
Throws: IOException

private static List<org.apache.hadoop.io.SequenceFile.Writer.Option> getWriterOptions(org.apache.hadoop.conf.Configuration conf, RegionInfo info, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
Throws: IOException
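The `getCompression*` helpers read the compression settings out of the `ExportRequest` (the `DEFAULT_TYPE` and `DEFAULT_CODEC` fields suggest the fallbacks), and `getOutputPath`/`getWriterOptions` turn them into `SequenceFile.Writer.Option`s. The sketch below shows the plain Hadoop API these helpers build on, not the private implementation itself; the output path and the Gzip codec are assumptions for illustration:

```java
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class WriterOptionsSketch {
  // Builds the kind of option list that getWriterOptions assembles from an
  // ExportRequest.
  static List<SequenceFile.Writer.Option> writerOptions(Configuration conf, Path outputFile) {
    List<SequenceFile.Writer.Option> opts = new ArrayList<>();
    opts.add(SequenceFile.Writer.file(outputFile));                       // one file per region
    opts.add(SequenceFile.Writer.keyClass(ImmutableBytesWritable.class)); // row key
    opts.add(SequenceFile.Writer.valueClass(Result.class));               // full row contents
    // Compression is optional; in the endpoint the type and codec come from
    // the ExportRequest. Gzip stands in for a configured codec here.
    opts.add(SequenceFile.Writer.compression(
        SequenceFile.CompressionType.RECORD,
        ReflectionUtils.newInstance(GzipCodec.class, conf)));
    return opts;
  }
}
```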
private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse processData(Region region, org.apache.hadoop.conf.Configuration conf, UserProvider userProvider, Scan scan, org.apache.hadoop.security.token.Token userToken, List<org.apache.hadoop.io.SequenceFile.Writer.Option> opts) throws IOException
Throws: IOException
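`processData` is the server-side core: it scans the region and appends each row to the sequence file. The client-side sketch below reproduces the shape of that write loop using the public scanner API rather than the region internals; note that `Result` is not `Writable`, so HBase's `ResultSerialization` has to be registered for the `SequenceFile` machinery to serialize it. Table and file names are placeholders:

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
import org.apache.hadoop.io.SequenceFile;

public class WriteLoopSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Register the serializer for Result values.
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        ResultSerialization.class.getName());

    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("my_table"));
         ResultScanner scanner = table.getScanner(new Scan());
         SequenceFile.Writer writer = SequenceFile.createWriter(conf,
             SequenceFile.Writer.file(new Path("/tmp/sketch_export")),
             SequenceFile.Writer.keyClass(ImmutableBytesWritable.class),
             SequenceFile.Writer.valueClass(Result.class))) {
      ImmutableBytesWritable key = new ImmutableBytesWritable();
      for (Result result : scanner) {
        key.set(result.getRow());   // row key becomes the sequence-file key
        writer.append(key, result); // the full Result is the value
      }
    }
  }
}
```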
private static void checkDir(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path dir) throws IOException
Throws: IOException

private static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest getConfiguredRequest(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.Path dir, Scan scan, org.apache.hadoop.security.token.Token<?> userToken) throws IOException
Throws: IOException
public void start(CoprocessorEnvironment environment) throws IOException
Description copied from interface Coprocessor: Called by the CoprocessorEnvironment during its own startup to initialize the coprocessor.
Specified by: start in interface Coprocessor
Throws: IOException

public void stop(CoprocessorEnvironment env) throws IOException
Description copied from interface Coprocessor: Called by the CoprocessorEnvironment during its own shutdown to stop the coprocessor.
Specified by: stop in interface Coprocessor
Throws: IOException

public Iterable<com.google.protobuf.Service> getServices()
Description copied from interface Coprocessor: Coprocessor endpoints providing protobuf services should override this method.
Specified by: getServices in interface Coprocessor
Returns: Services or empty collection. Implementations should never return null.

public void export(com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done)
Description copied from class org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService: rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);
Overrides: export in class org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService

private Scan validateKey(RegionInfo region, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) throws IOException
Throws: IOException
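None of the lifecycle hooks above run, and `getServices` never registers the `ExportService` protobuf endpoint, unless the class is actually loaded as a region coprocessor. One way to do that is the `hbase.coprocessor.region.classes` property; a minimal sketch, noting that in practice this key is normally set in hbase-site.xml on the region servers rather than programmatically:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class LoadExportEndpoint {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Region servers load every class listed under this key as a
    // system-wide region coprocessor at startup.
    conf.setStrings("hbase.coprocessor.region.classes",
        "org.apache.hadoop.hbase.coprocessor.Export");
  }
}
```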