/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client.example;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.Export;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * A simple example on how to use {@link org.apache.hadoop.hbase.coprocessor.Export}.
 * <p>
 * For the protocol buffer definition of the ExportService, see the source file located under
 * hbase-endpoint/src/main/protobuf/Export.proto.
 * </p>
 */
@InterfaceAudience.Private
public final class ExportEndpointExample {

  /** Example/utility class: not meant to be instantiated. */
  private ExportEndpointExample() {
  }

  /**
   * Creates a demo table with the {@link Export} endpoint mounted, loads it with sample rows,
   * then runs a full-table export to the filesystem and prints the aggregated row/cell totals.
   * @param args unused
   * @throws Throwable if connecting, table creation, the puts, or the export fail
   */
  public static void main(String[] args) throws Throwable {
    int rowCount = 100;
    byte[] family = Bytes.toBytes("family");
    Configuration conf = HBaseConfiguration.create();
    TableName tableName = TableName.valueOf("ExportEndpointExample");
    try (Connection con = ConnectionFactory.createConnection(conf);
      Admin admin = con.getAdmin()) {
      TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
        // MUST mount the export endpoint
        .setCoprocessor(Export.class.getName())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family))
        .build();
      admin.createTable(desc);

      // Load sample data: one row per integer, with a single cell whose qualifier and
      // value are both the row key bytes.
      List<Put> puts = new ArrayList<>(rowCount);
      for (int row = 0; row != rowCount; ++row) {
        byte[] bs = Bytes.toBytes(row);
        Put put = new Put(bs);
        put.addColumn(family, bs, bs);
        puts.add(put);
      }
      try (Table table = con.getTable(tableName)) {
        table.put(puts);
      }

      Path output = new Path("/tmp/ExportEndpointExample_output");
      Scan scan = new Scan();
      // Export.run returns one Response per region; sum the per-region counters.
      Map<byte[], Export.Response> result = Export.run(conf, tableName, scan, output);
      final long totalOutputRows =
        result.values().stream().mapToLong(Export.Response::getRowCount).sum();
      final long totalOutputCells =
        result.values().stream().mapToLong(Export.Response::getCellCount).sum();
      System.out.println("table:" + tableName);
      System.out.println("output:" + output);
      System.out.println("total rows:" + totalOutputRows);
      System.out.println("total cells:" + totalOutputCells);
    }
  }
}