/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.rest;

import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;

import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

/**
 * Demonstration client for the HBase REST gateway. Writes one row to a table named "example"
 * (with a column family named "family1") through {@link RemoteHTable}, reads the row back, and
 * prints its cells. When the REST gateway is Kerberos-secured, the client authenticates with the
 * credential found in the local ticket cache, so run kinit before starting the demo.
 */
@InterfaceAudience.Private
public class RESTDemoClient {

  private static String host = "localhost";
  private static int port = 9090;
  private static boolean secure = false;
  private static org.apache.hadoop.conf.Configuration conf = null;

  public static void main(String[] args) throws Exception {
    System.out.println("REST Demo");
    System.out.println("Usage: RESTDemoClient [host=localhost] [port=9090] [secure=false]");
    System.out.println("This demo assumes you have a table called \"example\""
        + " with a column family called \"family1\"");

    // use passed in arguments instead of defaults
    if (args.length >= 1) {
      host = args[0];
    }
    if (args.length >= 2) {
      port = Integer.parseInt(args[1]);
    }
    conf = HBaseConfiguration.create();
    String principal = conf.get(Constants.REST_KERBEROS_PRINCIPAL);
    if (principal != null) {
      secure = true;
    }
    if (args.length >= 3) {
      secure = Boolean.parseBoolean(args[2]);
    }

    final RESTDemoClient client = new RESTDemoClient();
    Subject.doAs(getSubject(), new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        client.run();
        return null;
      }
    });
  }

  public void run() throws Exception {
    Cluster cluster = new Cluster();
    cluster.add(host, port);
    Client restClient = new Client(cluster, conf.getBoolean(Constants.REST_SSL_ENABLED, false));
    try (RemoteHTable remoteTable = new RemoteHTable(restClient, conf, "example")) {
      // Write data to the table
      String rowKey = "row1";
      Put p = new Put(rowKey.getBytes());
      p.addColumn("family1".getBytes(), "qualifier1".getBytes(), "value1".getBytes());
      remoteTable.put(p);

      // Get the data from the table
      Get g = new Get(rowKey.getBytes());
      Result result = remoteTable.get(g);

      Preconditions.checkArgument(result != null,
        Bytes.toString(remoteTable.getTableName()) + " should have a row with key as " + rowKey);
      System.out.println("row = " + new String(result.getRow()));
      for (Cell cell : result.rawCells()) {
        System.out.print("family = " + Bytes.toString(CellUtil.cloneFamily(cell)) + "\t");
        System.out.print("qualifier = " + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t");
        System.out.print("value = " + Bytes.toString(CellUtil.cloneValue(cell)) + "\t");
        System.out.println("timestamp = " + Long.toString(cell.getTimestamp()));
      }
    }
  }

  static Subject getSubject() throws Exception {
    if (!secure) {
      return new Subject();
    }

    /*
     * To authenticate the demo client, kinit should be invoked ahead. Here we try to get the
     * Kerberos credential from the ticket cache.
     */
    LoginContext context = new LoginContext("", new Subject(), null, new Configuration() {
      @Override
      public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
        Map<String, String> options = new HashMap<>();
        options.put("useKeyTab", "false");
        options.put("storeKey", "false");
        options.put("doNotPrompt", "true");
        options.put("useTicketCache", "true");
        options.put("renewTGT", "true");
        options.put("refreshKrb5Config", "true");
        options.put("isInitiator", "true");
        String ticketCache = System.getenv("KRB5CCNAME");
        if (ticketCache != null) {
          options.put("ticketCache", ticketCache);
        }
        options.put("debug", "true");

        return new AppConfigurationEntry[] {
            new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
      }
    });
    context.login();
    return context.getSubject();
  }
}