/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.rest;

import com.google.common.base.Preconditions;

import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;

import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

@InterfaceAudience.Private
public class RESTDemoClient {

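  // Connection defaults; host, port, and secure can all be overridden by the
  // command-line arguments parsed in main().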
  private static String host = "localhost";
  private static int port = 9090;
  private static boolean secure = false;
  private static org.apache.hadoop.conf.Configuration conf = null;

  public static void main(String[] args) throws Exception {
    System.out.println("REST Demo");
    System.out.println("Usage: RESTDemoClient [host=localhost] [port=9090] [secure=false]");
    System.out.println("This demo assumes you have a table called \"example\""
        + " with a column family called \"family1\"");

    // use passed in arguments instead of defaults
    if (args.length >= 1) {
      host = args[0];
    }
    if (args.length >= 2) {
      port = Integer.parseInt(args[1]);
    }
    conf = HBaseConfiguration.create();
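    // Assume a secure deployment when the REST gateway is configured with a Kerberos
    // principal; an explicit third argument can still override this below.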
    String principal = conf.get(Constants.REST_KERBEROS_PRINCIPAL);
    if (principal != null) {
      secure = true;
    }
    if (args.length >= 3) {
      secure = Boolean.parseBoolean(args[2]);
    }

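    // Run the demo as the authenticated subject; when security is off, getSubject()
    // returns an empty Subject and this is effectively a plain call to client.run().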
    final RESTDemoClient client = new RESTDemoClient();
    Subject.doAs(getSubject(), new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        client.run();
        return null;
      }
    });
  }

  public void run() throws Exception {
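    // Point the REST client at the gateway; whether to use SSL is taken from the
    // Constants.REST_SSL_ENABLED configuration key.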
    Cluster cluster = new Cluster();
    cluster.add(host, port);
    Client restClient = new Client(cluster, conf.getBoolean(Constants.REST_SSL_ENABLED, false));
    try (RemoteHTable remoteTable = new RemoteHTable(restClient, conf, "example")) {
      // Write data to the table
      String rowKey = "row1";
      Put p = new Put(Bytes.toBytes(rowKey));
      p.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("qualifier1"), Bytes.toBytes("value1"));
      remoteTable.put(p);

      // Get the data from the table
      Get g = new Get(Bytes.toBytes(rowKey));
      Result result = remoteTable.get(g);

      Preconditions.checkArgument(result != null,
        Bytes.toString(remoteTable.getTableName()) + " should have a row with key as " + rowKey);
      System.out.println("row = " + Bytes.toString(result.getRow()));
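      // Dump every cell in the row: family, qualifier, value, and timestamp.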
      for (Cell cell : result.rawCells()) {
        System.out.print("family = " + Bytes.toString(CellUtil.cloneFamily(cell)) + "\t");
        System.out.print("qualifier = " + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t");
        System.out.print("value = " + Bytes.toString(CellUtil.cloneValue(cell)) + "\t");
        System.out.println("timestamp = " + Long.toString(cell.getTimestamp()));
      }
    }
  }

  static Subject getSubject() throws Exception {
    if (!secure) {
      return new Subject();
    }

    /*
     * To authenticate the demo client, kinit should be invoked beforehand. Here we try to get
     * the Kerberos credential from the ticket cache.
     */
    LoginContext context = new LoginContext("", new Subject(), null, new Configuration() {
      @Override
      public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
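        // Krb5LoginModule options: authenticate from the existing ticket cache only
        // (no keytab, no prompting), renewing the TGT if necessary.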
        Map<String, String> options = new HashMap<>();
        options.put("useKeyTab", "false");
        options.put("storeKey", "false");
        options.put("doNotPrompt", "true");
        options.put("useTicketCache", "true");
        options.put("renewTGT", "true");
        options.put("refreshKrb5Config", "true");
        options.put("isInitiator", "true");
        String ticketCache = System.getenv("KRB5CCNAME");
        if (ticketCache != null) {
          options.put("ticketCache", ticketCache);
        }
        options.put("debug", "true");

        return new AppConfigurationEntry[] {
            new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
      }
    });
    context.login();
    return context.getSubject();
  }
}