/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.rest;

import static org.apache.hadoop.hbase.rest.RESTServlet.HBASE_REST_SUPPORT_PROXYUSER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;

import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;

import javax.ws.rs.core.MediaType;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.AccessControlConstants;
import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test class for SPNEGO authentication on the HBase REST server. Uses a MiniKdc and Apache
 * HttpComponents to verify that REST endpoints are reachable via SPNEGO-authenticated requests
 * (including proxy-user impersonation via doAs) and rejected without proper authentication or
 * authorization.
 */
@Category({MiscTests.class, MediumTests.class})
public class TestSecureRESTServer {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSecureRESTServer.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestSecureRESTServer.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final HBaseRESTTestingUtility REST_TEST = new HBaseRESTTestingUtility();
  private static MiniHBaseCluster CLUSTER;

  private static final String HOSTNAME = "localhost";
  private static final String CLIENT_PRINCIPAL = "client";
  private static final String WHEEL_PRINCIPAL = "wheel";
  // The principal for accepting SPNEGO authn'ed requests (*must* be HTTP/fqdn)
  private static final String SPNEGO_SERVICE_PRINCIPAL = "HTTP/" + HOSTNAME;
  // The principal we use to connect to HBase
  private static final String REST_SERVER_PRINCIPAL = "rest";
  private static final String SERVICE_PRINCIPAL = "hbase/" + HOSTNAME;

  private static URL baseUrl;
  private static MiniKdc KDC;
  private static RESTServer server;
  private static File restServerKeytab;
  private static File clientKeytab;
  private static File wheelKeytab;
  private static File serviceKeytab;

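  /**
   * Starts a MiniKdc, a Kerberos-secured HBase mini cluster, and a SPNEGO-protected REST server,
   * then grants the REST principal global CREATE/READ/WRITE access and loads the test data.
   */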
  @BeforeClass
  public static void setupServer() throws Exception {
    final File target = new File(System.getProperty("user.dir"), "target");
    assertTrue(target.exists());

    /*
     * Keytabs
     */
    File keytabDir = new File(target, TestSecureRESTServer.class.getSimpleName()
        + "_keytabs");
    if (keytabDir.exists()) {
      FileUtils.deleteDirectory(keytabDir);
    }
    keytabDir.mkdirs();
    // Keytab for HBase services (RS, Master)
    serviceKeytab = new File(keytabDir, "hbase.service.keytab");
    // The keytab for the REST server
    restServerKeytab = new File(keytabDir, "spnego.keytab");
    // Keytab for the client
    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
    // Keytab for wheel
    wheelKeytab = new File(keytabDir, WHEEL_PRINCIPAL + ".keytab");

    /*
     * Update UGI
     */
    Configuration conf = TEST_UTIL.getConfiguration();

    /*
     * Start KDC
     */
    KDC = TEST_UTIL.setupMiniKdc(serviceKeytab);
    KDC.createPrincipal(clientKeytab, CLIENT_PRINCIPAL);
    KDC.createPrincipal(wheelKeytab, WHEEL_PRINCIPAL);
    KDC.createPrincipal(serviceKeytab, SERVICE_PRINCIPAL);
    // REST server's keytab contains keys for both principals REST uses
    KDC.createPrincipal(restServerKeytab, SPNEGO_SERVICE_PRINCIPAL, REST_SERVER_PRINCIPAL);

    // Set configuration for HBase
    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setKeytabFileForTesting(serviceKeytab.getAbsolutePath());
    // Why doesn't `setKeytabFileForTesting` do this?
    conf.set("hbase.master.keytab.file", serviceKeytab.getAbsolutePath());
    conf.set("hbase.unsafe.regionserver.hostname", "localhost");
    conf.set("hbase.master.hostname", "localhost");
    HBaseKerberosUtils.setSecuredConfiguration(conf, SERVICE_PRINCIPAL + "@" + KDC.getRealm(),
        SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    setHdfsSecuredConfiguration(conf);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        TokenProvider.class.getName(), AccessController.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        AccessController.class.getName());
    conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
        AccessController.class.getName());
    // Enable EXEC permission checking
    conf.setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
    conf.set("hbase.superuser", "hbase");
    conf.set("hadoop.proxyuser.rest.hosts", "*");
    conf.set("hadoop.proxyuser.rest.users", "*");
    conf.set("hadoop.proxyuser.wheel.hosts", "*");
    conf.set("hadoop.proxyuser.wheel.users", "*");
    UserGroupInformation.setConfiguration(conf);

    updateKerberosConfiguration(conf, REST_SERVER_PRINCIPAL, SPNEGO_SERVICE_PRINCIPAL,
        restServerKeytab);

    // Start the mini cluster (HDFS, ZooKeeper, HBase master and region server)
    TEST_UTIL.startMiniCluster(StartMiniClusterOption.builder()
        .numMasters(1)
        .numRegionServers(1)
        .numZkServers(1)
        .build());

    // Start REST
    UserGroupInformation restUser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        REST_SERVER_PRINCIPAL, restServerKeytab.getAbsolutePath());
    restUser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        REST_TEST.startServletContainer(conf);
        return null;
      }
    });
    baseUrl = new URL("http://localhost:" + REST_TEST.getServletPort());

    LOG.info("HTTP server started: " + baseUrl);
    TEST_UTIL.waitTableAvailable(TableName.valueOf("hbase:acl"));

    // Let the REST server create, read, and write globally
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          AccessControlClient.grant(
              conn, REST_SERVER_PRINCIPAL, Action.CREATE, Action.READ, Action.WRITE);
        } catch (Throwable t) {
          if (t instanceof Exception) {
            throw (Exception) t;
          } else {
            throw new Exception(t);
          }
        }
        return null;
      }
    });
    insertData();
  }

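  /**
   * Shuts down the REST server, the mini cluster, and the MiniKdc, logging rather than rethrowing
   * failures so the remaining components still get a chance to stop.
   */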
  @AfterClass
  public static void stopServer() throws Exception {
    try {
      if (null != server) {
        server.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop info server", e);
    }
    try {
      if (CLUSTER != null) {
        CLUSTER.shutdown();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop HBase cluster", e);
    }
    try {
      if (null != KDC) {
        KDC.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop mini KDC", e);
    }
  }

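  /**
   * Configures the mini DFS cluster for Kerberos: NameNode/DataNode principals and keytabs, block
   * access tokens, and HTTPS-only web endpoints backed by freshly generated test SSL certificates.
   */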
  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
    // Set principal+keytab configuration for HDFS
    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
        SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
        SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
        SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    // Enable token access for HDFS blocks
    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    // Only use HTTPS (required because we aren't using "secure" ports)
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    // Bind on localhost for spnego to have a chance at working
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

    // Generate SSL certs
    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
    keystoresDir.mkdirs();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureRESTServer.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);

    // Magic flag to tell hdfs to not fail on using ports above 1024
    conf.setBoolean("ignore.secure.ports.for.testing", true);
  }

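  /**
   * Configures the REST server for Kerberos: the principal used to connect to HBase, the SPNEGO
   * principal used to authenticate incoming HTTP requests, the keytab holding both, and
   * proxy-user (doAs) support.
   */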
  private static void updateKerberosConfiguration(Configuration conf,
      String serverPrincipal, String spnegoPrincipal, File serverKeytab) {
    KerberosName.setRules("DEFAULT");

    // Enable Kerberos (pre-req)
    conf.set("hbase.security.authentication", "kerberos");
    conf.set(RESTServer.REST_AUTHENTICATION_TYPE, "kerberos");
    // User to talk to HBase as
    conf.set(RESTServer.REST_KERBEROS_PRINCIPAL, serverPrincipal);
    // User to accept SPNEGO-auth'd http calls as
    conf.set("hbase.rest.authentication.kerberos.principal", spnegoPrincipal);
    // Keytab for both principals above
    conf.set(RESTServer.REST_KEYTAB_FILE, serverKeytab.getAbsolutePath());
    conf.set("hbase.rest.authentication.kerberos.keytab", serverKeytab.getAbsolutePath());
    conf.set(HBASE_REST_SUPPORT_PROXYUSER, "true");
  }

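  /**
   * As the HBase superuser, creates the {@code publicTable} table, writes a single row to it, and
   * grants the client principal global READ permission.
   */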
  private static void insertData() throws IOException, InterruptedException {
    // Create a table, write a row to it, grant read perms to the client
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
      SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          TableDescriptor desc = TableDescriptorBuilder.newBuilder(table)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
            .build();
          conn.getAdmin().createTable(desc);
          try (Table t = conn.getTable(table)) {
            Put p = new Put(Bytes.toBytes("a"));
            p.addColumn(Bytes.toBytes("f1"), new byte[0], Bytes.toBytes("1"));
            t.put(p);
          }
          AccessControlClient.grant(conn, CLIENT_PRINCIPAL, Action.READ);
        } catch (Throwable e) {
          if (e instanceof Exception) {
            throw (Exception) e;
          } else {
            throw new Exception(e);
          }
        }
        return null;
      }
    });
  }

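  /**
   * Issues a SPNEGO-authenticated GET for the test row as the given principal, optionally with a
   * doAs/doas query string, and asserts the expected HTTP status code. For a 200 response the
   * returned JSON is also checked against the row written by {@link #insertData()}.
   */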
  public void testProxy(String extraArgs, String principal, File keytab, int responseCode)
      throws Exception {
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
      SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");

    // Read that row as the client
    Pair<CloseableHttpClient,HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    HttpGet get = new HttpGet(new URL("http://localhost:" + REST_TEST.getServletPort()).toURI()
      + "/" + table + "/a" + extraArgs);
    get.addHeader("Accept", "application/json");
    UserGroupInformation user = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
      principal, keytab.getAbsolutePath());
    String jsonResponse = user.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (CloseableHttpResponse response = client.execute(get, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          assertEquals(response.getStatusLine().toString(), responseCode, statusCode);
          HttpEntity entity = response.getEntity();
          return EntityUtils.toString(entity);
        }
      }
    });
    if (responseCode == HttpURLConnection.HTTP_OK) {
      ObjectMapper mapper = new JacksonJaxbJsonProvider()
          .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
      CellSetModel model = mapper.readValue(jsonResponse, CellSetModel.class);
      assertEquals(1, model.getRows().size());
      RowModel row = model.getRows().get(0);
      assertEquals("a", Bytes.toString(row.getKey()));
      assertEquals(1, row.getCells().size());
      CellModel cell = row.getCells().get(0);
      assertEquals("1", Bytes.toString(cell.getValue()));
    }
  }

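  /*
   * The tests below drive testProxy() as different principals: the client reading its own data,
   * the wheel proxy user impersonating the client via doAs/doas, and the wheel user without
   * impersonation (which has no READ grant on the table and is rejected).
   */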
  @Test
  public void testPositiveAuthorization() throws Exception {
    testProxy("", CLIENT_PRINCIPAL, clientKeytab, HttpURLConnection.HTTP_OK);
  }

  @Test
  public void testDoAs() throws Exception {
    testProxy("?doAs=" + CLIENT_PRINCIPAL, WHEEL_PRINCIPAL, wheelKeytab,
      HttpURLConnection.HTTP_OK);
  }

  @Test
  public void testDoas() throws Exception {
    testProxy("?doas=" + CLIENT_PRINCIPAL, WHEEL_PRINCIPAL, wheelKeytab,
      HttpURLConnection.HTTP_OK);
  }

  @Test
  public void testWithoutDoAs() throws Exception {
    testProxy("", WHEEL_PRINCIPAL, wheelKeytab, HttpURLConnection.HTTP_FORBIDDEN);
  }

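  /**
   * Verifies that a principal without CREATE permission cannot create a table schema through the
   * REST endpoint and receives HTTP 403.
   */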
  @Test
  public void testNegativeAuthorization() throws Exception {
    Pair<CloseableHttpClient,HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    StringEntity entity = new StringEntity(
        "{\"name\":\"test\", \"ColumnSchema\":[{\"name\":\"f\"}]}", ContentType.APPLICATION_JSON);
    HttpPut put = new HttpPut("http://localhost:" + REST_TEST.getServletPort() + "/test/schema");
    put.setEntity(entity);

    UserGroupInformation unprivileged = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        CLIENT_PRINCIPAL, clientKeytab.getAbsolutePath());
    unprivileged.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (CloseableHttpResponse response = client.execute(put, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          HttpEntity entity = response.getEntity();
          assertEquals("Got response: " + EntityUtils.toString(entity),
              HttpURLConnection.HTTP_FORBIDDEN, statusCode);
        }
        return null;
      }
    });
  }

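  /**
   * Builds an Apache HttpClient and context pre-configured for SPNEGO: the only registered auth
   * scheme is SPNEGO and the credentials are empty, since the actual identity comes from the
   * Kerberos login of the calling {@code UserGroupInformation}.
   */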
  private Pair<CloseableHttpClient,HttpClientContext> getClient() {
    HttpClientConnectionManager pool = new PoolingHttpClientConnectionManager();
    HttpHost host = new HttpHost("localhost", REST_TEST.getServletPort());
    Registry<AuthSchemeProvider> authRegistry =
        RegistryBuilder.<AuthSchemeProvider>create().register(AuthSchemes.SPNEGO,
            new SPNegoSchemeFactory(true, true)).build();
    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    credentialsProvider.setCredentials(AuthScope.ANY, EmptyCredentials.INSTANCE);
    AuthCache authCache = new BasicAuthCache();

    CloseableHttpClient client = HttpClients.custom()
        .setDefaultAuthSchemeRegistry(authRegistry)
        .setConnectionManager(pool).build();

    HttpClientContext context = HttpClientContext.create();
    context.setTargetHost(host);
    context.setCredentialsProvider(credentialsProvider);
    context.setAuthSchemeRegistry(authRegistry);
    context.setAuthCache(authCache);

    return new Pair<>(client, context);
  }

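  /**
   * Credentials implementation with no user or password; SPNEGO derives the identity from the
   * Kerberos ticket of the logged-in user rather than from explicit credentials.
   */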
  private static class EmptyCredentials implements Credentials {
    public static final EmptyCredentials INSTANCE = new EmptyCredentials();

    @Override public String getPassword() {
      return null;
    }
    @Override public Principal getUserPrincipal() {
      return null;
    }
  }
}