/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.rest;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;

import java.io.File;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;

import javax.ws.rs.core.MediaType;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.AccessControlConstants;
import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
 * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and unreachable
 * without it.
 */
@Category({MiscTests.class, MediumTests.class})
public class TestSecureRESTServer {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSecureRESTServer.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestSecureRESTServer.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final HBaseRESTTestingUtility REST_TEST = new HBaseRESTTestingUtility();
  private static MiniHBaseCluster CLUSTER;

  private static final String HOSTNAME = "localhost";
  private static final String CLIENT_PRINCIPAL = "client";
  // The principal for accepting SPNEGO authn'ed requests (*must* be HTTP/fqdn)
  private static final String SPNEGO_SERVICE_PRINCIPAL = "HTTP/" + HOSTNAME;
  // The principal we use to connect to HBase
  private static final String REST_SERVER_PRINCIPAL = "rest";
  private static final String SERVICE_PRINCIPAL = "hbase/" + HOSTNAME;

  private static URL baseUrl;
  private static MiniKdc KDC;
  private static RESTServer server;
  private static File restServerKeytab;
  private static File clientKeytab;
  private static File serviceKeytab;

  @BeforeClass
  public static void setupServer() throws Exception {
    final File target = new File(System.getProperty("user.dir"), "target");
    assertTrue(target.exists());

    /*
     * Keytabs
     */
    File keytabDir = new File(target, TestSecureRESTServer.class.getSimpleName()
        + "_keytabs");
    if (keytabDir.exists()) {
      FileUtils.deleteDirectory(keytabDir);
    }
    keytabDir.mkdirs();
    // Keytab for HBase services (RS, Master)
    serviceKeytab = new File(keytabDir, "hbase.service.keytab");
    // The keytab for the REST server
    restServerKeytab = new File(keytabDir, "spnego.keytab");
    // Keytab for the client
    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");

    /*
     * Update UGI
     */
    Configuration conf = TEST_UTIL.getConfiguration();

    /*
     * Start KDC
     */
    KDC = TEST_UTIL.setupMiniKdc(serviceKeytab);
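    // Create the test principals; MiniKdc#createPrincipal generates keys for the listed
    // principals and exports them into the supplied keytab file.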
    KDC.createPrincipal(clientKeytab, CLIENT_PRINCIPAL);
    KDC.createPrincipal(serviceKeytab, SERVICE_PRINCIPAL);
    // REST server's keytab contains keys for both principals REST uses
    KDC.createPrincipal(restServerKeytab, SPNEGO_SERVICE_PRINCIPAL, REST_SERVER_PRINCIPAL);

    // Set configuration for HBase
    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setKeytabFileForTesting(serviceKeytab.getAbsolutePath());
    // Why doesn't `setKeytabFileForTesting` do this?
    conf.set("hbase.master.keytab.file", serviceKeytab.getAbsolutePath());
    conf.set("hbase.regionserver.hostname", "localhost");
    conf.set("hbase.master.hostname", "localhost");
    HBaseKerberosUtils.setSecuredConfiguration(conf,
        SERVICE_PRINCIPAL + "@" + KDC.getRealm(), SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    setHdfsSecuredConfiguration(conf);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        TokenProvider.class.getName(), AccessController.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        AccessController.class.getName());
    conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
        AccessController.class.getName());
    // Enable EXEC permission checking
    conf.setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
    conf.set("hbase.superuser", "hbase");
    conf.set("hadoop.proxyuser.rest.hosts", "*");
    conf.set("hadoop.proxyuser.rest.users", "*");
    UserGroupInformation.setConfiguration(conf);

    updateKerberosConfiguration(conf, REST_SERVER_PRINCIPAL, SPNEGO_SERVICE_PRINCIPAL,
        restServerKeytab);

    // Start the mini cluster (HDFS, ZooKeeper, and HBase)
    TEST_UTIL.startMiniCluster(StartMiniClusterOption.builder()
        .numMasters(1)
        .numRegionServers(1)
        .numZkServers(1)
        .build());

    // Start REST
    UserGroupInformation restUser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        REST_SERVER_PRINCIPAL, restServerKeytab.getAbsolutePath());
    restUser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        REST_TEST.startServletContainer(conf);
        return null;
      }
    });
    baseUrl = new URL("http://localhost:" + REST_TEST.getServletPort());

    LOG.info("HTTP server started: " + baseUrl);
    TEST_UTIL.waitTableAvailable(TableName.valueOf("hbase:acl"));

    // Let the REST server create, read, and write globally
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          AccessControlClient.grant(
              conn, REST_SERVER_PRINCIPAL, Action.CREATE, Action.READ, Action.WRITE);
        } catch (Throwable t) {
          if (t instanceof Exception) {
            throw (Exception) t;
          } else {
            throw new Exception(t);
          }
        }
        return null;
      }
    });
  }

  @AfterClass
  public static void stopServer() throws Exception {
    try {
      if (null != server) {
        server.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop REST server", e);
    }
    try {
      if (CLUSTER != null) {
        CLUSTER.shutdown();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop HBase cluster", e);
    }
    try {
      if (null != KDC) {
        KDC.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop mini KDC", e);
    }
  }

  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
    // Set principal+keytab configuration for HDFS
    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
        SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
        SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
        SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    // Enable token access for HDFS blocks
    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    // Only use HTTPS (required because we aren't using "secure" ports)
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    // Bind on localhost for spnego to have a chance at working
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

    // Generate SSL certs
    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
    keystoresDir.mkdirs();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureRESTServer.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);

    // Magic flag to tell hdfs to not fail on using ports above 1024
    conf.setBoolean("ignore.secure.ports.for.testing", true);
  }

  private static void updateKerberosConfiguration(Configuration conf,
      String serverPrincipal, String spnegoPrincipal, File serverKeytab) {
    KerberosName.setRules("DEFAULT");

    // Enable Kerberos (pre-req)
    conf.set("hbase.security.authentication", "kerberos");
    conf.set(RESTServer.REST_AUTHENTICATION_TYPE, "kerberos");
    // User to talk to HBase as
    conf.set(RESTServer.REST_KERBEROS_PRINCIPAL, serverPrincipal);
    // User to accept SPNEGO-auth'd http calls as
    conf.set("hbase.rest.authentication.kerberos.principal", spnegoPrincipal);
    // Keytab for both principals above
    conf.set(RESTServer.REST_KEYTAB_FILE, serverKeytab.getAbsolutePath());
    conf.set("hbase.rest.authentication.kerberos.keytab", serverKeytab.getAbsolutePath());
  }
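
  /*
   * Positive path: a superuser creates a table, writes a single row, and grants READ to the
   * client principal; the client then fetches that row over REST with SPNEGO authentication
   * and the JSON response is checked.
   */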
  @Test
  public void testPositiveAuthorization() throws Exception {
    // Create a table, write a row to it, grant read perms to the client
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          TableDescriptor desc = TableDescriptorBuilder.newBuilder(table)
              .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
              .build();
          conn.getAdmin().createTable(desc);
          try (Table t = conn.getTable(table)) {
            Put p = new Put(Bytes.toBytes("a"));
            p.addColumn(Bytes.toBytes("f1"), new byte[0], Bytes.toBytes("1"));
            t.put(p);
          }
          AccessControlClient.grant(conn, CLIENT_PRINCIPAL, Action.READ);
        } catch (Throwable e) {
          if (e instanceof Exception) {
            throw (Exception) e;
          } else {
            throw new Exception(e);
          }
        }
        return null;
      }
    });

    // Read that row as the client
    Pair<CloseableHttpClient, HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    HttpGet get = new HttpGet(new URL("http://localhost:" + REST_TEST.getServletPort()).toURI()
        + "/" + table + "/a");
    get.addHeader("Accept", "application/json");
    UserGroupInformation user = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        CLIENT_PRINCIPAL, clientKeytab.getAbsolutePath());
    String jsonResponse = user.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (CloseableHttpResponse response = client.execute(get, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          assertEquals(response.getStatusLine().toString(), HttpURLConnection.HTTP_OK, statusCode);
          HttpEntity entity = response.getEntity();
          return EntityUtils.toString(entity);
        }
      }
    });
    ObjectMapper mapper = new JacksonJaxbJsonProvider()
        .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
    CellSetModel model = mapper.readValue(jsonResponse, CellSetModel.class);
    assertEquals(1, model.getRows().size());
    RowModel row = model.getRows().get(0);
    assertEquals("a", Bytes.toString(row.getKey()));
    assertEquals(1, row.getCells().size());
    CellModel cell = row.getCells().get(0);
    assertEquals("1", Bytes.toString(cell.getValue()));
  }

  @Test
  public void testNegativeAuthorization() throws Exception {
    Pair<CloseableHttpClient, HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    StringEntity entity = new StringEntity(
        "{\"name\":\"test\", \"ColumnSchema\":[{\"name\":\"f\"}]}", ContentType.APPLICATION_JSON);
    HttpPut put = new HttpPut("http://localhost:" + REST_TEST.getServletPort() + "/test/schema");
    put.setEntity(entity);

    UserGroupInformation unprivileged = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        CLIENT_PRINCIPAL, clientKeytab.getAbsolutePath());
    unprivileged.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (CloseableHttpResponse response = client.execute(put, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          HttpEntity entity = response.getEntity();
          assertEquals("Got response: " + EntityUtils.toString(entity),
              HttpURLConnection.HTTP_FORBIDDEN, statusCode);
        }
        return null;
      }
    });
  }
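
  /*
   * Builds an HttpClient configured for SPNEGO: only the SPNEGO auth scheme is registered,
   * and the credentials provider hands back EmptyCredentials because the Kerberos credentials
   * come from the JAAS login of the calling thread (the UserGroupInformation doAs() blocks in
   * the tests), not from a username/password pair.
   */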
  private Pair<CloseableHttpClient, HttpClientContext> getClient() {
    HttpClientConnectionManager pool = new PoolingHttpClientConnectionManager();
    HttpHost host = new HttpHost("localhost", REST_TEST.getServletPort());
    Registry<AuthSchemeProvider> authRegistry =
        RegistryBuilder.<AuthSchemeProvider>create().register(AuthSchemes.SPNEGO,
            new SPNegoSchemeFactory(true, true)).build();
    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    credentialsProvider.setCredentials(AuthScope.ANY, EmptyCredentials.INSTANCE);
    AuthCache authCache = new BasicAuthCache();

    CloseableHttpClient client = HttpClients.custom()
        .setDefaultAuthSchemeRegistry(authRegistry)
        .setConnectionManager(pool).build();

    HttpClientContext context = HttpClientContext.create();
    context.setTargetHost(host);
    context.setCredentialsProvider(credentialsProvider);
    context.setAuthSchemeRegistry(authRegistry);
    context.setAuthCache(authCache);

    return new Pair<>(client, context);
  }

  private static class EmptyCredentials implements Credentials {
    public static final EmptyCredentials INSTANCE = new EmptyCredentials();

    @Override public String getPassword() {
      return null;
    }
    @Override public Principal getUserPrincipal() {
      return null;
    }
  }
}