/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.rest;

import static org.apache.hadoop.hbase.rest.RESTServlet.HBASE_REST_SUPPORT_PROXYUSER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.AccessControlConstants;
import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;

/**
 * Test class for SPNEGO authentication against the HBase REST server. Uses the Hadoop MiniKdc and
 * Apache HttpComponents to verify that SPNEGO-authenticated REST calls succeed, that proxy-user
 * (doAs) impersonation works, and that requests lacking the required privileges are rejected.
 */
@Category({ MiscTests.class, MediumTests.class })
public class TestSecureRESTServer {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestSecureRESTServer.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestSecureRESTServer.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final HBaseRESTTestingUtility REST_TEST = new HBaseRESTTestingUtility();
  private static MiniHBaseCluster CLUSTER;

  private static final String HOSTNAME = "localhost";
  private static final String CLIENT_PRINCIPAL = "client";
  private static final String WHEEL_PRINCIPAL = "wheel";
  // The principal for accepting SPNEGO authn'ed requests (*must* be HTTP/fqdn)
  private static final String SPNEGO_SERVICE_PRINCIPAL = "HTTP/" + HOSTNAME;
  // The principal we use to connect to HBase
  private static final String REST_SERVER_PRINCIPAL = "rest";
  private static final String SERVICE_PRINCIPAL = "hbase/" + HOSTNAME;

  private static URL baseUrl;
  private static MiniKdc KDC;
  private static RESTServer server;
  private static File restServerKeytab;
  private static File clientKeytab;
  private static File wheelKeytab;
  private static File serviceKeytab;

  @BeforeClass
  public static void setupServer() throws Exception {
    final File target = new File(System.getProperty("user.dir"), "target");
    assertTrue(target.exists());

    /*
     * Keytabs
     */
    File keytabDir = new File(target, TestSecureRESTServer.class.getSimpleName() + "_keytabs");
    if (keytabDir.exists()) {
      FileUtils.deleteDirectory(keytabDir);
    }
    keytabDir.mkdirs();
    // Keytab for HBase services (RS, Master)
    serviceKeytab = new File(keytabDir, "hbase.service.keytab");
    // The keytab for the REST server
    restServerKeytab = new File(keytabDir, "spnego.keytab");
    // Keytab for the client
    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
    // Keytab for wheel
    wheelKeytab = new File(keytabDir, WHEEL_PRINCIPAL + ".keytab");

    /*
     * Update UGI
     */
    Configuration conf = TEST_UTIL.getConfiguration();

    /*
     * Start KDC
     */
    KDC = TEST_UTIL.setupMiniKdc(serviceKeytab);
    KDC.createPrincipal(clientKeytab, CLIENT_PRINCIPAL);
    KDC.createPrincipal(wheelKeytab, WHEEL_PRINCIPAL);
    KDC.createPrincipal(serviceKeytab, SERVICE_PRINCIPAL);
    // REST server's keytab contains keys for both principals REST uses
    KDC.createPrincipal(restServerKeytab, SPNEGO_SERVICE_PRINCIPAL, REST_SERVER_PRINCIPAL);

    // Set configuration for HBase
    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setKeytabFileForTesting(serviceKeytab.getAbsolutePath());
    // setKeytabFileForTesting() does not set the master keytab file, so set it explicitly
    conf.set("hbase.master.keytab.file", serviceKeytab.getAbsolutePath());
    conf.set("hbase.unsafe.regionserver.hostname", "localhost");
    conf.set("hbase.master.hostname", "localhost");
    HBaseKerberosUtils.setSecuredConfiguration(conf, SERVICE_PRINCIPAL + "@" + KDC.getRealm(),
      SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    setHdfsSecuredConfiguration(conf);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, TokenProvider.class.getName(),
      AccessController.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
    conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
      AccessController.class.getName());
    // Enable EXEC permission checking
    conf.setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
    conf.set("hbase.superuser", "hbase");
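    // Allow the REST server principal ("rest") and the "wheel" user to impersonate any user from
    // any host. This Hadoop proxy-user configuration is what the doAs/doas tests below rely on.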
    conf.set("hadoop.proxyuser.rest.hosts", "*");
    conf.set("hadoop.proxyuser.rest.users", "*");
    conf.set("hadoop.proxyuser.wheel.hosts", "*");
    conf.set("hadoop.proxyuser.wheel.users", "*");
    UserGroupInformation.setConfiguration(conf);

    updateKerberosConfiguration(conf, REST_SERVER_PRINCIPAL, SPNEGO_SERVICE_PRINCIPAL,
      restServerKeytab);

    // Start the mini cluster (HDFS, ZooKeeper, and HBase)
    TEST_UTIL.startMiniCluster(
      StartMiniClusterOption.builder().numMasters(1).numRegionServers(1).numZkServers(1).build());

    // Start REST
    UserGroupInformation restUser = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI(REST_SERVER_PRINCIPAL, restServerKeytab.getAbsolutePath());
    restUser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        REST_TEST.startServletContainer(conf);
        return null;
      }
    });
    baseUrl = new URL("http://localhost:" + REST_TEST.getServletPort());

    LOG.info("HTTP server started: " + baseUrl);
    TEST_UTIL.waitTableAvailable(TableName.valueOf("hbase:acl"));

    // Let the REST server create, read, and write globally
    UserGroupInformation superuser = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          AccessControlClient.grant(conn, REST_SERVER_PRINCIPAL, Action.CREATE, Action.READ,
            Action.WRITE);
        } catch (Throwable t) {
          if (t instanceof Exception) {
            throw (Exception) t;
          } else {
            throw new Exception(t);
          }
        }
        return null;
      }
    });
    insertData();
  }

  @AfterClass
  public static void stopServer() throws Exception {
    try {
      if (null != server) {
        server.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop info server", e);
    }
    try {
      if (CLUSTER != null) {
        CLUSTER.shutdown();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop HBase cluster", e);
    }
    try {
      if (null != KDC) {
        KDC.stop();
      }
    } catch (Exception e) {
      LOG.info("Failed to stop mini KDC", e);
    }
  }

  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
    // Set principal+keytab configuration for HDFS
    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serviceKeytab.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
      SPNEGO_SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    // Enable token access for HDFS blocks
    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    // Only use HTTPS (required because we aren't using "secure" ports)
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    // Bind on localhost for spnego to have a chance at working
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");

    // Generate SSL certs
    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
    keystoresDir.mkdirs();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureRESTServer.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);

    // Magic flag to tell hdfs to not fail on using ports above 1024
    conf.setBoolean("ignore.secure.ports.for.testing", true);
  }

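  // Configure the REST server itself for Kerberos: one principal to talk to HBase and a separate
  // HTTP/host principal to accept SPNEGO-authenticated requests, both stored in the same keytab.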
  private static void updateKerberosConfiguration(Configuration conf, String serverPrincipal,
    String spnegoPrincipal, File serverKeytab) {
    KerberosName.setRules("DEFAULT");

    // Enable Kerberos (pre-req)
    conf.set("hbase.security.authentication", "kerberos");
    conf.set(RESTServer.REST_AUTHENTICATION_TYPE, "kerberos");
    // User to talk to HBase as
    conf.set(RESTServer.REST_KERBEROS_PRINCIPAL, serverPrincipal);
    // User to accept SPNEGO-auth'd http calls as
    conf.set("hbase.rest.authentication.kerberos.principal", spnegoPrincipal);
    // Keytab for both principals above
    conf.set(RESTServer.REST_KEYTAB_FILE, serverKeytab.getAbsolutePath());
    conf.set("hbase.rest.authentication.kerberos.keytab", serverKeytab.getAbsolutePath());
    conf.set(HBASE_REST_SUPPORT_PROXYUSER, "true");
  }

  private static void insertData() throws IOException, InterruptedException {
    // Create a table, write a row to it, grant read perms to the client
    UserGroupInformation superuser = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");
    superuser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
          TableDescriptor desc = TableDescriptorBuilder.newBuilder(table)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build();
          conn.getAdmin().createTable(desc);
          try (Table t = conn.getTable(table)) {
            Put p = new Put(Bytes.toBytes("a"));
            p.addColumn(Bytes.toBytes("f1"), new byte[0], Bytes.toBytes("1"));
            t.put(p);
          }
          AccessControlClient.grant(conn, CLIENT_PRINCIPAL, Action.READ);
        } catch (Throwable e) {
          if (e instanceof Exception) {
            throw (Exception) e;
          } else {
            throw new Exception(e);
          }
        }
        return null;
      }
    });
  }

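  /**
   * Fetches row "a" from "publicTable" over REST as the given principal, appending
   * {@code extraArgs} (e.g. a "?doAs=..." query string) to the URL, and asserts that the response
   * carries the expected HTTP status. When a 200 is expected, also verifies the returned cell.
   */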
  public void testProxy(String extraArgs, String principal, File keytab, int responseCode)
    throws Exception {
    UserGroupInformation superuser = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");

    // Read that row as the client
    Pair<CloseableHttpClient, HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    HttpGet get = new HttpGet(new URL("http://localhost:" + REST_TEST.getServletPort()).toURI()
      + "/" + table + "/a" + extraArgs);
    get.addHeader("Accept", "application/json");
    UserGroupInformation user =
      UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab.getAbsolutePath());
    String jsonResponse = user.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (CloseableHttpResponse response = client.execute(get, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          assertEquals(response.getStatusLine().toString(), responseCode, statusCode);
          HttpEntity entity = response.getEntity();
          return EntityUtils.toString(entity);
        }
      }
    });
    if (responseCode == HttpURLConnection.HTTP_OK) {
      ObjectMapper mapper = new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class,
        MediaType.APPLICATION_JSON_TYPE);
      CellSetModel model = mapper.readValue(jsonResponse, CellSetModel.class);
      assertEquals(1, model.getRows().size());
      RowModel row = model.getRows().get(0);
      assertEquals("a", Bytes.toString(row.getKey()));
      assertEquals(1, row.getCells().size());
      CellModel cell = row.getCells().get(0);
      assertEquals("1", Bytes.toString(cell.getValue()));
    }
  }

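  // "client" was granted READ on the table in insertData(), so a plain SPNEGO-authenticated GET
  // succeeds.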
  @Test
  public void testPositiveAuthorization() throws Exception {
    testProxy("", CLIENT_PRINCIPAL, clientKeytab, HttpURLConnection.HTTP_OK);
  }

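  // "wheel" is configured as a Hadoop proxy user, so it may impersonate "client" (who holds READ).
  // The REST server accepts both the "doAs" and "doas" spellings of the impersonation parameter.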
  @Test
  public void testDoAs() throws Exception {
    testProxy("?doAs=" + CLIENT_PRINCIPAL, WHEEL_PRINCIPAL, wheelKeytab, HttpURLConnection.HTTP_OK);
  }

  @Test
  public void testDoas() throws Exception {
    testProxy("?doas=" + CLIENT_PRINCIPAL, WHEEL_PRINCIPAL, wheelKeytab, HttpURLConnection.HTTP_OK);
  }

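  // Without impersonation the request runs as "wheel", which was never granted READ on the table,
  // so the REST call is rejected.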
  @Test
  public void testWithoutDoAs() throws Exception {
    testProxy("", WHEEL_PRINCIPAL, wheelKeytab, HttpURLConnection.HTTP_FORBIDDEN);
  }

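  // "client" only holds READ, so attempting to create a table schema over REST must come back
  // with a 403.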
  @Test
  public void testNegativeAuthorization() throws Exception {
    Pair<CloseableHttpClient, HttpClientContext> pair = getClient();
    CloseableHttpClient client = pair.getFirst();
    HttpClientContext context = pair.getSecond();

    StringEntity entity = new StringEntity(
      "{\"name\":\"test\", \"ColumnSchema\":[{\"name\":\"f\"}]}", ContentType.APPLICATION_JSON);
    HttpPut put = new HttpPut("http://localhost:" + REST_TEST.getServletPort() + "/test/schema");
    put.setEntity(entity);

    UserGroupInformation unprivileged = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI(CLIENT_PRINCIPAL, clientKeytab.getAbsolutePath());
    unprivileged.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        try (CloseableHttpResponse response = client.execute(put, context)) {
          final int statusCode = response.getStatusLine().getStatusCode();
          HttpEntity entity = response.getEntity();
          assertEquals("Got response: " + EntityUtils.toString(entity),
            HttpURLConnection.HTTP_FORBIDDEN, statusCode);
        }
        return null;
      }
    });
  }

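  // Builds an HttpClient wired for SPNEGO: the SPNEGO auth scheme is registered with placeholder
  // credentials, so the client authenticates with the Kerberos ticket of whichever UGI the request
  // is executed under (via doAs).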
  private Pair<CloseableHttpClient, HttpClientContext> getClient() {
    HttpClientConnectionManager pool = new PoolingHttpClientConnectionManager();
    HttpHost host = new HttpHost("localhost", REST_TEST.getServletPort());
    Registry<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider> create()
      .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true)).build();
    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    credentialsProvider.setCredentials(AuthScope.ANY, EmptyCredentials.INSTANCE);
    AuthCache authCache = new BasicAuthCache();

    CloseableHttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry)
      .setConnectionManager(pool).build();

    HttpClientContext context = HttpClientContext.create();
    context.setTargetHost(host);
    context.setCredentialsProvider(credentialsProvider);
    context.setAuthSchemeRegistry(authRegistry);
    context.setAuthCache(authCache);

    return new Pair<>(client, context);
  }

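  // SPNEGO obtains the actual credentials from the JAAS Kerberos login, so HttpClient only needs a
  // placeholder Credentials object with no username or password.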
  private static class EmptyCredentials implements Credentials {
    public static final EmptyCredentials INSTANCE = new EmptyCredentials();

    @Override
    public String getPassword() {
      return null;
    }

    @Override
    public Principal getUserPrincipal() {
      return null;
    }
  }
}