/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.provider.example;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.MasterRegistry;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.MasterRegistryFetchException;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * End-to-end test for the example "Shade" SASL authentication providers. It stands up a
 * Kerberos-secured single-node HBase cluster (MiniKdc + mini ZK + mini DFS +
 * {@link LocalHBaseCluster}), registers {@link ShadeSaslClientAuthenticationProvider} and
 * {@link ShadeSaslServerAuthenticationProvider} via the "extra providers" configuration keys, and
 * then verifies that a client presenting the password-file-backed Shade token can (and an invalid
 * password cannot) authenticate.
 */
@Tag(MediumTests.TAG)
@Tag(SecurityTests.TAG)
public class TestShadeSaslAuthenticationProvider {

  private static final Logger LOG =
    LoggerFactory.getLogger(TestShadeSaslAuthenticationProvider.class);

  // Cleartext password for the single test user ("user1") written into the user database file.
  private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();

  /**
   * Builds (but does not start) a secured single-node HBase cluster wired for the Shade provider.
   * <p>
   * Side effects: creates the service principal in {@code kdc}, starts the mini ZK and mini DFS
   * clusters on {@code util}, writes the user database file, and mutates the static
   * {@code USER_DATABASE_FILE} field and the shared {@code CONF} (which is
   * {@code util.getConfiguration()}). The bring-up order here is deliberate: ZK and the secured
   * configuration must be in place before DFS, and the password file must exist on the test
   * filesystem before the cluster reads {@code PASSWORD_FILE_KEY}.
   * @param util         testing utility whose configuration backs the cluster
   * @param keytabFile   keytab file the service principal is written into
   * @param kdc          running MiniKdc used to create the "hbase/localhost" principal
   * @param userDatabase user-name to password mapping to persist for the Shade server provider
   * @return a configured, not-yet-started {@link LocalHBaseCluster} with one master/regionserver
   */
  static LocalHBaseCluster createCluster(HBaseTestingUtil util, File keytabFile, MiniKdc kdc,
    Map<String, char[]> userDatabase) throws Exception {
    String servicePrincipal = "hbase/localhost";
    String spnegoPrincipal = "HTTP/localhost";
    kdc.createPrincipal(keytabFile, servicePrincipal);
    util.startMiniZKCluster();

    HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
      servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(util, TestShadeSaslAuthenticationProvider.class);

    // TokenProvider is needed so clients can obtain delegation tokens from the region servers.
    util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      TokenProvider.class.getName());
    util.startMiniDFSCluster(1);
    Path testDir = util.getDataTestDirOnTestFS("TestShadeSaslAuthenticationProvider");
    USER_DATABASE_FILE = new Path(testDir, "user-db.txt");

    createUserDBFile(USER_DATABASE_FILE.getFileSystem(CONF), USER_DATABASE_FILE, userDatabase);
    CONF.set(ShadeSaslServerAuthenticationProvider.PASSWORD_FILE_KEY,
      USER_DATABASE_FILE.toString());

    Path rootdir = new Path(testDir, "hbase-root");
    CommonFSUtils.setRootDir(CONF, rootdir);
    LocalHBaseCluster cluster = new LocalHBaseCluster(CONF, 1);
    return cluster;
  }

  /**
   * Writes the user database consumed by {@link ShadeSaslServerAuthenticationProvider}: one
   * UTF-8 line per user of the form {@code <user><SEPARATOR><password>}. Any pre-existing file at
   * {@code p} is deleted first so the contents exactly match {@code userDatabase}.
   * @param fs           filesystem to write to
   * @param p            destination path of the user database file
   * @param userDatabase user-name to password mapping to persist
   * @throws IOException if the file cannot be replaced or written
   */
  static void createUserDBFile(FileSystem fs, Path p, Map<String, char[]> userDatabase)
    throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
      for (Entry<String, char[]> e : userDatabase.entrySet()) {
        writer.write(e.getKey());
        writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
        writer.write(e.getValue());
        writer.newLine();
      }
    }
  }
131 132 private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); 133 private static final Configuration CONF = UTIL.getConfiguration(); 134 private static LocalHBaseCluster CLUSTER; 135 private static File KEYTAB_FILE; 136 private static Path USER_DATABASE_FILE; 137 138 @BeforeAll 139 public static void setupCluster() throws Exception { 140 KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath()); 141 final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE); 142 143 // Adds our test impls instead of creating service loader entries which 144 // might inadvertently get them loaded on a real cluster. 145 CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY, 146 ShadeSaslClientAuthenticationProvider.class.getName()); 147 CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY, 148 ShadeSaslServerAuthenticationProvider.class.getName()); 149 CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY, ShadeProviderSelector.class.getName()); 150 151 CLUSTER = 152 createCluster(UTIL, KEYTAB_FILE, kdc, Collections.singletonMap("user1", USER1_PASSWORD)); 153 CLUSTER.startup(); 154 } 155 156 @AfterAll 157 public static void teardownCluster() throws Exception { 158 if (CLUSTER != null) { 159 CLUSTER.shutdown(); 160 CLUSTER = null; 161 } 162 UTIL.shutdownMiniZKCluster(); 163 } 164 165 TableName tableName; 166 String clusterId; 167 168 @BeforeEach 169 public void createTable(TestInfo testInfo) throws Exception { 170 tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); 171 172 // Create a table and write a record as the service user (hbase) 173 UserGroupInformation serviceUgi = UserGroupInformation 174 .loginUserFromKeytabAndReturnUGI("hbase/localhost", KEYTAB_FILE.getAbsolutePath()); 175 clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() { 176 @Override 177 public String run() throws Exception { 178 try (Connection conn = ConnectionFactory.createConnection(CONF); 179 Admin admin = 
conn.getAdmin();) { 180 admin.createTable(TableDescriptorBuilder.newBuilder(tableName) 181 .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build()); 182 183 UTIL.waitTableAvailable(tableName); 184 185 try (Table t = conn.getTable(tableName)) { 186 Put p = new Put(Bytes.toBytes("r1")); 187 p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1"), Bytes.toBytes("1")); 188 t.put(p); 189 } 190 191 return admin.getClusterMetrics().getClusterId(); 192 } 193 } 194 }); 195 196 assertNotNull(clusterId); 197 } 198 199 @Test 200 public void testPositiveAuthentication() throws Exception { 201 final Configuration clientConf = new Configuration(CONF); 202 try (Connection conn1 = ConnectionFactory.createConnection(clientConf)) { 203 UserGroupInformation user1 = 204 UserGroupInformation.createUserForTesting("user1", new String[0]); 205 user1.addToken(ShadeClientTokenUtil.obtainToken(conn1, "user1", USER1_PASSWORD)); 206 user1.doAs(new PrivilegedExceptionAction<Void>() { 207 @Override 208 public Void run() throws Exception { 209 try (Connection conn = ConnectionFactory.createConnection(clientConf)) { 210 try (Table t = conn.getTable(tableName)) { 211 Result r = t.get(new Get(Bytes.toBytes("r1"))); 212 assertNotNull(r); 213 assertFalse(r.isEmpty(), "Should have read a non-empty Result"); 214 final Cell cell = r.getColumnLatestCell(Bytes.toBytes("f1"), Bytes.toBytes("q1")); 215 assertTrue(CellUtil.matchingValue(cell, Bytes.toBytes("1")), "Unexpected value"); 216 217 return null; 218 } 219 } 220 } 221 }); 222 } 223 } 224 225 @Test 226 public void testNegativeAuthentication() throws Exception { 227 List<Pair<String, Class<? extends Exception>>> params = new ArrayList<>(); 228 // Master-based connection will fail to ask the master its cluster ID 229 // as a part of creating the Connection. 230 params.add(new Pair<String, Class<? 
extends Exception>>(MasterRegistry.class.getName(), 231 MasterRegistryFetchException.class)); 232 // ZK based connection will fail on the master RPC 233 params.add(new Pair<String, Class<? extends Exception>>( 234 // ZKConnectionRegistry is package-private 235 HConstants.ZK_CONNECTION_REGISTRY_CLASS, RetriesExhaustedException.class)); 236 237 params.forEach((pair) -> { 238 LOG.info("Running negative authentication test for client registry {}, expecting {}", 239 pair.getFirst(), pair.getSecond().getName()); 240 // Validate that we can read that record back out as the user with our custom auth'n 241 final Configuration clientConf = new Configuration(CONF); 242 clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3); 243 clientConf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, pair.getFirst()); 244 try (Connection conn = ConnectionFactory.createConnection(clientConf)) { 245 UserGroupInformation user1 = 246 UserGroupInformation.createUserForTesting("user1", new String[0]); 247 user1.addToken( 248 ShadeClientTokenUtil.obtainToken(conn, "user1", "not a real password".toCharArray())); 249 250 LOG.info("Executing request to HBase Master which should fail"); 251 user1.doAs(new PrivilegedExceptionAction<Void>() { 252 @Override 253 public Void run() throws Exception { 254 try (Connection conn = ConnectionFactory.createConnection(clientConf);) { 255 conn.getAdmin().listTableDescriptors(); 256 fail("Should not successfully authenticate with HBase"); 257 } catch (Exception e) { 258 LOG.info("Caught exception in negative Master connectivity test", e); 259 assertEquals(pair.getSecond(), e.getClass(), "Found unexpected exception"); 260 } 261 return null; 262 } 263 }); 264 265 LOG.info("Executing request to HBase RegionServer which should fail"); 266 user1.doAs(new PrivilegedExceptionAction<Void>() { 267 @Override 268 public Void run() throws Exception { 269 // A little contrived because, with MasterRegistry, we'll still fail on talking 270 // to the HBase master 
before trying to talk to a RegionServer. 271 try (Connection conn = ConnectionFactory.createConnection(clientConf); 272 Table t = conn.getTable(tableName)) { 273 t.get(new Get(Bytes.toBytes("r1"))); 274 fail("Should not successfully authenticate with HBase"); 275 } catch (Exception e) { 276 LOG.info("Caught exception in negative RegionServer connectivity test", e); 277 assertEquals(pair.getSecond(), e.getClass(), "Found unexpected exception"); 278 } 279 return null; 280 } 281 }); 282 } catch (InterruptedException e) { 283 LOG.error("Caught interrupted exception", e); 284 Thread.currentThread().interrupt(); 285 return; 286 } catch (IOException e) { 287 throw new RuntimeException(e); 288 } 289 }); 290 } 291}