001/* 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.security.provider.example; 019 020import static org.junit.Assert.assertEquals; 021import static org.junit.Assert.assertFalse; 022import static org.junit.Assert.assertNotNull; 023import static org.junit.Assert.assertTrue; 024import static org.junit.Assert.fail; 025 026import java.io.BufferedWriter; 027import java.io.File; 028import java.io.IOException; 029import java.io.OutputStreamWriter; 030import java.nio.charset.StandardCharsets; 031import java.security.PrivilegedExceptionAction; 032import java.util.ArrayList; 033import java.util.Collections; 034import java.util.List; 035import java.util.Map; 036import java.util.Map.Entry; 037import org.apache.hadoop.conf.Configuration; 038import org.apache.hadoop.fs.FSDataOutputStream; 039import org.apache.hadoop.fs.FileSystem; 040import org.apache.hadoop.fs.Path; 041import org.apache.hadoop.hbase.Cell; 042import org.apache.hadoop.hbase.CellUtil; 043import org.apache.hadoop.hbase.HBaseClassTestRule; 044import org.apache.hadoop.hbase.HBaseTestingUtil; 045import org.apache.hadoop.hbase.HConstants; 046import org.apache.hadoop.hbase.LocalHBaseCluster; 047import 
org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.MasterRegistry;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.MasterRegistryFetchException;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * End-to-end test for the example "Shade" SASL authentication provider: stands up a secured
 * (Kerberos + SSL) single-node HBase cluster with the Shade client/server providers registered,
 * then verifies that a password-file-backed token lets a client authenticate (positive case) and
 * that a bad password is rejected for both Master and RegionServer RPCs (negative case).
 */
@Category({ MediumTests.class, SecurityTests.class })
public class TestShadeSaslAuthenticationProvider {
  private static final Logger LOG =
    LoggerFactory.getLogger(TestShadeSaslAuthenticationProvider.class);

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestShadeSaslAuthenticationProvider.class);

  // Known-good password for "user1" in the test password database.
  private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();

  /**
   * Creates (but does not start) a secured single-node HBase cluster: provisions the Kerberos
   * service principal, starts mini ZK and DFS clusters, writes the Shade password database file to
   * the test filesystem, and points the server-side provider at it.
   * @param util         shared testing utility whose Configuration backs the cluster
   * @param keytabFile   file the service principal's keytab is written into
   * @param kdc          running mini KDC used to create the service principal
   * @param userDatabase user-name to password mapping to persist for the Shade provider
   * @return an unstarted {@link LocalHBaseCluster} configured for secure operation
   */
  static LocalHBaseCluster createCluster(HBaseTestingUtil util, File keytabFile, MiniKdc kdc,
    Map<String, char[]> userDatabase) throws Exception {
    String servicePrincipal = "hbase/localhost";
    String spnegoPrincipal = "HTTP/localhost";
    kdc.createPrincipal(keytabFile, servicePrincipal);
    util.startMiniZKCluster();

    HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
      servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(util, TestShadeSaslAuthenticationProvider.class);

    // The TokenProvider coprocessor is required for clients to obtain delegation tokens.
    util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      TokenProvider.class.getName());
    util.startMiniDFSCluster(1);
    Path testDir = util.getDataTestDirOnTestFS("TestShadeSaslAuthenticationProvider");
    USER_DATABASE_FILE = new Path(testDir, "user-db.txt");

    createUserDBFile(USER_DATABASE_FILE.getFileSystem(CONF), USER_DATABASE_FILE, userDatabase);
    CONF.set(ShadeSaslServerAuthenticationProvider.PASSWORD_FILE_KEY,
      USER_DATABASE_FILE.toString());

    Path rootdir = new Path(testDir, "hbase-root");
    CommonFSUtils.setRootDir(CONF, rootdir);
    LocalHBaseCluster cluster = new LocalHBaseCluster(CONF, 1);
    return cluster;
  }

  /**
   * Writes the Shade provider's password database to {@code p}, one
   * {@code user<SEPARATOR>password} line per entry, replacing any existing file.
   */
  static void createUserDBFile(FileSystem fs, Path p, Map<String, char[]> userDatabase)
    throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
      for (Entry<String, char[]> e : userDatabase.entrySet()) {
        writer.write(e.getKey());
        writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
        writer.write(e.getValue());
        writer.newLine();
      }
    }
  }

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
  private static final Configuration CONF = UTIL.getConfiguration();
  private static LocalHBaseCluster CLUSTER;
  // Held statically so teardownCluster() can stop it; previously this was a local in
  // setupCluster() and the KDC was leaked after the test class finished.
  private static MiniKdc KDC;
  private static File KEYTAB_FILE;
  private static Path USER_DATABASE_FILE;

  @BeforeClass
  public static void setupCluster() throws Exception {
    KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
    KDC = UTIL.setupMiniKdc(KEYTAB_FILE);

    // Adds our test impls instead of creating service loader entries which
    // might inadvertently get them loaded on a real cluster.
    CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslClientAuthenticationProvider.class.getName());
    CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslServerAuthenticationProvider.class.getName());
    CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY, ShadeProviderSelector.class.getName());

    CLUSTER =
      createCluster(UTIL, KEYTAB_FILE, KDC, Collections.singletonMap("user1", USER1_PASSWORD));
    CLUSTER.startup();
  }

  @AfterClass
  public static void teardownCluster() throws Exception {
    if (CLUSTER != null) {
      CLUSTER.shutdown();
      CLUSTER = null;
    }
    // Stop the mini KDC so its server thread/port do not outlive the test class.
    if (KDC != null) {
      KDC.stop();
      KDC = null;
    }
    UTIL.shutdownMiniZKCluster();
    // The DFS mini-cluster was started in createCluster(); shut it down as well.
    UTIL.shutdownMiniDFSCluster();
  }

  @Rule
  public TestName name = new TestName();
  // Per-test table named after the running test method.
  TableName tableName;
  // Cluster ID captured while acting as the service user; asserted non-null in createTable().
  String clusterId;

  /**
   * Before each test: as the Kerberos service user, creates a one-family table, writes a single
   * row {@code r1/f1:q1=1}, and records the cluster ID for later use.
   */
  @Before
  public void createTable() throws Exception {
    tableName = TableName.valueOf(name.getMethodName());

    // Create a table and write a record as the service user (hbase)
    UserGroupInformation serviceUgi = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI("hbase/localhost", KEYTAB_FILE.getAbsolutePath());
    clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(CONF);
          Admin admin = conn.getAdmin();) {
          admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build());

          UTIL.waitTableAvailable(tableName);

          try (Table t = conn.getTable(tableName)) {
            Put p = new Put(Bytes.toBytes("r1"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
            t.put(p);
          }

          return admin.getClusterMetrics().getClusterId();
        }
      }
    });

    assertNotNull(clusterId);
  }

  /**
   * A client holding a Shade token minted with the correct password can authenticate and read
   * back the row written in {@link #createTable()}.
   */
  @Test
  public void testPositiveAuthentication() throws Exception {
    final Configuration clientConf = new Configuration(CONF);
    try (Connection conn1 = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 =
        UserGroupInformation.createUserForTesting("user1", new String[0]);
      user1.addToken(ShadeClientTokenUtil.obtainToken(conn1, "user1", USER1_PASSWORD));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
            try (Table t = conn.getTable(tableName)) {
              Result r = t.get(new Get(Bytes.toBytes("r1")));
              assertNotNull(r);
              assertFalse("Should have read a non-empty Result", r.isEmpty());
              final Cell cell = r.getColumnLatestCell(Bytes.toBytes("f1"), Bytes.toBytes("q1"));
              assertTrue("Unexpected value", CellUtil.matchingValue(cell, Bytes.toBytes("1")));

              return null;
            }
          }
        }
      });
    }
  }

  /**
   * A client holding a Shade token minted with a wrong password must fail to authenticate against
   * both the Master and a RegionServer. Exercised with both the Master-based and ZK-based client
   * registries, which surface the failure as different exception types.
   */
  @Test
  public void testNegativeAuthentication() throws Exception {
    List<Pair<String, Class<? extends Exception>>> params = new ArrayList<>();
    // Master-based connection will fail to ask the master its cluster ID
    // as a part of creating the Connection.
    params.add(new Pair<String, Class<? extends Exception>>(MasterRegistry.class.getName(),
      MasterRegistryFetchException.class));
    // ZK based connection will fail on the master RPC
    params.add(new Pair<String, Class<? extends Exception>>(
      // ZKConnectionRegistry is package-private
      HConstants.ZK_CONNECTION_REGISTRY_CLASS, RetriesExhaustedException.class));

    params.forEach((pair) -> {
      LOG.info("Running negative authentication test for client registry {}, expecting {}",
        pair.getFirst(), pair.getSecond().getName());
      // Validate that we can read that record back out as the user with our custom auth'n
      final Configuration clientConf = new Configuration(CONF);
      clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
      clientConf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, pair.getFirst());
      try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
        UserGroupInformation user1 =
          UserGroupInformation.createUserForTesting("user1", new String[0]);
        user1.addToken(
          ShadeClientTokenUtil.obtainToken(conn, "user1", "not a real password".toCharArray()));

        LOG.info("Executing request to HBase Master which should fail");
        user1.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            try (Connection conn = ConnectionFactory.createConnection(clientConf);) {
              conn.getAdmin().listTableDescriptors();
              fail("Should not successfully authenticate with HBase");
            } catch (Exception e) {
              LOG.info("Caught exception in negative Master connectivity test", e);
              assertEquals("Found unexpected exception", pair.getSecond(), e.getClass());
            }
            return null;
          }
        });

        LOG.info("Executing request to HBase RegionServer which should fail");
        user1.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            // A little contrived because, with MasterRegistry, we'll still fail on talking
            // to the HBase master before trying to talk to a RegionServer.
            try (Connection conn = ConnectionFactory.createConnection(clientConf);
              Table t = conn.getTable(tableName)) {
              t.get(new Get(Bytes.toBytes("r1")));
              fail("Should not successfully authenticate with HBase");
            } catch (Exception e) {
              LOG.info("Caught exception in negative RegionServer connectivity test", e);
              assertEquals("Found unexpected exception", pair.getSecond(), e.getClass());
            }
            return null;
          }
        });
      } catch (InterruptedException e) {
        LOG.error("Caught interrupted exception", e);
        // Re-set the interrupt flag since we swallowed the InterruptedException.
        Thread.currentThread().interrupt();
        return;
      } catch (IOException e) {
        // Checked IOException cannot escape the forEach lambda; rethrow unchecked.
        throw new RuntimeException(e);
      }
    });
  }
}