/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.provider.example;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

@Category({ MediumTests.class, SecurityTests.class })
public class TestShadeSaslAuthenticationProvider {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestShadeSaslAuthenticationProvider.class);

  private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();

  // Starts ZK and DFS, writes the user/password database file, and returns a secured
  // single-node LocalHBaseCluster configured to read that file.
  static LocalHBaseCluster createCluster(HBaseTestingUtility util, File keytabFile, MiniKdc kdc,
    Map<String, char[]> userDatabase) throws Exception {
    String servicePrincipal = "hbase/localhost";
    String spnegoPrincipal = "HTTP/localhost";
    kdc.createPrincipal(keytabFile, servicePrincipal);
    util.startMiniZKCluster();

    HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
      servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(util, TestShadeSaslAuthenticationProvider.class);

    util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      TokenProvider.class.getName());
    util.startMiniDFSCluster(1);
    Path testDir = util.getDataTestDirOnTestFS("TestShadeSaslAuthenticationProvider");
    USER_DATABASE_FILE = new Path(testDir, "user-db.txt");

    createUserDBFile(USER_DATABASE_FILE.getFileSystem(CONF), USER_DATABASE_FILE, userDatabase);
    CONF.set(ShadeSaslServerAuthenticationProvider.PASSWORD_FILE_KEY,
      USER_DATABASE_FILE.toString());

    Path rootdir = new Path(testDir, "hbase-root");
    CommonFSUtils.setRootDir(CONF, rootdir);
    LocalHBaseCluster cluster = new LocalHBaseCluster(CONF, 1);
    return cluster;
  }

  // Writes one "username<SEPARATOR>password" entry per line, replacing any existing file.
  static void createUserDBFile(FileSystem fs, Path p, Map<String, char[]> userDatabase)
    throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
      for (Entry<String, char[]> e : userDatabase.entrySet()) {
        writer.write(e.getKey());
        writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
        writer.write(e.getValue());
        writer.newLine();
      }
    }
  }

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final Configuration CONF = UTIL.getConfiguration();
  private static LocalHBaseCluster CLUSTER;
  private static File KEYTAB_FILE;
  private static Path USER_DATABASE_FILE;

  @BeforeClass
  public static void setupCluster() throws Exception {
    KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
    final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE);

    // Adds our test impls instead of creating service loader entries which
    // might inadvertently get them loaded on a real cluster.
    CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslClientAuthenticationProvider.class.getName());
    CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslServerAuthenticationProvider.class.getName());
    CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY, ShadeProviderSelector.class.getName());

    CLUSTER =
      createCluster(UTIL, KEYTAB_FILE, kdc, Collections.singletonMap("user1", USER1_PASSWORD));
    CLUSTER.startup();
  }

  @AfterClass
  public static void teardownCluster() throws Exception {
    if (CLUSTER != null) {
      CLUSTER.shutdown();
      CLUSTER = null;
    }
    UTIL.shutdownMiniZKCluster();
  }

  @Rule
  public TestName name = new TestName();
  TableName tableName;
  String clusterId;

  @Before
  public void createTable() throws Exception {
    tableName = TableName.valueOf(name.getMethodName());

    // Create a table and write a record as the service user (hbase)
    UserGroupInformation serviceUgi = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI("hbase/localhost", KEYTAB_FILE.getAbsolutePath());
    clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(CONF);
          Admin admin = conn.getAdmin()) {
          admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build());

          UTIL.waitTableAvailable(tableName);

          try (Table t = conn.getTable(tableName)) {
            Put p = new Put(Bytes.toBytes("r1"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
            t.put(p);
          }

          return admin.getClusterMetrics().getClusterId();
        }
      }
    });

    assertNotNull(clusterId);
  }

  @Test
  public void testPositiveAuthentication() throws Exception {
    // Validate that we can read that record back out as the user with our custom auth'n
    final Configuration clientConf = new Configuration(CONF);
    try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 =
        UserGroupInformation.createUserForTesting("user1", new String[0]);
      user1.addToken(ShadeClientTokenUtil.obtainToken(conn, "user1", USER1_PASSWORD));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          try (Table t = conn.getTable(tableName)) {
            Result r = t.get(new Get(Bytes.toBytes("r1")));
            assertNotNull(r);
            assertFalse("Should have read a non-empty Result", r.isEmpty());
            final Cell cell = r.getColumnLatestCell(Bytes.toBytes("f1"), Bytes.toBytes("q1"));
            assertTrue("Unexpected value", CellUtil.matchingValue(cell, Bytes.toBytes("1")));

            return null;
          }
        }
      });
    }
  }

  @Test(expected = DoNotRetryIOException.class)
  public void testNegativeAuthentication() throws Exception {
    // Validate that authentication fails when the token was obtained with an incorrect password
    final Configuration clientConf = new Configuration(CONF);
    clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
    try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 =
        UserGroupInformation.createUserForTesting("user1", new String[0]);
      user1.addToken(
        ShadeClientTokenUtil.obtainToken(conn, "user1", "not a real password".toCharArray()));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          try (Connection conn = ConnectionFactory.createConnection(clientConf);
            Table t = conn.getTable(tableName)) {
            t.get(new Get(Bytes.toBytes("r1")));
            fail("Should not successfully authenticate with HBase");
            return null;
          }
        }
      });
    }
  }
}