/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.provider.example;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

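/**
 * Tests the example Shade SASL authentication provider against a local HBase cluster. The
 * cluster services authenticate to each other with Kerberos (via a MiniKdc), while client
 * authentication goes through the Shade provider, which validates a username/password pair
 * against a flat "user database" file written to the test filesystem.
 */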
@Category({MediumTests.class, SecurityTests.class})
public class TestShadeSaslAuthenticationProvider {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestShadeSaslAuthenticationProvider.class);

  private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();

  static LocalHBaseCluster createCluster(HBaseTestingUtility util, File keytabFile,
      MiniKdc kdc, Map<String,char[]> userDatabase) throws Exception {
    String servicePrincipal = "hbase/localhost";
    String spnegoPrincipal = "HTTP/localhost";
    kdc.createPrincipal(keytabFile, servicePrincipal);
    util.startMiniZKCluster();

    HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
        servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(util, TestShadeSaslAuthenticationProvider.class);

    util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        TokenProvider.class.getName());
    util.startMiniDFSCluster(1);
    Path testDir = util.getDataTestDirOnTestFS("TestShadeSaslAuthenticationProvider");
    USER_DATABASE_FILE = new Path(testDir, "user-db.txt");

    createUserDBFile(
        USER_DATABASE_FILE.getFileSystem(CONF), USER_DATABASE_FILE, userDatabase);
    CONF.set(ShadeSaslServerAuthenticationProvider.PASSWORD_FILE_KEY,
        USER_DATABASE_FILE.toString());

    Path rootdir = new Path(testDir, "hbase-root");
    CommonFSUtils.setRootDir(CONF, rootdir);
    LocalHBaseCluster cluster = new LocalHBaseCluster(CONF, 1);
    return cluster;
  }

  static void createUserDBFile(FileSystem fs, Path p,
      Map<String,char[]> userDatabase) throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
    try (FSDataOutputStream out = fs.create(p);
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
      for (Entry<String,char[]> e : userDatabase.entrySet()) {
        writer.write(e.getKey());
        writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
        writer.write(e.getValue());
        writer.newLine();
      }
    }
  }

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final Configuration CONF = UTIL.getConfiguration();
  private static LocalHBaseCluster CLUSTER;
  private static File KEYTAB_FILE;
  private static Path USER_DATABASE_FILE;

  @BeforeClass
  public static void setupCluster() throws Exception {
    KEYTAB_FILE = new File(
        UTIL.getDataTestDir("keytab").toUri().getPath());
    final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE);

    // Adds our test impls instead of creating service loader entries which
    // might inadvertently get them loaded on a real cluster.
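    // EXTRA_PROVIDERS_KEY registers the Shade client/server providers on top of those
    // discovered via the ServiceLoader, and SELECTOR_KEY installs the ShadeProviderSelector
    // so clients pick the Shade provider (and its token) when talking to this cluster.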
    CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
        ShadeSaslClientAuthenticationProvider.class.getName());
    CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
        ShadeSaslServerAuthenticationProvider.class.getName());
    CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY,
        ShadeProviderSelector.class.getName());

    CLUSTER = createCluster(UTIL, KEYTAB_FILE, kdc,
        Collections.singletonMap("user1", USER1_PASSWORD));
    CLUSTER.startup();
  }

  @AfterClass
  public static void teardownCluster() throws Exception {
    if (CLUSTER != null) {
      CLUSTER.shutdown();
      CLUSTER = null;
    }
    UTIL.shutdownMiniZKCluster();
  }

  @Rule
  public TestName name = new TestName();
  TableName tableName;
  String clusterId;

  @Before
  public void createTable() throws Exception {
    tableName = TableName.valueOf(name.getMethodName());

    // Create a table and write a record as the service user (hbase)
    UserGroupInformation serviceUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        "hbase/localhost", KEYTAB_FILE.getAbsolutePath());
    clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() {
      @Override public String run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(CONF);
            Admin admin = conn.getAdmin()) {
          admin.createTable(TableDescriptorBuilder
              .newBuilder(tableName)
              .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
              .build());

          UTIL.waitTableAvailable(tableName);

          try (Table t = conn.getTable(tableName)) {
            Put p = new Put(Bytes.toBytes("r1"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
            t.put(p);
          }

          return admin.getClusterMetrics().getClusterId();
        }
      }
    });

    assertNotNull(clusterId);
  }

  @Test
  public void testPositiveAuthentication() throws Exception {
    final Configuration clientConf = new Configuration(CONF);
    try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 = UserGroupInformation.createUserForTesting(
          "user1", new String[0]);
      user1.addToken(ShadeClientTokenUtil.obtainToken(conn, "user1", USER1_PASSWORD));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override public Void run() throws Exception {
          try (Table t = conn.getTable(tableName)) {
            Result r = t.get(new Get(Bytes.toBytes("r1")));
            assertNotNull(r);
            assertFalse("Should have read a non-empty Result", r.isEmpty());
            final Cell cell = r.getColumnLatestCell(Bytes.toBytes("f1"), Bytes.toBytes("q1"));
            assertTrue("Unexpected value", CellUtil.matchingValue(cell, Bytes.toBytes("1")));

            return null;
          }
        }
      });
    }
  }

  @Test(expected = DoNotRetryIOException.class)
  public void testNegativeAuthentication() throws Exception {
    // Validate that the read fails when the token was obtained with an invalid password
    final Configuration clientConf = new Configuration(CONF);
    clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
    try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 = UserGroupInformation.createUserForTesting(
          "user1", new String[0]);
      user1.addToken(
          ShadeClientTokenUtil.obtainToken(conn, "user1", "not a real password".toCharArray()));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override public Void run() throws Exception {
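          // A new Connection is opened inside the doAs() so the RPC handshake runs as user1
          // with the bogus token; the server should reject the authentication attempt and the
          // client surfaces the DoNotRetryIOException this test expects.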
          try (Connection conn = ConnectionFactory.createConnection(clientConf);
              Table t = conn.getTable(tableName)) {
            t.get(new Get(Bytes.toBytes("r1")));
            fail("Should not successfully authenticate with HBase");
            return null;
          }
        }
      });
    }
  }
}