/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security;

import static org.junit.Assert.*;

import java.io.IOException;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.lang3.SystemUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;

@Category({SecurityTests.class, SmallTests.class})
public class TestUser {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestUser.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestUser.class);

  /**
   * Verify that groups configured via createUserForTesting are also visible for a
   * user created through the UserProvider for the same username.
   */
  @Test
  public void testCreateUserForTestingGroupCache() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    User uCreated = User.createUserForTesting(conf, "group_user", new String[] { "MYGROUP" });
    UserProvider up = UserProvider.instantiate(conf);
    User uProvided = up.create(UserGroupInformation.createRemoteUser("group_user"));
    assertArrayEquals(uCreated.getGroupNames(), uProvided.getGroupNames());
  }

  @Test
  public void testCacheGetGroups() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    UserProvider up = UserProvider.instantiate(conf);

    // VERY unlikely that this user will exist on the box.
    // This should mean the user has no groups.
    String nonUser = "kklvfnvhdhcenfnniilggljhdecjhidkle";

    // Create two UGIs for this username.
    UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(nonUser);
    UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(nonUser);

    // Now try and get the user twice.
    User uOne = up.create(ugiOne);
    User uTwo = up.create(ugiTwo);

    // Make sure that we didn't break groups and everything worked well.
    assertArrayEquals(uOne.getGroupNames(), uTwo.getGroupNames());

    // Check that they are referentially equal.
    // Since getting the groups for a user that doesn't exist creates a new string array,
    // the only way they should be referentially equal is if the cache worked and
    // made sure we didn't go to hadoop's script twice.
    assertSame(uOne.getGroupNames(), uTwo.getGroupNames());
    assertEquals(0, ugiOne.getGroupNames().length);
  }

  @Test
  public void testCacheGetGroupsRoot() throws Exception {
    // Windows boxes don't have a root user.
    // However, pretty much every other *NIX OS will have root.
    if (!SystemUtils.IS_OS_WINDOWS) {
      Configuration conf = HBaseConfiguration.create();
      UserProvider up = UserProvider.instantiate(conf);

      String rootUserName = "root";

      // Create two UGIs for this username.
      UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(rootUserName);
      UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(rootUserName);

      // Now try and get the user twice.
      User uOne = up.create(ugiOne);
      User uTwo = up.create(ugiTwo);

      // Make sure that we didn't break groups and everything worked well.
      assertArrayEquals(uOne.getGroupNames(), uTwo.getGroupNames());
      String[] groupNames = ugiOne.getGroupNames();
      assertTrue(groupNames.length > 0);
    }
  }

  @Test
  public void testBasicAttributes() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    User user = User.createUserForTesting(conf, "simple", new String[] { "foo" });
    assertEquals("Username should match", "simple", user.getName());
    assertEquals("Short username should match", "simple", user.getShortName());
    // don't test shortening of Kerberos names because regular Hadoop doesn't support them
  }

  @Test
  public void testRunAs() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    final User user = User.createUserForTesting(conf, "testuser", new String[] { "foo" });
    final PrivilegedExceptionAction<String> action = new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws IOException {
        User u = User.getCurrent();
        return u.getName();
      }
    };

    String username = user.runAs(action);
    assertEquals("Current user within runAs() should match", "testuser", username);

    // ensure the next run is correctly set
    User user2 = User.createUserForTesting(conf, "testuser2", new String[] { "foo" });
    String username2 = user2.runAs(action);
    assertEquals("Second username should match second user", "testuser2", username2);

    // check the exception version
    username = user.runAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        return User.getCurrent().getName();
      }
    });
    assertEquals("User name in runAs() should match", "testuser", username);

    // verify that nested contexts work
    user2.runAs(new PrivilegedExceptionAction<Object>() {
      @Override
      public Object run() throws IOException, InterruptedException {
        String nestedName = user.runAs(action);
        assertEquals("Nested name should match nested user", "testuser", nestedName);
        assertEquals("Current name should match current user", "testuser2",
            User.getCurrent().getName());
        return null;
      }
    });

    // check the PrivilegedAction (non-exception) version
    username = user.runAs(new PrivilegedAction<String>() {
      String result = null;
      @Override
      public String run() {
        try {
          return User.getCurrent().getName();
        } catch (IOException e) {
          result = "empty";
        }
        return result;
      }
    });

    assertEquals("Current user within runAs() should match", "testuser", username);
  }

  /**
   * Make sure that we're returning a result for the current user.
   * Previously getCurrent() was returning null if not initialized on
   * non-secure Hadoop variants.
   */
  @Test
  public void testGetCurrent() throws Exception {
    User user1 = User.getCurrent();
    assertNotNull(user1.ugi);
    LOG.debug("User1 is " + user1.getName());

    for (int i = 0; i < 100; i++) {
      User u = User.getCurrent();
      assertNotNull(u);
      assertEquals(user1.getName(), u.getName());
      assertEquals(user1, u);
      assertEquals(user1.hashCode(), u.hashCode());
    }
  }

  @Test
  public void testUserGroupNames() throws Exception {
    final String username = "testuser";
    final ImmutableSet<String> singleGroups = ImmutableSet.of("group");
    final Configuration conf = HBaseConfiguration.create();
    User user = User.createUserForTesting(conf, username,
        singleGroups.toArray(new String[singleGroups.size()]));
    assertUserGroup(user, singleGroups);

    final ImmutableSet<String> multiGroups = ImmutableSet.of("group", "group1", "group2");
    user = User.createUserForTesting(conf, username,
        multiGroups.toArray(new String[multiGroups.size()]));
    assertUserGroup(user, multiGroups);
  }

  private void assertUserGroup(User user, ImmutableSet<String> groups) {
    assertNotNull("Group names should not be null", user.getGroupNames());
    assertEquals("Number of group names should match", groups.size(),
        user.getGroupNames().length);

    for (String group : user.getGroupNames()) {
      assertTrue("Group name should be in the expected set", groups.contains(group));
    }
  }

  @Test
  public void testSecurityForNonSecureHadoop() {
    assertFalse("Security should be disabled in non-secure Hadoop",
        User.isSecurityEnabled());

    Configuration conf = HBaseConfiguration.create();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
    assertTrue("Security should be enabled", User.isHBaseSecurityEnabled(conf));

    conf = HBaseConfiguration.create();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    assertFalse("HBase security should not be enabled if "
        + User.HBASE_SECURITY_CONF_KEY + " is not set accordingly",
        User.isHBaseSecurityEnabled(conf));

    conf = HBaseConfiguration.create();
    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
    assertTrue("HBase security should be enabled regardless of underlying "
        + "HDFS settings", User.isHBaseSecurityEnabled(conf));
  }
}