001/*
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.security;
019
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.lang3.SystemUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
042
043@Tag(SecurityTests.TAG)
044@Tag(SmallTests.TAG)
045public class TestUser {
046
047  private static final Logger LOG = LoggerFactory.getLogger(TestUser.class);
048
049  @Test
050  public void testCreateUserForTestingGroupCache() throws Exception {
051    Configuration conf = HBaseConfiguration.create();
052    User uCreated = User.createUserForTesting(conf, "group_user", new String[] { "MYGROUP" });
053    UserProvider up = UserProvider.instantiate(conf);
054    User uProvided = up.create(UserGroupInformation.createRemoteUser("group_user"));
055    assertArrayEquals(uCreated.getGroupNames(), uProvided.getGroupNames());
056
057  }
058
059  @Test
060  public void testCacheGetGroups() throws Exception {
061    Configuration conf = HBaseConfiguration.create();
062    UserProvider up = UserProvider.instantiate(conf);
063
064    // VERY unlikely that this user will exist on the box.
065    // This should mean the user has no groups.
066    String nonUser = "kklvfnvhdhcenfnniilggljhdecjhidkle";
067
068    // Create two UGI's for this username
069    UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(nonUser);
070    UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(nonUser);
071
072    // Now try and get the user twice.
073    User uOne = up.create(ugiOne);
074    User uTwo = up.create(ugiTwo);
075
076    // Make sure that we didn't break groups and everything worked well.
077    assertArrayEquals(uOne.getGroupNames(), uTwo.getGroupNames());
078
079    // Check that they are referentially equal.
080    // Since getting a group for a users that doesn't exist creates a new string array
081    // the only way that they should be referentially equal is if the cache worked and
082    // made sure we didn't go to hadoop's script twice.
083    assertTrue(uOne.getGroupNames() == uTwo.getGroupNames());
084    assertEquals(0, ugiOne.getGroupNames().length);
085  }
086
087  @Test
088  public void testCacheGetGroupsRoot() throws Exception {
089    // Windows users don't have a root user.
090    // However pretty much every other *NIX os will have root.
091    if (!SystemUtils.IS_OS_WINDOWS) {
092      Configuration conf = HBaseConfiguration.create();
093      UserProvider up = UserProvider.instantiate(conf);
094
095      String rootUserName = "root";
096
097      // Create two UGI's for this username
098      UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(rootUserName);
099      UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(rootUserName);
100
101      // Now try and get the user twice.
102      User uOne = up.create(ugiOne);
103      User uTwo = up.create(ugiTwo);
104
105      // Make sure that we didn't break groups and everything worked well.
106      assertArrayEquals(uOne.getGroupNames(), uTwo.getGroupNames());
107      String[] groupNames = ugiOne.getGroupNames();
108      assertTrue(groupNames.length > 0);
109    }
110  }
111
112  @Test
113  public void testBasicAttributes() throws Exception {
114    Configuration conf = HBaseConfiguration.create();
115    User user = User.createUserForTesting(conf, "simple", new String[] { "foo" });
116    assertEquals("simple", user.getName(), "Username should match");
117    assertEquals("simple", user.getShortName(), "Short username should match");
118    // don't test shortening of kerberos names because regular Hadoop doesn't support them
119  }
120
121  @Test
122  public void testRunAs() throws Exception {
123    Configuration conf = HBaseConfiguration.create();
124    final User user = User.createUserForTesting(conf, "testuser", new String[] { "foo" });
125    final PrivilegedExceptionAction<String> action = new PrivilegedExceptionAction<String>() {
126      @Override
127      public String run() throws IOException {
128        User u = User.getCurrent();
129        return u.getName();
130      }
131    };
132
133    String username = user.runAs(action);
134    assertEquals("testuser", username, "Current user within runAs() should match");
135
136    // ensure the next run is correctly set
137    User user2 = User.createUserForTesting(conf, "testuser2", new String[] { "foo" });
138    String username2 = user2.runAs(action);
139    assertEquals("testuser2", username2, "Second username should match second user");
140
141    // check the exception version
142    username = user.runAs(new PrivilegedExceptionAction<String>() {
143      @Override
144      public String run() throws Exception {
145        return User.getCurrent().getName();
146      }
147    });
148    assertEquals("testuser", username, "User name in runAs() should match");
149
150    // verify that nested contexts work
151    user2.runAs(new PrivilegedExceptionAction<Object>() {
152      @Override
153      public Object run() throws IOException, InterruptedException {
154        String nestedName = user.runAs(action);
155        assertEquals("testuser", nestedName, "Nest name should match nested user");
156        assertEquals("testuser2", User.getCurrent().getName(),
157          "Current name should match current user");
158        return null;
159      }
160    });
161
162    username = user.runAs(new PrivilegedAction<String>() {
163      String result = null;
164
165      @Override
166      public String run() {
167        try {
168          return User.getCurrent().getName();
169        } catch (IOException e) {
170          result = "empty";
171        }
172        return result;
173      }
174    });
175
176    assertEquals("testuser", username, "Current user within runAs() should match");
177  }
178
179  /**
180   * Make sure that we're returning a result for the current user. Previously getCurrent() was
181   * returning null if not initialized on non-secure Hadoop variants.
182   */
183  @Test
184  public void testGetCurrent() throws Exception {
185    User user1 = User.getCurrent();
186    assertNotNull(user1.ugi);
187    LOG.debug("User1 is " + user1.getName());
188
189    for (int i = 0; i < 100; i++) {
190      User u = User.getCurrent();
191      assertNotNull(u);
192      assertEquals(user1.getName(), u.getName());
193      assertEquals(user1, u);
194      assertEquals(user1.hashCode(), u.hashCode());
195    }
196  }
197
198  @Test
199  public void testUserGroupNames() throws Exception {
200    final String username = "testuser";
201    final ImmutableSet<String> singleGroups = ImmutableSet.of("group");
202    final Configuration conf = HBaseConfiguration.create();
203    User user = User.createUserForTesting(conf, username,
204      singleGroups.toArray(new String[singleGroups.size()]));
205    assertUserGroup(user, singleGroups);
206
207    final ImmutableSet<String> multiGroups = ImmutableSet.of("group", "group1", "group2");
208    user = User.createUserForTesting(conf, username,
209      multiGroups.toArray(new String[multiGroups.size()]));
210    assertUserGroup(user, multiGroups);
211  }
212
213  private void assertUserGroup(User user, ImmutableSet<String> groups) {
214    assertNotNull(user.getGroupNames(), "GroupNames should be not null");
215    assertTrue(user.getGroupNames().length == groups.size(),
216      "UserGroupNames length should be == " + groups.size());
217
218    for (String group : user.getGroupNames()) {
219      assertTrue(groups.contains(group), "groupName should be in set ");
220    }
221  }
222
223  @Test
224  public void testSecurityForNonSecureHadoop() {
225    assertFalse(User.isSecurityEnabled(), "Security should be disable in non-secure Hadoop");
226
227    Configuration conf = HBaseConfiguration.create();
228    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
229    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
230    assertTrue(User.isHBaseSecurityEnabled(conf), "Security should be enabled");
231
232    conf = HBaseConfiguration.create();
233    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
234    assertFalse(User.isHBaseSecurityEnabled(conf), "HBase security should not be enabled if "
235      + User.HBASE_SECURITY_CONF_KEY + " is not set accordingly");
236
237    conf = HBaseConfiguration.create();
238    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
239    assertTrue(User.isHBaseSecurityEnabled(conf),
240      "HBase security should be enabled regardless of underlying " + "HDFS settings");
241  }
242}