/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security;

import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.BaseConfigurable;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;
import org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader;
import org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Provide an instance of a user. Allows custom {@link User} creation.
 */
@InterfaceAudience.Private
public class UserProvider extends BaseConfigurable {

  private static final String USER_PROVIDER_CONF_KEY = "hbase.client.userprovider.class";
  private static final ListeningExecutorService executor =
    MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1,
      new ThreadFactoryBuilder().setDaemon(true).setNameFormat("group-cache-%d").build()));

  private LoadingCache<String, String[]> groupCache = null;

  static Groups groups = Groups.getUserToGroupsMappingService();

  @Override
  public void setConf(final Configuration conf) {
    super.setConf(conf);

    synchronized (UserProvider.class) {
      if (!(groups instanceof User.TestingGroups)) {
        groups = Groups.getUserToGroupsMappingService(conf);
      }
    }

    long cacheTimeout =
      getConf().getLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS,
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT) * 1000;

    this.groupCache = CacheBuilder.newBuilder()
      // This is the same timeout that hadoop uses. So we'll follow suit.
      .refreshAfterWrite(cacheTimeout, TimeUnit.MILLISECONDS)
      .expireAfterWrite(10 * cacheTimeout, TimeUnit.MILLISECONDS)
      // Set concurrency level equal to the default number of handlers that
      // the simple handler spins up.
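      // Note that concurrencyLevel is only a sizing hint to Guava's CacheBuilder for how it
      // segments the cache internally; the asynchronous refreshes themselves run on the
      // single-threaded executor declared above.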
      .concurrencyLevel(20)
      // Create the loader; it just delegates to UGI.
      .build(new CacheLoader<String, String[]>() {

        // Since UGIs don't hash based on the user id, the cache needs to be keyed on the
        // same thing that Hadoop's Groups class uses: the short name.
        @Override
        public String[] load(String ugi) throws Exception {
          return getGroupStrings(ugi);
        }

        private String[] getGroupStrings(String ugi) {
          try {
            Set<String> result = new LinkedHashSet<>(groups.getGroups(ugi));
            return result.toArray(new String[result.size()]);
          } catch (Exception e) {
            // Fall back to no groups rather than failing the lookup.
            return new String[0];
          }
        }

        // Provide the reload function that uses the executor thread.
        @Override
        public ListenableFuture<String[]> reload(final String k, String[] oldValue)
            throws Exception {
          return executor.submit(new Callable<String[]>() {
            @Override
            public String[] call() throws Exception {
              return getGroupStrings(k);
            }
          });
        }
      });
  }

  /**
   * Instantiate the {@link UserProvider} specified in the configuration and set the passed
   * configuration via {@link UserProvider#setConf(Configuration)}.
   * @param conf configuration to read and set on the created {@link UserProvider}
   * @return a {@link UserProvider} ready for use
   */
  public static UserProvider instantiate(Configuration conf) {
    Class<? extends UserProvider> clazz =
      conf.getClass(USER_PROVIDER_CONF_KEY, UserProvider.class, UserProvider.class);
    return ReflectionUtils.newInstance(clazz, conf);
  }

  /**
   * Set the {@link UserProvider} that should be instantiated in the given configuration.
   * @param conf configuration to update
   * @param provider class of the provider to set
   */
  public static void setUserProviderForTesting(Configuration conf,
      Class<? extends UserProvider> provider) {
    conf.set(USER_PROVIDER_CONF_KEY, provider.getName());
  }

  /**
   * @return the user name of the currently logged-in user
   * @throws IOException if the underlying user cannot be obtained
   */
  public String getCurrentUserName() throws IOException {
    User user = getCurrent();
    return user == null ? null : user.getName();
  }

  /**
   * @return <tt>true</tt> if HBase security is enabled, <tt>false</tt> otherwise
   */
  public boolean isHBaseSecurityEnabled() {
    return User.isHBaseSecurityEnabled(this.getConf());
  }

  /**
   * @return whether or not Kerberos authentication is configured for Hadoop. For non-secure
   *         Hadoop, this always returns <code>false</code>. For secure Hadoop, it will return
   *         the value from {@code UserGroupInformation.isSecurityEnabled()}.
   */
  public boolean isHadoopSecurityEnabled() {
    return User.isSecurityEnabled();
  }

  /**
   * @return the current user within the current execution context
   * @throws IOException if the user cannot be loaded
   */
  public User getCurrent() throws IOException {
    return User.getCurrent();
  }

  /**
   * Wraps an underlying {@code UserGroupInformation} instance.
   * @param ugi the base Hadoop user
   * @return a {@link User} wrapping the given {@code ugi}, or <code>null</code> if {@code ugi}
   *         is <code>null</code>
   */
  public User create(UserGroupInformation ugi) {
    if (ugi == null) {
      return null;
    }
    return new User.SecureHadoopUser(ugi, groupCache);
  }

  /**
   * Log in the current process using the given configuration keys for the credential file and
   * login principal.
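   * <p>
   * A minimal usage sketch; the property key names below are illustrative placeholders, not
   * keys defined by this class:
   * </p>
   * <pre>{@code
   * Configuration conf = HBaseConfiguration.create();
   * UserProvider provider = UserProvider.instantiate(conf);
   * provider.login("example.keytab.file", "example.kerberos.principal",
   *   InetAddress.getLocalHost().getHostName());
   * }</pre>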
   * <p>
   * <strong>This is only applicable when running on secure Hadoop</strong> -- see
   * org.apache.hadoop.security.SecurityUtil#login(Configuration,String,String,String). On regular
   * Hadoop (without security features), this will safely be ignored.
   * </p>
   * @param fileConfKey Property key used to configure path to the credential file
   * @param principalConfKey Property key used to configure login principal
   * @param localhost Current hostname to use in any credentials
   * @throws IOException underlying exception from SecurityUtil.login() call
   */
  public void login(String fileConfKey, String principalConfKey, String localhost)
      throws IOException {
    User.login(getConf(), fileConfKey, principalConfKey, localhost);
  }
}