001/** 002 * 003 * Licensed to the Apache Software Foundation (ASF) under one 004 * or more contributor license agreements. See the NOTICE file 005 * distributed with this work for additional information 006 * regarding copyright ownership. The ASF licenses this file 007 * to you under the Apache License, Version 2.0 (the 008 * "License"); you may not use this file except in compliance 009 * with the License. You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 */ 019 020package org.apache.hadoop.hbase.tool.coprocessor; 021 022import java.io.IOException; 023import java.lang.reflect.Method; 024import java.net.URL; 025import java.net.URLClassLoader; 026import java.nio.file.Files; 027import java.nio.file.Path; 028import java.nio.file.Paths; 029import java.security.AccessController; 030import java.security.PrivilegedAction; 031import java.util.ArrayList; 032import java.util.Arrays; 033import java.util.Collection; 034import java.util.Collections; 035import java.util.List; 036import java.util.Optional; 037import java.util.regex.Pattern; 038import java.util.stream.Collectors; 039import java.util.stream.Stream; 040import org.apache.hadoop.fs.FileSystem; 041import org.apache.hadoop.hbase.HBaseInterfaceAudience; 042import org.apache.hadoop.hbase.client.Admin; 043import org.apache.hadoop.hbase.client.Connection; 044import org.apache.hadoop.hbase.client.ConnectionFactory; 045import org.apache.hadoop.hbase.client.CoprocessorDescriptor; 046import org.apache.hadoop.hbase.client.TableDescriptor; 047import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; 048import 
org.apache.hadoop.hbase.tool.PreUpgradeValidator;
import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

/**
 * Pre-upgrade tool that validates coprocessor classes against the current
 * coprocessor API. For every class to be checked it reports a
 * {@link CoprocessorViolation} when a method that existed in the branch-1
 * coprocessor API has been removed from the current API (the method would
 * silently never be invoked after an upgrade), or when the class cannot be
 * loaded/resolved at all.
 *
 * <p>Classes can be supplied directly ({@code -class}), discovered from table
 * descriptors ({@code -table}), or read from the master/region coprocessor
 * configuration keys ({@code -config}); extra jars are put on the classpath
 * with {@code -jar}.
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class CoprocessorValidator extends AbstractHBaseTool {
  private static final Logger LOG = LoggerFactory
      .getLogger(CoprocessorValidator.class);

  /** Hook methods present in the branch-1 (old) coprocessor API. */
  private final CoprocessorMethods branch1;
  /** Hook methods present in the current coprocessor API. */
  private final CoprocessorMethods current;

  /** Extra jar files/directories to put on the validation classpath. */
  private final List<String> jars;
  /** Table name patterns whose coprocessors should be checked. */
  private final List<Pattern> tablePatterns;
  /** Explicit coprocessor class names to check. */
  private final List<String> classes;
  /** Whether to also check classes listed in the configuration. */
  private boolean config;

  /** When set, warnings are treated as errors for the exit code. */
  private boolean dieOnWarnings;

  public CoprocessorValidator() {
    branch1 = new Branch1CoprocessorMethods();
    current = new CurrentCoprocessorMethods();

    jars = new ArrayList<>();
    tablePatterns = new ArrayList<>();
    classes = new ArrayList<>();
  }

  /**
   * This classloader implementation calls {@link #resolveClass(Class)}
   * method for every loaded class. It means that some extra validation will
   * take place <a
   * href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-12.html#jls-12.3">
   * according to JLS</a>.
   */
  private static final class ResolverUrlClassLoader extends URLClassLoader {
    private ResolverUrlClassLoader(URL[] urls, ClassLoader parent) {
      super(urls, parent);
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
      // resolve = true forces linking, surfacing errors that plain loading
      // would defer (e.g. missing superclasses of referenced types)
      return loadClass(name, true);
    }
  }

  private ResolverUrlClassLoader createClassLoader(URL[] urls) {
    return createClassLoader(urls, getClass().getClassLoader());
  }

  private ResolverUrlClassLoader createClassLoader(URL[] urls, ClassLoader parent) {
    // classloader creation is a privileged operation when a SecurityManager
    // is installed
    return AccessController.doPrivileged(new PrivilegedAction<ResolverUrlClassLoader>() {
      @Override
      public ResolverUrlClassLoader run() {
        return new ResolverUrlClassLoader(urls, parent);
      }
    });
  }

  /**
   * Copies a (possibly remote, e.g. HDFS) coprocessor jar to a local temp
   * file and builds a resolving classloader over it.
   *
   * @param parent parent classloader for delegation
   * @param path   jar location as configured on the table descriptor
   * @return a classloader that resolves classes from the copied jar
   * @throws IOException if the jar cannot be copied locally
   */
  private ResolverUrlClassLoader createClassLoader(ClassLoader parent,
      org.apache.hadoop.fs.Path path) throws IOException {
    Path tempPath = Files.createTempFile("hbase-coprocessor-", ".jar");
    // clean up the local copy when the tool's JVM exits; otherwise one temp
    // jar would be leaked per validated table coprocessor
    tempPath.toFile().deleteOnExit();
    org.apache.hadoop.fs.Path destination = new org.apache.hadoop.fs.Path(tempPath.toString());

    LOG.debug("Copying coprocessor jar '{}' to '{}'.", path, tempPath);

    FileSystem fileSystem = FileSystem.get(getConf());
    fileSystem.copyToLocalFile(path, destination);

    URL url = tempPath.toUri().toURL();

    return createClassLoader(new URL[] { url }, parent);
  }

  /**
   * Validates a single class: loads it through the given classloader and
   * compares each declared method against the branch-1 and current API
   * method sets, collecting violations.
   */
  private void validate(ClassLoader classLoader, String className,
      List<CoprocessorViolation> violations) {
    LOG.debug("Validating class '{}'.", className);

    try {
      Class<?> clazz = classLoader.loadClass(className);

      for (Method method : clazz.getDeclaredMethods()) {
        LOG.trace("Validating method '{}'.", method);

        if (branch1.hasMethod(method) && !current.hasMethod(method)) {
          CoprocessorViolation violation = new CoprocessorViolation(
              className, Severity.WARNING, "method '" + method +
              "' was removed from new coprocessor API, so it won't be called by HBase");
          violations.add(violation);
        }
      }
    } catch (ClassNotFoundException e) {
      CoprocessorViolation violation = new CoprocessorViolation(
          className, Severity.ERROR, "no such class", e);
      violations.add(violation);
    } catch (RuntimeException | Error e) {
      // class resolution can throw LinkageError and friends; report them as
      // validation failures instead of aborting the whole run
      CoprocessorViolation violation = new CoprocessorViolation(
          className, Severity.ERROR, "could not validate class", e);
      violations.add(violation);
    }
  }

  /** Validates every class name in the list, appending to {@code violations}. */
  public void validateClasses(ClassLoader classLoader, List<String> classNames,
      List<CoprocessorViolation> violations) {
    for (String className : classNames) {
      validate(classLoader, className, violations);
    }
  }

  /** Array convenience overload of {@link #validateClasses(ClassLoader, List, List)}. */
  public void validateClasses(ClassLoader classLoader, String[] classNames,
      List<CoprocessorViolation> violations) {
    validateClasses(classLoader, Arrays.asList(classNames), violations);
  }

  /**
   * Validates the coprocessors of every table matching {@code pattern}. A
   * coprocessor with its own jar path is loaded through a dedicated
   * classloader over a local copy of that jar; otherwise the shared
   * classloader is used.
   */
  @InterfaceAudience.Private
  protected void validateTables(ClassLoader classLoader, Admin admin,
      Pattern pattern, List<CoprocessorViolation> violations) throws IOException {
    List<TableDescriptor> tableDescriptors = admin.listTableDescriptors(pattern);

    for (TableDescriptor tableDescriptor : tableDescriptors) {
      LOG.debug("Validating table {}", tableDescriptor.getTableName());

      Collection<CoprocessorDescriptor> coprocessorDescriptors =
          tableDescriptor.getCoprocessorDescriptors();

      for (CoprocessorDescriptor coprocessorDescriptor : coprocessorDescriptors) {
        String className = coprocessorDescriptor.getClassName();
        Optional<String> jarPath = coprocessorDescriptor.getJarPath();

        if (jarPath.isPresent()) {
          org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(jarPath.get());
          try (ResolverUrlClassLoader cpClassLoader = createClassLoader(classLoader, path)) {
            validate(cpClassLoader, className, violations);
          } catch (IOException e) {
            CoprocessorViolation violation = new CoprocessorViolation(
                className, Severity.ERROR,
                "could not validate jar file '" + path + "'", e);
            violations.add(violation);
          }
        } else {
          validate(classLoader, className, violations);
        }
      }
    }
  }

  /** Opens a connection from the tool's configuration and validates tables. */
  private void validateTables(ClassLoader classLoader, Pattern pattern,
      List<CoprocessorViolation> violations) throws IOException {
    try (Connection connection = ConnectionFactory.createConnection(getConf());
        Admin admin = connection.getAdmin()) {
      validateTables(classLoader, admin, pattern, violations);
    }
  }

  @Override
  protected void printUsage() {
    String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
        PreUpgradeValidator.VALIDATE_CP_NAME +
        " [-jar ...] [-class ... | -table ... | -config]";
    printUsage(header, "Options:", "");
  }

  @Override
  protected void addOptions() {
    addOptNoArg("e", "Treat warnings as errors.");
    addOptWithArg("jar", "Jar file/directory of the coprocessor.");
    addOptWithArg("table", "Table coprocessor(s) to check.");
    addOptWithArg("class", "Coprocessor class(es) to check.");
    addOptNoArg("config", "Obtain coprocessor class(es) from configuration.");
  }

  @Override
  protected void processOptions(CommandLine cmd) {
    String[] jars = cmd.getOptionValues("jar");
    if (jars != null) {
      Collections.addAll(this.jars, jars);
    }

    String[] tables = cmd.getOptionValues("table");
    if (tables != null) {
      Arrays.stream(tables).map(Pattern::compile).forEach(tablePatterns::add);
    }

    String[] classes = cmd.getOptionValues("class");
    if (classes != null) {
      Collections.addAll(this.classes, classes);
    }

    config = cmd.hasOption("config");
    dieOnWarnings = cmd.hasOption("e");
  }

  /**
   * Expands the {@code -jar} arguments into classpath URLs. A directory
   * argument contributes every regular file directly inside it (non
   * recursive); a file argument contributes itself.
   *
   * @throws IOException if a directory cannot be listed
   */
  private List<URL> buildClasspath(List<String> jars) throws IOException {
    List<URL> urls = new ArrayList<>();

    for (String jar : jars) {
      Path jarPath = Paths.get(jar);
      if (Files.isDirectory(jarPath)) {
        // Files.list holds a directory handle; close it via try-with-resources
        try (Stream<Path> stream = Files.list(jarPath)) {
          List<Path> files = stream
              .filter((path) -> Files.isRegularFile(path))
              .collect(Collectors.toList());

          for (Path file : files) {
            URL url = file.toUri().toURL();
            urls.add(url);
          }
        }
      } else {
        URL url = jarPath.toUri().toURL();
        urls.add(url);
      }
    }

    return urls;
  }

  /**
   * Runs the validation: builds the classpath, collects violations from
   * tables, explicit classes and/or configuration, then logs them.
   *
   * @return {@code EXIT_FAILURE} on any error (or warning with {@code -e}),
   *         otherwise {@code EXIT_SUCCESS}
   */
  @Override
  protected int doWork() throws Exception {
    if (tablePatterns.isEmpty() && classes.isEmpty() && !config) {
      LOG.error("Please give at least one -table, -class or -config parameter.");
      printUsage();
      return EXIT_FAILURE;
    }

    List<URL> urlList = buildClasspath(jars);
    URL[] urls = urlList.toArray(new URL[0]);

    LOG.debug("Classpath: {}", urlList);

    List<CoprocessorViolation> violations = new ArrayList<>();

    try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) {
      for (Pattern tablePattern : tablePatterns) {
        validateTables(classLoader, tablePattern, violations);
      }

      validateClasses(classLoader, classes, violations);

      if (config) {
        String[] masterCoprocessors =
            getConf().getStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
        if (masterCoprocessors != null) {
          validateClasses(classLoader, masterCoprocessors, violations);
        }

        String[] regionCoprocessors =
            getConf().getStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
        if (regionCoprocessors != null) {
          validateClasses(classLoader, regionCoprocessors, violations);
        }
      }
    }

    boolean error = false;

    for (CoprocessorViolation violation : violations) {
      String className = violation.getClassName();
      String message = violation.getMessage();
      Throwable throwable = violation.getThrowable();

      switch (violation.getSeverity()) {
        case WARNING:
          if (throwable == null) {
            LOG.warn("Warning in class '{}': {}.", className, message);
          } else {
            LOG.warn("Warning in class '{}': {}.", className, message, throwable);
          }

          if (dieOnWarnings) {
            error = true;
          }

          break;
        case ERROR:
          if (throwable == null) {
            LOG.error("Error in class '{}': {}.", className, message);
          } else {
            LOG.error("Error in class '{}': {}.", className, message, throwable);
          }

          error = true;

          break;
      }
    }

    return (error) ? EXIT_FAILURE : EXIT_SUCCESS;
  }
}