/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.tool.coprocessor;

import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.tool.PreUpgradeValidator;
import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

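/**
 * Pre-upgrade validator for coprocessor compatibility. It checks the coprocessor
 * classes configured on tables, in the configuration, or named explicitly on the
 * command line, and reports every declared method that existed in the branch-1
 * coprocessor API but is missing from the current API, since such methods are no
 * longer called by HBase. The tool is run as a sub-command of
 * {@link PreUpgradeValidator}; a typical invocation (command names shown here with
 * their usual values) looks like
 * {@code hbase pre-upgrade validate-cp -jar /path/to/coprocessors -config}.
 * See {@link #printUsage()} for the accepted options.
 */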
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class CoprocessorValidator extends AbstractHBaseTool {
  private static final Logger LOG = LoggerFactory
      .getLogger(CoprocessorValidator.class);

  private CoprocessorMethods branch1;
  private CoprocessorMethods current;

  private final List<String> jars;
  private final List<Pattern> tablePatterns;
  private final List<String> classes;
  private boolean config;

  private boolean dieOnWarnings;

  public CoprocessorValidator() {
    branch1 = new Branch1CoprocessorMethods();
    current = new CurrentCoprocessorMethods();

    jars = new ArrayList<>();
    tablePatterns = new ArrayList<>();
    classes = new ArrayList<>();
  }

  /**
   * This class loader implementation calls the {@link #resolveClass(Class)}
   * method for every loaded class, so the extra validation that comes with
   * linking takes place eagerly, <a
   * href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-12.html#jls-12.3">
   * as described in the JLS</a>.
   */
  private static final class ResolverUrlClassLoader extends URLClassLoader {
    private ResolverUrlClassLoader(URL[] urls, ClassLoader parent) {
      super(urls, parent);
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
      return loadClass(name, true);
    }
  }

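  /**
   * Creates a {@link ResolverUrlClassLoader} over the given URLs, parented to the
   * class loader of this tool.
   */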
  private ResolverUrlClassLoader createClassLoader(URL[] urls) {
    return createClassLoader(urls, getClass().getClassLoader());
  }

  private ResolverUrlClassLoader createClassLoader(URL[] urls, ClassLoader parent) {
    return AccessController.doPrivileged(new PrivilegedAction<ResolverUrlClassLoader>() {
      @Override
      public ResolverUrlClassLoader run() {
        return new ResolverUrlClassLoader(urls, parent);
      }
    });
  }

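  /**
   * Copies the coprocessor jar behind {@code path} from the (possibly remote)
   * file system to a local temporary file and builds a class loader over that
   * copy, using {@code parent} as the parent class loader.
   */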
  private ResolverUrlClassLoader createClassLoader(ClassLoader parent,
      org.apache.hadoop.fs.Path path) throws IOException {
    Path tempPath = Files.createTempFile("hbase-coprocessor-", ".jar");
    org.apache.hadoop.fs.Path destination = new org.apache.hadoop.fs.Path(tempPath.toString());

    LOG.debug("Copying coprocessor jar '{}' to '{}'.", path, tempPath);

    FileSystem fileSystem = FileSystem.get(getConf());
    fileSystem.copyToLocalFile(path, destination);

    URL url = tempPath.toUri().toURL();

    return createClassLoader(new URL[] { url }, parent);
  }

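  /**
   * Loads {@code className} through {@code classLoader} and reports a warning for
   * every declared method that exists in the branch-1 coprocessor API but not in
   * the current one; loading failures are reported as errors.
   */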
  private void validate(ClassLoader classLoader, String className,
      List<CoprocessorViolation> violations) {
    LOG.debug("Validating class '{}'.", className);

    try {
      Class<?> clazz = classLoader.loadClass(className);

      for (Method method : clazz.getDeclaredMethods()) {
        LOG.trace("Validating method '{}'.", method);

        if (branch1.hasMethod(method) && !current.hasMethod(method)) {
          CoprocessorViolation violation = new CoprocessorViolation(
              className, Severity.WARNING, "method '" + method +
              "' was removed from the new coprocessor API, so it won't be called by HBase");
          violations.add(violation);
        }
      }
    } catch (ClassNotFoundException e) {
      CoprocessorViolation violation = new CoprocessorViolation(
          className, Severity.ERROR, "no such class", e);
      violations.add(violation);
    } catch (RuntimeException | Error e) {
      CoprocessorViolation violation = new CoprocessorViolation(
          className, Severity.ERROR, "could not validate class", e);
      violations.add(violation);
    }
  }

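  /**
   * Validates every class name in {@code classNames} with the given class loader,
   * appending any violations found to {@code violations}.
   */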
  public void validateClasses(ClassLoader classLoader, List<String> classNames,
      List<CoprocessorViolation> violations) {
    for (String className : classNames) {
      validate(classLoader, className, violations);
    }
  }

  public void validateClasses(ClassLoader classLoader, String[] classNames,
      List<CoprocessorViolation> violations) {
    validateClasses(classLoader, Arrays.asList(classNames), violations);
  }

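  /**
   * Validates the coprocessors of every table whose name matches {@code pattern}.
   * Coprocessors that declare a jar path are loaded through a dedicated class loader
   * built over a local copy of that jar; the others are resolved from
   * {@code classLoader}.
   */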
  @VisibleForTesting
  protected void validateTables(ClassLoader classLoader, Admin admin,
      Pattern pattern, List<CoprocessorViolation> violations) throws IOException {
    List<TableDescriptor> tableDescriptors = admin.listTableDescriptors(pattern);

    for (TableDescriptor tableDescriptor : tableDescriptors) {
      LOG.debug("Validating table {}", tableDescriptor.getTableName());

      Collection<CoprocessorDescriptor> coprocessorDescriptors =
          tableDescriptor.getCoprocessorDescriptors();

      for (CoprocessorDescriptor coprocessorDescriptor : coprocessorDescriptors) {
        String className = coprocessorDescriptor.getClassName();
        Optional<String> jarPath = coprocessorDescriptor.getJarPath();

        if (jarPath.isPresent()) {
          org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(jarPath.get());
          try (ResolverUrlClassLoader cpClassLoader = createClassLoader(classLoader, path)) {
            validate(cpClassLoader, className, violations);
          } catch (IOException e) {
            CoprocessorViolation violation = new CoprocessorViolation(
                className, Severity.ERROR,
                "could not validate jar file '" + path + "'", e);
            violations.add(violation);
          }
        } else {
          validate(classLoader, className, violations);
        }
      }
    }
  }

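  /**
   * Convenience overload that opens a {@link Connection} and {@link Admin} from the
   * tool's configuration before delegating to
   * {@link #validateTables(ClassLoader, Admin, Pattern, List)}.
   */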
  private void validateTables(ClassLoader classLoader, Pattern pattern,
      List<CoprocessorViolation> violations) throws IOException {
    try (Connection connection = ConnectionFactory.createConnection(getConf());
        Admin admin = connection.getAdmin()) {
      validateTables(classLoader, admin, pattern, violations);
    }
  }

  @Override
  protected void printUsage() {
    String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
        PreUpgradeValidator.VALIDATE_CP_NAME +
        " [-jar ...] [-class ... | -table ... | -config]";
    printUsage(header, "Options:", "");
  }

  @Override
  protected void addOptions() {
    addOptNoArg("e", "Treat warnings as errors.");
    addOptWithArg("jar", "Jar file/directory of the coprocessor.");
    addOptWithArg("table", "Table coprocessor(s) to check.");
    addOptWithArg("class", "Coprocessor class(es) to check.");
    addOptNoArg("config", "Obtain coprocessor class(es) from configuration.");
  }

  @Override
  protected void processOptions(CommandLine cmd) {
    String[] jars = cmd.getOptionValues("jar");
    if (jars != null) {
      Collections.addAll(this.jars, jars);
    }

    String[] tables = cmd.getOptionValues("table");
    if (tables != null) {
      Arrays.stream(tables).map(Pattern::compile).forEach(tablePatterns::add);
    }

    String[] classes = cmd.getOptionValues("class");
    if (classes != null) {
      Collections.addAll(this.classes, classes);
    }

    config = cmd.hasOption("config");
    dieOnWarnings = cmd.hasOption("e");
  }

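  /**
   * Turns the {@code -jar} arguments into classpath URLs: a directory entry
   * contributes every regular file directly inside it, while any other entry is
   * treated as a single jar file.
   */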
  private List<URL> buildClasspath(List<String> jars) throws IOException {
    List<URL> urls = new ArrayList<>();

    for (String jar : jars) {
      Path jarPath = Paths.get(jar);
      if (Files.isDirectory(jarPath)) {
        try (Stream<Path> stream = Files.list(jarPath)) {
          List<Path> files = stream
              .filter((path) -> Files.isRegularFile(path))
              .collect(Collectors.toList());

          for (Path file : files) {
            URL url = file.toUri().toURL();
            urls.add(url);
          }
        }
      } else {
        URL url = jarPath.toUri().toURL();
        urls.add(url);
      }
    }

    return urls;
  }

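  /**
   * Builds the class loader from the {@code -jar} arguments, validates the requested
   * table, class, and configuration coprocessors, then logs every violation found.
   * Returns a failure exit code on errors, or on warnings when {@code -e} was given.
   */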
  @Override
  protected int doWork() throws Exception {
    if (tablePatterns.isEmpty() && classes.isEmpty() && !config) {
      LOG.error("Please give at least one -table, -class or -config parameter.");
      printUsage();
      return EXIT_FAILURE;
    }

    List<URL> urlList = buildClasspath(jars);
    URL[] urls = urlList.toArray(new URL[urlList.size()]);

    LOG.debug("Classpath: {}", urlList);

    List<CoprocessorViolation> violations = new ArrayList<>();

    try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) {
      for (Pattern tablePattern : tablePatterns) {
        validateTables(classLoader, tablePattern, violations);
      }

      validateClasses(classLoader, classes, violations);

      if (config) {
        String[] masterCoprocessors =
            getConf().getStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
        if (masterCoprocessors != null) {
          validateClasses(classLoader, masterCoprocessors, violations);
        }

        String[] regionCoprocessors =
            getConf().getStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
        if (regionCoprocessors != null) {
          validateClasses(classLoader, regionCoprocessors, violations);
        }
      }
    }

    boolean error = false;

    for (CoprocessorViolation violation : violations) {
      String className = violation.getClassName();
      String message = violation.getMessage();
      Throwable throwable = violation.getThrowable();

      switch (violation.getSeverity()) {
        case WARNING:
          if (throwable == null) {
            LOG.warn("Warning in class '{}': {}.", className, message);
          } else {
            LOG.warn("Warning in class '{}': {}.", className, message, throwable);
          }

          if (dieOnWarnings) {
            error = true;
          }

          break;
        case ERROR:
          if (throwable == null) {
            LOG.error("Error in class '{}': {}.", className, message);
          } else {
            LOG.error("Error in class '{}': {}.", className, message, throwable);
          }

          error = true;

          break;
      }
    }

    return (error) ? EXIT_FAILURE : EXIT_SUCCESS;
  }
}