/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Generate a classpath string containing any jars required by mapreduce jobs. Specify additional
 * values by providing a comma-separated list of paths via -Dtmpjars.
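 * <p>
 * As a sketch of typical usage (the extra jar paths below are hypothetical placeholders):
 *
 * <pre>
 * hbase mapredcp -Dtmpjars=/path/to/extra1.jar,/path/to/extra2.jar
 * </pre>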
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class MapreduceDependencyClasspathTool implements Tool {

  private Configuration conf;

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length > 0) {
      System.err.println("Usage: hbase mapredcp [-Dtmpjars=...]");
      System.err
        .println("  Construct a CLASSPATH containing dependency jars required to run a mapreduce");
      System.err
        .println("  job. By default, includes any jars detected by TableMapReduceUtil. Provide");
      System.err.println("  additional entries by specifying a comma-separated list in tmpjars.");
      return 0;
    }

    // addHBaseDependencyJars records the HBase dependency jars in the configuration's tmpjars
    // setting; buildDependencyClasspath turns that list into the classpath string printed below.
    TableMapReduceUtil.addHBaseDependencyJars(getConf());
    System.out.println(TableMapReduceUtil.buildDependencyClasspath(getConf()));
    return 0;
  }

  public static void main(String[] argv) throws Exception {
    // Silence the usual noise. This is probably fragile...
    Log4jUtils.setLogLevel("org.apache.hadoop.hbase", "WARN");
    System.exit(
      ToolRunner.run(HBaseConfiguration.create(), new MapreduceDependencyClasspathTool(), argv));
  }
}