/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test that we add tmpjars correctly, including the named dependencies. Runs as an integration
 * test so that the classpath is realistic.
 */
@Category(IntegrationTests.class)
public class IntegrationTestTableMapReduceUtil implements Configurable, Tool {

  private static IntegrationTestingUtility util;

  @BeforeClass
  public static void provisionCluster() throws Exception {
    if (null == util) {
      util = new IntegrationTestingUtility();
    }
  }

  @Before
  public void skipMiniCluster() {
    // The test probably also works against a local cluster, but
    // IntegrationTestingUtility doesn't support that concept.
    assumeTrue("test requires a distributed cluster.", util.isDistributedCluster());
  }

  /**
   * Look for jars we expect to be on the classpath by name.
   */
  @Test
  public void testAddDependencyJars() throws Exception {
    Job job = new Job();
    TableMapReduceUtil.addDependencyJars(job);
    String tmpjars = job.getConfiguration().get("tmpjars");

    // verify presence of modules
    assertTrue(tmpjars.contains("hbase-common"));
    assertTrue(tmpjars.contains("hbase-protocol"));
    assertTrue(tmpjars.contains("hbase-client"));
    assertTrue(tmpjars.contains("hbase-hadoop-compat"));
    assertTrue(tmpjars.contains("hbase-server"));

    // verify presence of 3rd party dependencies.
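    // These libraries are runtime dependencies of HBase itself, so addDependencyJars
    // is expected to ship their jars alongside the HBase module jars checked above.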
    assertTrue(tmpjars.contains("zookeeper"));
    assertTrue(tmpjars.contains("netty"));
    assertTrue(tmpjars.contains("protobuf"));
    assertTrue(tmpjars.contains("guava"));
  }

  @Override
  public int run(String[] args) throws Exception {
    provisionCluster();
    skipMiniCluster();
    testAddDependencyJars();
    return 0;
  }

  @Override
  public void setConf(Configuration conf) {
    if (util != null) {
      throw new IllegalArgumentException(
        "setConf not supported after the test has been initialized.");
    }
    util = new IntegrationTestingUtility(conf);
  }

  @Override
  public Configuration getConf() {
    return util.getConfiguration();
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    int status = ToolRunner.run(conf, new IntegrationTestTableMapReduceUtil(), args);
    System.exit(status);
  }
}