/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.ipc;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeoutException;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import org.apache.hbase.thirdparty.io.netty.channel.DefaultEventLoop;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;

import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;

@Tag(ClientTests.TAG)
@Tag(SmallTests.TAG)
public class TestIPCUtil {

  /**
   * Instantiate an arbitrary {@link Throwable} subclass reflectively, trying the common exception
   * constructor shapes in order: no-arg, {@code (String)}, {@code (Throwable)},
   * {@code (String, Throwable)} and finally {@code (Throwable, Throwable)}.
   * @param clazz the exception class to instantiate
   * @return a newly constructed instance of {@code clazz}
   * @throws NoSuchMethodException if none of the tried constructor signatures exists
   */
  private static Throwable create(Class<? extends Throwable> clazz) throws InstantiationException,
    IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor();
      c.setAccessible(true);
      return c.newInstance();
    } catch (NoSuchMethodException e) {
      // fall through
    }

    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor(String.class);
      c.setAccessible(true);
      return c.newInstance("error");
    } catch (NoSuchMethodException e) {
      // fall through
    }

    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor(Throwable.class);
      c.setAccessible(true);
      return c.newInstance(new Exception("error"));
    } catch (NoSuchMethodException e) {
      // fall through
    }

    try {
      Constructor<? extends Throwable> c =
        clazz.getDeclaredConstructor(String.class, Throwable.class);
      c.setAccessible(true);
      return c.newInstance("error", new Exception("error"));
    } catch (NoSuchMethodException e) {
      // fall through
    }

    Constructor<? extends Throwable> c =
      clazz.getDeclaredConstructor(Throwable.class, Throwable.class);
    c.setAccessible(true);
    // Both constructor parameters are declared as Throwable, so both arguments must be
    // Throwables; the previous code passed the String "error" as the second argument, which
    // would make Constructor.newInstance throw IllegalArgumentException at runtime.
    return c.newInstance(new Exception("error"), new Exception("error"));
  }

  /**
   * See HBASE-21862, it is important to keep original exception type for connection exceptions.
   */
  @Test
  public void testWrapConnectionException() throws Exception {
    List<Throwable> exceptions = new ArrayList<>();
    for (Class<? extends Throwable> clazz : ClientExceptionsUtil.getConnectionExceptionTypes()) {
      exceptions.add(create(clazz));
    }
    Address addr = Address.fromParts("127.0.0.1", 12345);
    for (Throwable exception : exceptions) {
      if (exception instanceof TimeoutException) {
        // TimeoutException is the one type that gets converted rather than preserved.
        assertThat(IPCUtil.wrapException(addr, null, exception),
          instanceOf(TimeoutIOException.class));
      } else {
        IOException ioe =
          IPCUtil.wrapException(addr, RegionInfoBuilder.FIRST_META_REGIONINFO, exception);
        // Assert that the exception contains the Region name if supplied. HBASE-25735.
        // Not all exceptions get the region stuffed into it.
        if (ioe.getMessage() != null) {
          assertTrue(ioe.getMessage()
            .contains(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionNameAsString()));
        }
        // The wrapped exception must keep the original runtime type (HBASE-21862).
        assertThat(ioe, instanceOf(exception.getClass()));
      }
    }
  }

  /**
   * Verify that {@link IPCUtil#execute} runs the task inline (growing the stack) up to
   * {@code IPCUtil.MAX_DEPTH} recursions, and beyond that hands the task to
   * {@code eventLoop.execute} so the stack does not overflow.
   */
  @Test
  public void testExecute() throws Exception {
    EventLoop eventLoop = new DefaultEventLoop();
    MutableInt executed = new MutableInt(0);
    MutableInt numStackTraceElements = new MutableInt(0);
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      IPCUtil.execute(eventLoop, new Runnable() {

        @Override
        public void run() {
          int numElements = new Exception().getStackTrace().length;
          int depth = executed.getAndIncrement();
          if (depth <= IPCUtil.MAX_DEPTH) {
            // While under the depth limit, each re-entrant call should run directly on the
            // current stack, so the stack trace must keep growing.
            if (numElements <= numStackTraceElements.intValue()) {
              future.completeExceptionally(
                new AssertionError("should call run directly but stack trace decreased from "
                  + numStackTraceElements.intValue() + " to " + numElements));
              return;
            }
            numStackTraceElements.setValue(numElements);
            IPCUtil.execute(eventLoop, this);
          } else {
            // Past the limit, the task must have been re-dispatched via eventLoop.execute,
            // which resets (shrinks) the stack.
            if (numElements >= numStackTraceElements.intValue()) {
              future.completeExceptionally(
                new AssertionError("should call eventLoop.execute to prevent stack overflow but"
                  + " stack trace increased from " + numStackTraceElements.intValue() + " to "
                  + numElements));
            } else {
              future.complete(null);
            }
          }
        }
      });
      FutureUtils.get(future);
    } finally {
      eventLoop.shutdownGracefully().get();
    }
  }

  /**
   * Verify fatal-connection-exception classification by class name, including the class-not-found
   * and class-not-yet-loaded cases.
   */
  @Test
  public void testIsFatalConnectionException() {
    // intentionally not reference the class object directly, so here we will not load the class, to
    // make sure that in isFatalConnectionException, we can use initialized = false when calling
    // Class.forName
    ExceptionResponse resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyFatalConnectionException").build();
    assertTrue(IPCUtil.isFatalConnectionException(resp));

    resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyException").build();
    assertFalse(IPCUtil.isFatalConnectionException(resp));

    // class not found
    resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.WhatEver").build();
    assertFalse(IPCUtil.isFatalConnectionException(resp));
  }
}