/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.ipc;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeoutException;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.hbase.thirdparty.io.netty.channel.DefaultEventLoop;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;

import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;

@Category({ ClientTests.class, SmallTests.class })
public class TestIPCUtil {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestIPCUtil.class);

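  /**
   * Reflectively instantiate the given throwable type, trying the no-arg, (String), (Throwable)
   * and (String, Throwable) constructors in turn.
   */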
  private static Throwable create(Class<? extends Throwable> clazz) throws InstantiationException,
    IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor();
      c.setAccessible(true);
      return c.newInstance();
    } catch (NoSuchMethodException e) {
      // fall through
    }

    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor(String.class);
      c.setAccessible(true);
      return c.newInstance("error");
    } catch (NoSuchMethodException e) {
      // fall through
    }

    try {
      Constructor<? extends Throwable> c = clazz.getDeclaredConstructor(Throwable.class);
      c.setAccessible(true);
      return c.newInstance(new Exception("error"));
    } catch (NoSuchMethodException e) {
      // fall through
    }

    Constructor<? extends Throwable> c =
      clazz.getDeclaredConstructor(String.class, Throwable.class);
    c.setAccessible(true);
    return c.newInstance("error", new Exception("error"));
  }

  /**
   * See HBASE-21862. It is important to keep the original exception type for connection
   * exceptions.
   */
  @Test
  public void testWrapConnectionException() throws Exception {
    List<Throwable> exceptions = new ArrayList<>();
    for (Class<? extends Throwable> clazz : ClientExceptionsUtil.getConnectionExceptionTypes()) {
      exceptions.add(create(clazz));
    }
    Address addr = Address.fromParts("127.0.0.1", 12345);
    for (Throwable exception : exceptions) {
      if (exception instanceof TimeoutException) {
        assertThat(IPCUtil.wrapException(addr, null, exception),
          instanceOf(TimeoutIOException.class));
      } else {
        IOException ioe =
          IPCUtil.wrapException(addr, RegionInfoBuilder.FIRST_META_REGIONINFO, exception);
        // Assert that the exception contains the region name if one was supplied. HBASE-25735.
        // Not all exceptions get the region name stuffed into them.
        if (ioe.getMessage() != null) {
          assertTrue(ioe.getMessage()
            .contains(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionNameAsString()));
        }
        assertThat(ioe, instanceOf(exception.getClass()));
      }
    }
  }

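  /**
   * Verify that IPCUtil.execute calls run() directly while the recursion depth is within
   * IPCUtil.MAX_DEPTH (the stack trace keeps growing), and switches to eventLoop.execute beyond
   * that depth to prevent stack overflow.
   */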
  @Test
  public void testExecute() throws Exception {
    EventLoop eventLoop = new DefaultEventLoop();
    MutableInt executed = new MutableInt(0);
    MutableInt numStackTraceElements = new MutableInt(0);
    CompletableFuture<Void> future = new CompletableFuture<>();
    try {
      IPCUtil.execute(eventLoop, new Runnable() {

        @Override
        public void run() {
          int numElements = new Exception().getStackTrace().length;
          int depth = executed.getAndIncrement();
          if (depth <= IPCUtil.MAX_DEPTH) {
            if (numElements <= numStackTraceElements.intValue()) {
              future.completeExceptionally(
                new AssertionError("should call run directly but stack trace decreased from "
                  + numStackTraceElements.intValue() + " to " + numElements));
              return;
            }
            numStackTraceElements.setValue(numElements);
            IPCUtil.execute(eventLoop, this);
          } else {
            if (numElements >= numStackTraceElements.intValue()) {
              future.completeExceptionally(
                new AssertionError("should call eventLoop.execute to prevent stack overflow but"
                  + " stack trace increased from " + numStackTraceElements.intValue() + " to "
                  + numElements));
            } else {
              future.complete(null);
            }
          }
        }
      });
      FutureUtils.get(future);
    } finally {
      eventLoop.shutdownGracefully().get();
    }
  }

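  /**
   * Verify that IPCUtil.isFatalConnectionException recognizes a FatalConnectionException subclass
   * by the class name in the ExceptionResponse, and treats unrelated or unknown classes as
   * non-fatal.
   */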
  @Test
  public void testIsFatalConnectionException() {
    // Intentionally do not reference the class object directly, so the class is not loaded here;
    // this makes sure isFatalConnectionException can call Class.forName with initialized = false.
    ExceptionResponse resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyFatalConnectionException").build();
    assertTrue(IPCUtil.isFatalConnectionException(resp));

    resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyException").build();
    assertFalse(IPCUtil.isFatalConnectionException(resp));

    // class not found
    resp = ExceptionResponse.newBuilder()
      .setExceptionClassName("org.apache.hadoop.hbase.ipc.WhatEver").build();
    assertFalse(IPCUtil.isFatalConnectionException(resp));
  }
}