protected class RpcClientImpl.Connection extends Thread
| Modifier and Type | Class and Description |
|---|---|
| private class | RpcClientImpl.Connection.CallSender If the client wants to interrupt its calls easily (i.e. |
Nested classes/interfaces inherited from class java.lang.Thread: Thread.State, Thread.UncaughtExceptionHandler

| Modifier and Type | Field and Description |
|---|---|
| private AuthMethod | authMethod |
| protected ConcurrentSkipListMap<Integer,Call> | calls |
| protected RpcClientImpl.Connection.CallSender | callSender |
| private Codec | codec |
| private org.apache.hadoop.io.compress.CompressionCodec | compressor |
| private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader | header |
| protected DataInputStream | in |
| protected DataOutputStream | out |
| private Object | outLock |
| private int | reloginMaxBackoff |
| protected ConnectionId | remoteId |
| private HBaseSaslRpcClient | saslRpcClient |
| private InetSocketAddress | server |
| private String | serverPrincipal |
| protected AtomicBoolean | shouldCloseConnection |
| protected Socket | socket |
| private org.apache.hadoop.security.token.Token<? extends org.apache.hadoop.security.token.TokenIdentifier> | token |
| private boolean | useSasl |
Fields inherited from class java.lang.Thread: MAX_PRIORITY, MIN_PRIORITY, NORM_PRIORITY

| Constructor and Description |
|---|
| RpcClientImpl.Connection(ConnectionId remoteId, Codec codec, org.apache.hadoop.io.compress.CompressionCodec compressor) |
| Modifier and Type | Method and Description |
|---|---|
| private void | checkIsOpen() |
| protected void | cleanupCalls(boolean allCalls) Cleanup the calls older than a given timeout, in milliseconds. |
| protected void | close() Close the connection. |
| protected void | closeConnection() |
| private org.apache.hadoop.ipc.RemoteException | createRemoteException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse e) |
| private void | disposeSasl() |
| private void | doNotify() |
| InetSocketAddress | getRemoteAddress() |
| private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation | getUserInfo(org.apache.hadoop.security.UserGroupInformation ugi) |
| private void | handleConnectionFailure(int curRetries, int maxRetries, IOException ioe) Handle connection failures: if the current number of retries is equal to the max number of retries, stop retrying and throw the exception; otherwise back off N seconds and try connecting again. |
| private void | handleSaslConnectionFailure(int currRetries, int maxRetries, Exception ex, Random rand, org.apache.hadoop.security.UserGroupInformation user) If multiple clients with the same principal try to connect to the same server at the same time, the server assumes a replay attack is in progress. |
| private boolean | isFatalConnectionException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse e) |
| protected boolean | markClosed(IOException e) |
| protected void | readResponse() |
| void | run() |
| protected void | setupConnection() |
| protected void | setupIOstreams() |
| private boolean | setupSaslConnection(InputStream in2, OutputStream out2) |
| private boolean | shouldAuthenticateOverKrb() |
| protected void | tracedWriteRequest(Call call, int priority, org.apache.htrace.Span span) |
| protected boolean | waitForWork() |
| private void | writeConnectionHeader() Write the connection header. |
| private void | writeConnectionHeaderPreamble(OutputStream outStream) Write the RPC header. |
| private void | writeRequest(Call call, int priority, org.apache.htrace.Span span) Initiates a call by sending the parameter to the remote server. |
Methods inherited from class java.lang.Thread: activeCount, checkAccess, clone, countStackFrames, currentThread, destroy, dumpStack, enumerate, getAllStackTraces, getContextClassLoader, getDefaultUncaughtExceptionHandler, getId, getName, getPriority, getStackTrace, getState, getThreadGroup, getUncaughtExceptionHandler, holdsLock, interrupt, interrupted, isAlive, isDaemon, isInterrupted, join, join, join, resume, setContextClassLoader, setDaemon, setDefaultUncaughtExceptionHandler, setName, setPriority, setUncaughtExceptionHandler, sleep, sleep, start, stop, stop, suspend, toString, yield

private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader header
protected ConnectionId remoteId
protected Socket socket
protected DataInputStream in
protected DataOutputStream out
private Object outLock
private InetSocketAddress server
private String serverPrincipal
private AuthMethod authMethod
private boolean useSasl
private org.apache.hadoop.security.token.Token<? extends org.apache.hadoop.security.token.TokenIdentifier> token
private HBaseSaslRpcClient saslRpcClient
private int reloginMaxBackoff
private final Codec codec
private final org.apache.hadoop.io.compress.CompressionCodec compressor
protected final ConcurrentSkipListMap<Integer,Call> calls
protected final AtomicBoolean shouldCloseConnection
protected final RpcClientImpl.Connection.CallSender callSender
RpcClientImpl.Connection(ConnectionId remoteId, Codec codec, org.apache.hadoop.io.compress.CompressionCodec compressor) throws IOException
Throws: IOException

private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo(org.apache.hadoop.security.UserGroupInformation ugi)
protected void setupConnection()
throws IOException
Throws: IOException

protected void closeConnection()
private void handleConnectionFailure(int curRetries,
int maxRetries,
IOException ioe)
throws IOException
Parameters: curRetries - current number of retries; maxRetries - max number of retries allowed; ioe - failure reason
Throws: IOException - if max number of retries is reached
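The retry policy described above (throw once the retry budget is exhausted, otherwise back off for N seconds before reconnecting) can be pictured with a small sketch. This is a hedged illustration, not the HBase implementation: the class name, the fixed backoff interval, and the interrupt handling are assumptions drawn only from the summary above.

```java
import java.io.IOException;
import java.util.concurrent.TimeUnit;

// Illustrative sketch of the documented retry policy; not the actual HBase code.
final class ConnectionFailureSketch {
  /** Hypothetical stand-in for Connection.handleConnectionFailure(int, int, IOException). */
  static void handleConnectionFailure(int curRetries, int maxRetries, IOException ioe)
      throws IOException {
    if (curRetries >= maxRetries) {
      throw ioe;                              // out of retries: surface the original failure
    }
    try {
      // Back off for "N seconds" before the caller attempts to reconnect.
      // The interval is a placeholder, not the value HBase actually uses.
      TimeUnit.SECONDS.sleep(1);
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();     // preserve interrupt status and stop retrying
      throw new IOException("interrupted while backing off", ie);
    }
  }
}
```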
private void checkIsOpen()
throws IOException
Throws: IOException - if the connection is not open.
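As a rough illustration of this contract (throw when the connection is no longer open), here is a minimal sketch. It assumes the check is driven by the shouldCloseConnection flag listed in the field summary; the exact condition and message are guesses, not the real implementation.

```java
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

// Minimal sketch of an "is the connection still open?" guard; not the HBase code.
final class CheckIsOpenSketch {
  private final AtomicBoolean shouldCloseConnection = new AtomicBoolean(false);

  /** Hypothetical stand-in for Connection.checkIsOpen(). */
  void checkIsOpen() throws IOException {
    if (shouldCloseConnection.get()) {
      // Documented behaviour: an IOException when the connection is not open.
      throw new IOException("connection is closing or already closed");
    }
  }
}
```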
protected boolean waitForWork()
throws InterruptedException
Throws: InterruptedException

public InetSocketAddress getRemoteAddress()
private void disposeSasl()
private boolean shouldAuthenticateOverKrb()
throws IOException
Throws: IOException

private boolean setupSaslConnection(InputStream in2, OutputStream out2) throws IOException
Throws: IOException

private void handleSaslConnectionFailure(int currRetries,
int maxRetries,
Exception ex,
Random rand,
org.apache.hadoop.security.UserGroupInformation user)
throws IOException,
InterruptedException
The retry logic is governed by the shouldAuthenticateOverKrb() method. When the user does not have valid
credentials (from cache or ticket), there is no need to retry; in such cases it is prudent to throw a runtime
exception when a SaslException is received from the underlying authentication implementation, so that there is
no retry at a higher level (e.g. HCM or HBaseAdmin).
Throws: IOException, InterruptedException
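To make the retry/no-retry split above concrete, here is a hedged sketch. The random backoff, the relogin step, the authenticateOverKrb flag, and the helper names are assumptions drawn only from the signature (currRetries, maxRetries, Random, UserGroupInformation) and the description; the real method differs in detail.

```java
import java.io.IOException;
import java.util.Random;
import javax.security.sasl.SaslException;

// Hedged sketch of the documented retry decision; not the HBase implementation.
final class SaslFailureSketch {
  /**
   * Hypothetical stand-in for Connection.handleSaslConnectionFailure(...).
   * 'relogin' represents re-acquiring Kerberos credentials before the next attempt.
   */
  static void handleSaslConnectionFailure(int currRetries, int maxRetries, Exception ex,
      Random rand, boolean authenticateOverKrb, Runnable relogin) throws IOException {
    if (authenticateOverKrb && currRetries < maxRetries) {
      relogin.run();                                 // refresh credentials
      try {
        // Random backoff so simultaneous clients with the same principal do not
        // hit the server at the same instant (which it would treat as a replay).
        Thread.sleep(rand.nextInt(1000) + 1000L);    // placeholder interval
      } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();
      }
      return;                                        // caller retries the SASL handshake
    }
    if (ex instanceof SaslException) {
      // No valid credentials: fail fast so higher layers (e.g. HCM, HBaseAdmin) do not retry.
      throw new RuntimeException("SASL authentication failed", ex);
    }
    throw new IOException("Failed to set up SASL connection", ex);
  }
}
```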
protected void setupIOstreams()
throws IOException
Throws: IOException

private void writeConnectionHeaderPreamble(OutputStream outStream) throws IOException
Throws: IOException
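The summary for writeConnectionHeaderPreamble only says it writes the RPC header, so the sketch below is purely illustrative: the magic bytes, version, and auth-code constants are placeholders, not the values HBase actually sends.

```java
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

// Illustrative preamble writer; the constants below are placeholders, not HBase's real values.
final class PreambleSketch {
  private static final byte[] MAGIC = "RPC!".getBytes(StandardCharsets.US_ASCII); // hypothetical magic word
  private static final byte VERSION = 0;                                          // hypothetical version byte
  private static final byte AUTH_CODE = 0;                                        // hypothetical auth-method code

  /** Hypothetical stand-in for Connection.writeConnectionHeaderPreamble(OutputStream). */
  static void writeConnectionHeaderPreamble(OutputStream outStream) throws IOException {
    outStream.write(MAGIC);       // fixed marker identifying the protocol
    outStream.write(VERSION);     // protocol version
    outStream.write(AUTH_CODE);   // which authentication method follows
    outStream.flush();
  }
}
```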
private void writeConnectionHeader()
throws IOException
Throws: IOException

protected void close()
protected void tracedWriteRequest(Call call, int priority, org.apache.htrace.Span span) throws IOException
Throws: IOException

private void writeRequest(Call call, int priority, org.apache.htrace.Span span) throws IOException
Throws: IOException
See Also: readResponse()
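The summary says writeRequest initiates a call by sending the parameter to the remote server; the sketch below shows that shape using only JDK types. The simplified Call class, the pending-call map, the lock object, and the wire layout (an int id followed by a length-prefixed payload) are assumptions for illustration, not the actual HBase protocol.

```java
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.ConcurrentSkipListMap;

// Illustrative "initiate a call" writer; the wire format here is made up for the example.
final class WriteRequestSketch {
  /** Hypothetical, simplified stand-in for a pending RPC call. */
  static final class Call {
    final int id;
    final byte[] param;
    Call(int id, byte[] param) { this.id = id; this.param = param; }
  }

  private final ConcurrentSkipListMap<Integer, Call> calls = new ConcurrentSkipListMap<>();
  private final Object outLock = new Object();
  private final DataOutputStream out;

  WriteRequestSketch(DataOutputStream out) { this.out = out; }

  /** Register the call so a reader could later match the reply, then write it out. */
  void writeRequest(Call call) throws IOException {
    calls.put(call.id, call);           // track the in-flight call by its id
    synchronized (outLock) {            // one writer at a time on the shared output stream
      out.writeInt(call.id);            // hypothetical framing: call id ...
      out.writeInt(call.param.length);  // ... then payload length ...
      out.write(call.param);            // ... then the serialized request parameter
      out.flush();
    }
  }
}
```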
private void doNotify()

protected void readResponse()
private boolean isFatalConnectionException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse e)
private org.apache.hadoop.ipc.RemoteException createRemoteException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse e)
Parameters: e - exception to be wrapped
Returns: RemoteException made from e

protected boolean markClosed(IOException e)
protected void cleanupCalls(boolean allCalls)
Parameters: allCalls - true for all calls, false for only the calls in timeout
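Finally, a small sketch of the timeout-based cleanup that cleanupCalls(boolean) describes. The Call shape, the timestamp field, the placeholder timeout value, and the way a timed-out call is failed are assumptions for illustration; only the "all calls vs. calls older than a timeout" split comes from the documentation above.

```java
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;

// Illustrative cleanup over a map of pending calls; not the HBase implementation.
final class CleanupCallsSketch {
  /** Hypothetical pending call with just the fields this sketch needs. */
  static final class Call {
    final long startTimeMs = System.currentTimeMillis();
    volatile IOException error;
    void setFailed(IOException e) { this.error = e; }   // hypothetical completion hook
  }

  private final ConcurrentSkipListMap<Integer, Call> calls = new ConcurrentSkipListMap<>();
  private final long callTimeoutMs = 60_000L;            // placeholder timeout

  /** @param allCalls true to fail every pending call, false to fail only the timed-out ones */
  void cleanupCalls(boolean allCalls) {
    long now = System.currentTimeMillis();
    Iterator<Map.Entry<Integer, Call>> it = calls.entrySet().iterator();
    while (it.hasNext()) {
      Call call = it.next().getValue();
      if (allCalls || now - call.startTimeMs > callTimeoutMs) {
        call.setFailed(new IOException("Call cleaned up: connection closed or call timed out"));
        it.remove();                                      // drop it from the pending map
      }
    }
  }
}
```

Using a ConcurrentSkipListMap here mirrors the calls field in the field summary: its iterator tolerates concurrent registration of new calls by writer threads while the cleanup pass removes stale entries.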