/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.ipc;

import java.io.IOException;

import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;

import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;

/**
 * Provides clients with an RPC connection to call coprocessor endpoint
 * {@link com.google.protobuf.Service}s against the active master.  An instance of this class may
 * be obtained by calling {@link org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService()},
 * but it should normally only be used to create a new {@link com.google.protobuf.Service} stub
 * for calling the endpoint methods, as in the sketch below.
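 * <p>
 * A typical usage sketch, given a {@code Configuration conf} (illustrative only; {@code PingService},
 * {@code PingRequest} and {@code PingResponse} stand in for classes generated by protoc from a
 * coprocessor endpoint deployed on the master and are not part of HBase):
 * <pre>
 * try (Connection conn = ConnectionFactory.createConnection(conf);
 *      Admin admin = conn.getAdmin()) {
 *   CoprocessorRpcChannel channel = admin.coprocessorService();
 *   PingService.BlockingInterface ping = PingService.newBlockingStub(channel);
 *   PingResponse resp = ping.ping(null, PingRequest.getDefaultInstance());
 * }
 * </pre>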
 * @see org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService()
 */
@InterfaceAudience.Private
public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel {
  private static final Log LOG = LogFactory.getLog(MasterCoprocessorRpcChannel.class);

  private final ClusterConnection connection;

  public MasterCoprocessorRpcChannel(ClusterConnection conn) {
    this.connection = conn;
  }

  @Override
  protected Message callExecService(RpcController controller, Descriptors.MethodDescriptor method,
      Message request, Message responsePrototype) throws IOException {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Call: " + method.getName() + ", " + request.toString());
    }

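    // Master endpoints are not scoped to a table region, so the call carries an empty row key.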
    final ClientProtos.CoprocessorServiceCall call =
        ClientProtos.CoprocessorServiceCall.newBuilder()
            .setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
            .setServiceName(method.getService().getFullName())
            .setMethodName(method.getName())
            .setRequest(request.toByteString()).build();

    // TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
    CoprocessorServiceResponse result = ProtobufUtil.execService(controller,
        connection.getMaster(), call);
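    // Unpack the response: if the endpoint returned any bytes, merge them into the supplied
    // response prototype; otherwise fall back to the prototype's default instance.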
    Message response = null;
    if (result.getValue().hasValue()) {
      Message.Builder builder = responsePrototype.newBuilderForType();
      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
      response = builder.build();
    } else {
      response = responsePrototype.getDefaultInstanceForType();
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("Master Result is value=" + response);
    }
    return response;
  }

}