/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

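/**
 * Verifies that connection attributes supplied when a connection is created are delivered to the
 * server and stay readable via {@link RpcCall#getConnectionAttributes()} for requests on that
 * connection, even after the buffer that carried the connection header may have been reused.
 */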
@Category({ ClientTests.class, MediumTests.class })
public class TestConnectionAttributes {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestConnectionAttributes.class);

  private static final Map<String, byte[]> CONNECTION_ATTRIBUTES = new HashMap<>();
  static {
    CONNECTION_ATTRIBUTES.put("clientId", Bytes.toBytes("foo"));
  }
  private static final byte[] FAMILY = Bytes.toBytes("0");
  private static final TableName TABLE_NAME = TableName.valueOf("testConnectionAttributes");

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static SingleProcessHBaseCluster cluster;

  @BeforeClass
  public static void setUp() throws Exception {
    cluster = TEST_UTIL.startMiniCluster(1);
    Table table = TEST_UTIL.createTable(TABLE_NAME, new byte[][] { FAMILY }, 1,
      HConstants.DEFAULT_BLOCKSIZE, TestConnectionAttributes.AttributesCoprocessor.class.getName());
    table.close();
  }

  @AfterClass
  public static void afterClass() throws Exception {
    cluster.close();
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testConnectionHeaderOverwrittenAttributesRemain() throws IOException {
    Configuration conf = TEST_UTIL.getConfiguration();
    try (Connection conn = ConnectionFactory.createConnection(conf, null,
      AuthUtil.loginClient(conf), CONNECTION_ATTRIBUTES); Table table = conn.getTable(TABLE_NAME)) {

      // submit a 300 byte rowkey here to encourage netty's allocator to overwrite the connection
      // header
      byte[] bytes = new byte[300];
      new Random().nextBytes(bytes);
      Result result = table.get(new Get(bytes));

      assertEquals(CONNECTION_ATTRIBUTES.size(), result.size());
      for (Map.Entry<String, byte[]> attr : CONNECTION_ATTRIBUTES.entrySet()) {
        byte[] val = result.getValue(FAMILY, Bytes.toBytes(attr.getKey()));
        assertEquals(Bytes.toStringBinary(attr.getValue()), Bytes.toStringBinary(val));
      }
    }
  }

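  /**
   * Region observer that intercepts Get requests and echoes the connection attributes of the
   * current RPC back to the client as one cell per attribute, keyed by the attribute name.
   */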
  public static class AttributesCoprocessor implements RegionObserver, RegionCoprocessor {

    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }

    @Override
    public void preGetOp(ObserverContext<? extends RegionCoprocessorEnvironment> c, Get get,
      List<Cell> result) throws IOException {
      // Read the attributes from the RPC connection handling this Get and return them as cells.
      RpcCall rpcCall = RpcServer.getCurrentCall().get();
      for (Map.Entry<String, byte[]> attr : rpcCall.getConnectionAttributes().entrySet()) {
        result.add(c.getEnvironment().getCellBuilder().clear().setRow(get.getRow())
          .setFamily(FAMILY).setQualifier(Bytes.toBytes(attr.getKey())).setValue(attr.getValue())
          .setType(Cell.Type.Put).setTimestamp(1).build());
      }
      result.sort(CellComparator.getInstance());
      // Skip the normal region read; the response is built entirely from the attributes above.
      c.bypass();
    }
  }
}