/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
 * Verifies that connection attributes supplied when a {@link Connection} is created remain visible
 * server-side on later RPCs — specifically after a large request may have caused netty's allocator
 * to reuse the buffer that originally carried the connection header.
 */
@Tag(ClientTests.TAG)
@Tag(MediumTests.TAG)
public class TestConnectionAttributes {

  /** Attributes sent with the connection header; echoed back by {@link AttributesCoprocessor}. */
  private static final Map<String, byte[]> CONNECTION_ATTRIBUTES = new HashMap<>();
  static {
    CONNECTION_ATTRIBUTES.put("clientId", Bytes.toBytes("foo"));
  }
  private static final byte[] FAMILY = Bytes.toBytes("0");
  private static final TableName TABLE_NAME = TableName.valueOf("testConnectionAttributes");

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static SingleProcessHBaseCluster cluster;

  @BeforeAll
  public static void setUp() throws Exception {
    cluster = TEST_UTIL.startMiniCluster(1);
    // try-with-resources releases the Table handle even if an assertion or later setup throws;
    // the table only exists to host AttributesCoprocessor, so nothing else is done with it.
    try (Table table = TEST_UTIL.createTable(TABLE_NAME, new byte[][] { FAMILY }, 1,
      HConstants.DEFAULT_BLOCKSIZE, AttributesCoprocessor.class.getName())) {
      // no-op: table creation with the coprocessor attached is the whole job
    }
  }

  @AfterAll
  public static void afterClass() throws Exception {
    // Guard against a setUp() failure that left cluster unassigned; shutdownMiniCluster()
    // still runs so the test utility can clean up whatever did start.
    if (cluster != null) {
      cluster.close();
    }
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Issues a Get with a 300-byte row key — large enough to encourage netty's allocator to reuse
   * (and overwrite) the memory that held the connection header — and asserts that the server-side
   * coprocessor still observes every original connection attribute.
   */
  @Test
  public void testConnectionHeaderOverwrittenAttributesRemain() throws IOException {
    Configuration conf = TEST_UTIL.getConfiguration();
    try (Connection conn = ConnectionFactory.createConnection(conf, null,
      AuthUtil.loginClient(conf), CONNECTION_ATTRIBUTES); Table table = conn.getTable(TABLE_NAME)) {

      // Submit a 300 byte rowkey here to encourage netty's allocator to overwrite the
      // connection header.
      byte[] rowKey = new byte[300];
      ThreadLocalRandom.current().nextBytes(rowKey);
      Result result = table.get(new Get(rowKey));

      // The coprocessor returns exactly one cell per connection attribute, keyed by name.
      assertEquals(CONNECTION_ATTRIBUTES.size(), result.size());
      for (Map.Entry<String, byte[]> attr : CONNECTION_ATTRIBUTES.entrySet()) {
        byte[] actual = result.getValue(FAMILY, Bytes.toBytes(attr.getKey()));
        assertEquals(Bytes.toStringBinary(attr.getValue()), Bytes.toStringBinary(actual));
      }
    }
  }

  /**
   * Region coprocessor that short-circuits every Get: instead of reading the region, it returns
   * one synthesized cell per connection attribute attached to the current RPC, letting the client
   * assert exactly which attributes the server saw.
   */
  public static class AttributesCoprocessor implements RegionObserver, RegionCoprocessor {

    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }

    @Override
    public void preGetOp(ObserverContext<? extends RegionCoprocessorEnvironment> c, Get get,
      List<Cell> result) throws IOException {
      // Fail with a descriptive message rather than the bare NoSuchElementException that an
      // unchecked Optional.get() would throw if there were no in-flight RPC.
      RpcCall rpcCall = RpcServer.getCurrentCall()
        .orElseThrow(() -> new IllegalStateException("preGetOp invoked outside of an RPC call"));
      for (Map.Entry<String, byte[]> attr : rpcCall.getConnectionAttributes().entrySet()) {
        result.add(c.getEnvironment().getCellBuilder().clear().setRow(get.getRow())
          .setFamily(FAMILY).setQualifier(Bytes.toBytes(attr.getKey())).setValue(attr.getValue())
          .setType(Cell.Type.Put).setTimestamp(1).build());
      }
      // Synthesized cells were added in map-iteration order; restore CellComparator order
      // before handing the result back.
      result.sort(CellComparator.getInstance());
      // Skip the normal region read entirely — the synthesized cells are the full response.
      c.bypass();
    }
  }
}