/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
 * Verifies that {@link TableRecordReaderImpl} iterates every cell of a table correctly when the
 * server is forced to return cursor/heartbeat results (tiny {@code maxResultSize}, per-cell
 * heartbeat checks, short scanner timeout) instead of full result batches.
 */
@Tag(MediumTests.TAG)
public class TestTableRecordReader {

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

  private static final TableName TABLE_NAME = TableName.valueOf("TestTableRecordReader");

  private static final int NUM_ROWS = 5;
  private static final byte[] ROW = Bytes.toBytes("testRow");
  private static final byte[][] ROWS = HTestConst.makeNAscii(ROW, NUM_ROWS);

  private static final int NUM_FAMILIES = 2;
  private static final byte[] FAMILY = Bytes.toBytes("testFamily");
  private static final byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, NUM_FAMILIES);

  private static final int NUM_QUALIFIERS = 2;
  private static final byte[] QUALIFIER = Bytes.toBytes("testQualifier");
  private static final byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, NUM_QUALIFIERS);

  private static final int VALUE_SIZE = 10;
  private static final byte[] VALUE = Bytes.createMaxByteArray(VALUE_SIZE);

  /** Client scanner / RPC timeout, in milliseconds; deliberately short (see setup). */
  private static final int TIMEOUT = 4000;

  @BeforeAll
  public static void setUpBeforeClass() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();

    // Short timeouts pressure the server into sending heartbeat/cursor results back to the
    // client rather than scanning to completion within a single RPC.
    conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, TIMEOUT);
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, TIMEOUT);

    // Check the timeout condition after every cell
    conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 1);
    TEST_UTIL.startMiniCluster(1);

    createTestTable(TABLE_NAME, ROWS, FAMILIES, QUALIFIERS, VALUE);
  }

  /**
   * Creates table {@code name} with the given families and loads one {@link Put} per row.
   * <p>
   * Fix: the original chained {@code createTable(...).put(...)} and leaked the {@link Table}
   * handle; it is now closed via try-with-resources once the data is loaded.
   */
  private static void createTestTable(TableName name, byte[][] rows, byte[][] families,
    byte[][] qualifiers, byte[] cellValue) throws IOException {
    try (Table table = TEST_UTIL.createTable(name, families)) {
      table.put(createPuts(rows, families, qualifiers, cellValue));
    }
  }

  /**
   * Builds one {@link Put} per row containing every family/qualifier combination.
   *
   * @param rows       row keys, one resulting {@link Put} each
   * @param families   column families written into every row
   * @param qualifiers qualifiers written into every family
   * @param value      cell value shared by all cells
   * @return the list of puts, in row order
   */
  private static List<Put> createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers,
    byte[] value) throws IOException {
    List<Put> puts = new ArrayList<>(rows.length);
    for (int row = 0; row < rows.length; row++) {
      Put put = new Put(rows[row]);
      for (int fam = 0; fam < families.length; fam++) {
        for (int qual = 0; qual < qualifiers.length; qual++) {
          // The qualifier index doubles as the cell timestamp, giving deterministic versions.
          KeyValue kv = new KeyValue(rows[row], families[fam], qualifiers[qual], qual, value);
          put.add(kv);
        }
      }
      puts.add(put);
    }
    return puts;
  }

  @AfterAll
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Scans the whole table through {@link TableRecordReaderImpl} with {@code maxResultSize=1} and
   * cursor results enabled, and asserts that every cell is surfaced exactly once — i.e. cursor
   * results are handled transparently and are not double-counted or dropped.
   */
  @Test
  public void test() throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
      Table table = conn.getTable(TABLE_NAME)) {
      // Same package — no need for the fully qualified class name here.
      TableRecordReaderImpl trr = new TableRecordReaderImpl();
      // maxResultSize=1 forces a server response per cell; the huge caching value ensures the
      // client keeps asking for more, so the reader must cope with a stream of cursor results.
      Scan scan =
        new Scan().setMaxResultSize(1).setCaching(Integer.MAX_VALUE).setNeedCursorResult(true);
      trr.setScan(scan);
      trr.setHTable(table);
      trr.initialize(null, null);
      try {
        int num = 0;
        while (trr.nextKeyValue()) {
          num++;
        }
        assertEquals(NUM_ROWS * NUM_FAMILIES * NUM_QUALIFIERS, num);
      } finally {
        // Fix: the original never closed the reader, leaking the underlying scanner.
        trr.close();
      }
    }
  }
}