/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

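/**
 * Tests {@link TableRecordReaderImpl} against a scan that is throttled down to at most one cell
 * per RPC and that asks the server for cursor results. With the short scanner/RPC timeouts
 * configured below, the reader should still see every stored cell exactly once rather than
 * timing out or stopping early on a cursor-only response.
 */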
@Category(MediumTests.class)
public class TestTableRecordReader {
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestTableRecordReader.class);

  private static TableName TABLE_NAME = TableName.valueOf("TestTableRecordReader");

  // Fixture dimensions: 5 rows x 2 families x 2 qualifiers = 20 cells in total.
  private static int NUM_ROWS = 5;
  private static byte[] ROW = Bytes.toBytes("testRow");
  private static byte[][] ROWS = HTestConst.makeNAscii(ROW, NUM_ROWS);

  private static int NUM_FAMILIES = 2;
  private static byte[] FAMILY = Bytes.toBytes("testFamily");
  private static byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, NUM_FAMILIES);

  private static int NUM_QUALIFIERS = 2;
  private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");
  private static byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, NUM_QUALIFIERS);

  private static int VALUE_SIZE = 10;
  private static byte[] VALUE = Bytes.createMaxByteArray(VALUE_SIZE);

  // Client scanner and RPC timeout, in milliseconds. Kept short so the test fails fast if the
  // scanner stalls instead of heartbeating.
  private static final int TIMEOUT = 4000;
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();

    conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, TIMEOUT);
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, TIMEOUT);

    // Check the timeout condition after every cell so the server sends heartbeat (and hence
    // cursor) responses as early as possible.
    conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 1);
    TEST_UTIL.startMiniCluster(1);

    createTestTable(TABLE_NAME, ROWS, FAMILIES, QUALIFIERS, VALUE);
  }

  private static void createTestTable(TableName name, byte[][] rows, byte[][] families,
      byte[][] qualifiers, byte[] cellValue) throws IOException {
    TEST_UTIL.createTable(name, families).put(createPuts(rows, families, qualifiers, cellValue));
  }

  private static List<Put> createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers,
      byte[] value) throws IOException {
    List<Put> puts = new ArrayList<>();
    for (int row = 0; row < rows.length; row++) {
      Put put = new Put(rows[row]);
      for (int fam = 0; fam < families.length; fam++) {
        for (int qual = 0; qual < qualifiers.length; qual++) {
          // The qualifier index doubles as the timestamp, giving every cell in a row a
          // distinct, predictable timestamp.
          KeyValue kv = new KeyValue(rows[row], families[fam], qualifiers[qual], qual, value);
          put.add(kv);
        }
      }
      puts.add(put);
    }
    return puts;
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

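  /**
   * Scans the whole table through the record reader. maxResultSize(1) makes the server return
   * after every cell, and needCursorResult(true) lets time-limited responses carry only a
   * cursor, so the reader has to ride over those cursor-only responses and still hand back
   * exactly one value per stored cell.
   */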
  @Test
  public void test() throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
        Table table = conn.getTable(TABLE_NAME)) {
      TableRecordReaderImpl trr = new TableRecordReaderImpl();
      // Force the smallest possible responses: at most one cell per RPC, with cursor results
      // enabled so the server can answer with just a position marker when the time limit hits.
      Scan scan =
          new Scan().setMaxResultSize(1).setCaching(Integer.MAX_VALUE).setNeedCursorResult(true);
      trr.setScan(scan);
      trr.setHTable(table);
      trr.initialize(null, null);
      int num = 0;
      while (trr.nextKeyValue()) {
        num++;
      }
      assertEquals(NUM_ROWS * NUM_FAMILIES * NUM_QUALIFIERS, num);
    }
  }
}