/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test that FilterWrapper retains the same semantics as defined in
 * {@link org.apache.hadoop.hbase.filter.Filter}.
 */
@Category({FilterTests.class, MediumTests.class})
public class TestFilterWrapper {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestFilterWrapper.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf = null;
  private static Admin admin = null;
  private static TableName name = TableName.valueOf("test");
  private static Connection connection;

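  /**
   * Scans the test table with a {@link FilterList} combining a {@link DependentColumnFilter} on
   * f1:c5 (dropping the dependent column) and a {@link PageFilter} limited to two rows, and
   * verifies that only row2 and row3 are returned, each with columns c1-c4 (8 cells in total).
   */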
  @Test
  public void testFilterWrapper() {
    int kv_number = 0;
    int row_number = 0;
    try {
      Scan scan = new Scan();
      List<Filter> fs = new ArrayList<>();

      DependentColumnFilter f1 = new DependentColumnFilter(Bytes.toBytes("f1"),
          Bytes.toBytes("c5"), true, CompareOperator.EQUAL,
          new SubstringComparator("c5"));
      PageFilter f2 = new PageFilter(2);
      fs.add(f1);
      fs.add(f2);
      FilterList filter = new FilterList(fs);

      scan.setFilter(filter);
      Table table = connection.getTable(name);
      ResultScanner scanner = table.getScanner(scan);

      // row2 (c1-c4) and row3 (c1-c4) are returned
      for (Result result : scanner) {
        row_number++;
        for (Cell kv : result.listCells()) {
          LOG.debug(kv_number + ". kv: " + kv);
          kv_number++;
          assertEquals("Returned row is not correct", "row" + (row_number + 1),
              Bytes.toString(CellUtil.cloneRow(kv)));
        }
      }

      scanner.close();
      table.close();
    } catch (Exception e) {
      // no exception is expected; fail the test if one occurred
      assertNull("Exception happens in scan", e);
    }
    LOG.debug("check the fetched kv number");
    assertEquals("We should get 8 results returned.", 8, kv_number);
    assertEquals("We should get 2 rows returned", 2, row_number);
  }

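  /**
   * Loads rows row1-row3 into column family f1, columns c1-c5. row1 uses a distinct timestamp
   * per column (1-5), while row2 and row3 use a single timestamp per row (2 and 3 respectively),
   * as described in the comments below.
   */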
  private static void prepareData() {
    try {
      Table table = connection.getTable(name);
      assertTrue("Failed to create the table", admin.tableExists(name));
      List<Put> puts = new ArrayList<>();

      // row1 => <f1:c1, 1_c1, ts=1>, <f1:c2, 1_c2, ts=2>, <f1:c3, 1_c3, ts=3>,
      //         <f1:c4, 1_c4, ts=4>, <f1:c5, 1_c5, ts=5>
      // row2 => <f1:c1, 2_c1, ts=2>, <f1:c2, 2_c2, ts=2>, <f1:c3, 2_c3, ts=2>,
      //         <f1:c4, 2_c4, ts=2>, <f1:c5, 2_c5, ts=2>
      // row3 => <f1:c1, 3_c1, ts=3>, <f1:c2, 3_c2, ts=3>, <f1:c3, 3_c3, ts=3>,
      //         <f1:c4, 3_c4, ts=3>, <f1:c5, 3_c5, ts=3>
      for (int i = 1; i < 4; i++) {
        Put put = new Put(Bytes.toBytes("row" + i));
        for (int j = 1; j < 6; j++) {
          long timestamp = j;
          if (i != 1) {
            timestamp = i;
          }
          put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
              Bytes.toBytes(i + "_c" + j));
        }
        puts.add(put);
      }

      table.put(puts);
      table.close();
    } catch (IOException e) {
      assertNull("Exception found while putting data into table", e);
    }
  }

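  /** Creates the test table with a single column family {@code f1}. */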
  private static void createTable() {
    assertNotNull("HBaseAdmin was not initialized successfully.", admin);

    HTableDescriptor desc = new HTableDescriptor(name);
    HColumnDescriptor coldef = new HColumnDescriptor(Bytes.toBytes("f1"));
    desc.addFamily(coldef);

    try {
      admin.createTable(desc);
      assertTrue("Failed to create the table", admin.tableExists(name));
    } catch (IOException e) {
      assertNull("Exception found while creating table", e);
    }
  }

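  /** Disables and deletes the test table, if the admin handle was initialized. */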
  private static void deleteTable() {
    if (admin != null) {
      try {
        admin.disableTable(name);
        admin.deleteTable(name);
      } catch (IOException e) {
        assertNull("Exception found deleting the table", e);
      }
    }
  }

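  /**
   * Creates a client configuration with a single retry, opens the shared connection and admin
   * handle, then creates the test table and loads the test data.
   */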
  private static void initialize(Configuration conf) {
    TestFilterWrapper.conf = HBaseConfiguration.create(conf);
    TestFilterWrapper.conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    try {
      connection = ConnectionFactory.createConnection(TestFilterWrapper.conf);
      admin = TEST_UTIL.getAdmin();
    } catch (MasterNotRunningException e) {
      assertNull("Master is not running", e);
    } catch (ZooKeeperConnectionException e) {
      assertNull("Cannot connect to ZooKeeper", e);
    } catch (IOException e) {
      assertNull("Caught IOException", e);
    }
    createTable();
    prepareData();
  }

  @BeforeClass
  public static void setUp() throws Exception {
    TEST_UTIL.startMiniCluster(1);
    initialize(TEST_UTIL.getConfiguration());
  }

  @AfterClass
  public static void tearDown() throws Exception {
    deleteTable();
    connection.close();
    TEST_UTIL.shutdownMiniCluster();
  }

}