/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test that FilterWrapper retains the semantics defined in
 * {@link org.apache.hadoop.hbase.filter.Filter}.
 */
@Category({ FilterTests.class, MediumTests.class })
public class TestFilterWrapper {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestFilterWrapper.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static Configuration conf = null;
  private static Admin admin = null;
  private static TableName name = TableName.valueOf("test");
  private static Connection connection;

  @Test
  public void testFilterWrapper() {
    int kv_number = 0;
    int row_number = 0;
    try {
      Scan scan = new Scan();
      List<Filter> fs = new ArrayList<>();

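      // DependentColumnFilter (f1) keeps only cells whose timestamp matches a cell in the
      // reference column f1:c5 whose value contains the substring "c5"; since dropDependentColumn
      // is true, the reference column itself is not returned. PageFilter (f2) then limits the
      // scan to at most two rows.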
      DependentColumnFilter f1 = new DependentColumnFilter(Bytes.toBytes("f1"), Bytes.toBytes("c5"),
        true, CompareOperator.EQUAL, new SubstringComparator("c5"));
      PageFilter f2 = new PageFilter(2);
      fs.add(f1);
      fs.add(f2);
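      // FilterList defaults to Operator.MUST_PASS_ALL, so a cell is returned only if it passes
      // both filters above.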
      FilterList filter = new FilterList(fs);

      scan.setFilter(filter);
      Table table = connection.getTable(name);
      ResultScanner scanner = table.getScanner(scan);

      // row2 (c1-c4) and row3 (c1-c4) are returned
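      // row1 yields nothing: its c1-c4 timestamps (1-4) never match the reference cell's
      // timestamp (5); row2 and row3 use a single timestamp per row, so their c1-c4 cells match
      // the reference cell and only the dropped reference column c5 is withheld.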
      for (Result result : scanner) {
        row_number++;
        for (Cell kv : result.listCells()) {
          LOG.debug("{}. kv: {}", kv_number, kv);
          kv_number++;
          assertEquals("Returned row is not correct", "row" + (row_number + 1),
            Bytes.toString(CellUtil.cloneRow(kv)));
        }
      }

      scanner.close();
      table.close();
    } catch (Exception e) {
      // no exception is expected; fail the test if the scan throws one
      assertNull("Unexpected exception during scan", e);
    }
    LOG.debug("check the fetched kv number");
    assertEquals("We should get 8 cells returned", 8, kv_number);
    assertEquals("We should get 2 rows returned", 2, row_number);
  }

  private static void prepareData() {
    try {
      Table table = connection.getTable(name);
      assertTrue("Table should exist before loading data", admin.tableExists(name));
      List<Put> puts = new ArrayList<>();

      // row1 => <f1:c1, 1_c1, ts=1>, <f1:c2, 1_c2, ts=2>, <f1:c3, 1_c3, ts=3>,
      //         <f1:c4, 1_c4, ts=4>, <f1:c5, 1_c5, ts=5>
      // row2 => <f1:c1, 2_c1, ts=2>, <f1:c2, 2_c2, ts=2>, <f1:c3, 2_c3, ts=2>,
      //         <f1:c4, 2_c4, ts=2>, <f1:c5, 2_c5, ts=2>
      // row3 => <f1:c1, 3_c1, ts=3>, <f1:c2, 3_c2, ts=3>, <f1:c3, 3_c3, ts=3>,
      //         <f1:c4, 3_c4, ts=3>, <f1:c5, 3_c5, ts=3>
      for (int i = 1; i < 4; i++) {
        Put put = new Put(Bytes.toBytes("row" + i));
        for (int j = 1; j < 6; j++) {
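          // row1 uses distinct timestamps 1..5; row2 and row3 reuse the row index as the
          // timestamp, so every cell in those rows shares the reference column's timestamp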
          long timestamp = j;
          if (i != 1) {
            timestamp = i;
          }
          put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
            Bytes.toBytes(i + "_c" + j));
        }
        puts.add(put);
      }

      table.put(puts);
      table.close();
    } catch (IOException e) {
      assertNull("Exception found while putting data into table", e);
    }
  }

  private static void createTable() {
    assertNotNull("HBaseAdmin was not initialized successfully.", admin);
    if (admin != null) {
      TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(name)
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("f1"))).build();

      try {
        admin.createTable(tableDescriptor);
        assertTrue("Failed to create the table", admin.tableExists(name));
      } catch (IOException e) {
        assertNull("Exception found while creating table", e);
      }
    }
  }

  private static void deleteTable() {
    if (admin != null) {
      try {
        admin.disableTable(name);
        admin.deleteTable(name);
      } catch (IOException e) {
        assertNull("Exception found while deleting the table", e);
      }
    }
  }

  private static void initialize(Configuration conf) {
    TestFilterWrapper.conf = HBaseConfiguration.create(conf);
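    // keep client retries to a minimum so failures surface quickly in the test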
    TestFilterWrapper.conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    try {
      connection = ConnectionFactory.createConnection(TestFilterWrapper.conf);
      admin = TEST_UTIL.getAdmin();
    } catch (MasterNotRunningException e) {
      assertNull("Master is not running", e);
    } catch (ZooKeeperConnectionException e) {
      assertNull("Cannot connect to ZooKeeper", e);
    } catch (IOException e) {
      assertNull("Caught IOException", e);
    }
    createTable();
    prepareData();
  }

  @BeforeClass
  public static void setUp() throws Exception {
    TEST_UTIL.startMiniCluster(1);
    initialize(TEST_UTIL.getConfiguration());
  }

  @AfterClass
  public static void tearDown() throws Exception {
    deleteTable();
    connection.close();
    TEST_UTIL.shutdownMiniCluster();
  }

}