/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Verifies that the scenarios illustrated by HBASE-10850 work with respect to the essential
 * column family optimization.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestSCVFWithMiniCluster {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestSCVFWithMiniCluster.class);

  private static final TableName HBASE_TABLE_NAME = TableName.valueOf("TestSCVFWithMiniCluster");

  private static final byte[] FAMILY_A = Bytes.toBytes("a");
  private static final byte[] FAMILY_B = Bytes.toBytes("b");

  private static final byte[] QUALIFIER_FOO = Bytes.toBytes("foo");
  private static final byte[] QUALIFIER_BAR = Bytes.toBytes("bar");

  private static HBaseTestingUtility util;

  private static Table htable;

  private static Filter scanFilter;

  private int expected = 1;

  @BeforeClass
  public static void setUp() throws Exception {
    util = new HBaseTestingUtility();

    util.startMiniCluster(1);

    Admin admin = util.getAdmin();
    destroy(admin, HBASE_TABLE_NAME);
    create(admin, HBASE_TABLE_NAME, FAMILY_A, FAMILY_B);
    admin.close();
    htable = util.getConnection().getTable(HBASE_TABLE_NAME);

    /* Add some values */
    List<Put> puts = new ArrayList<>();

    /* Add a row with 'a:foo' = false */
    Put put = new Put(Bytes.toBytes("1"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("false"));
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row with 'a:foo' = true */
    put = new Put(Bytes.toBytes("2"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("true"));
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row with the 'a:foo' qualifier not set */
    put = new Put(Bytes.toBytes("3"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    htable.put(puts);
    /*
     * We want to filter out of the scan all rows that do not have the column 'a:foo' with value
     * 'false'. Only the row with key '1' should be returned by the scan.
     */
    SingleColumnValueFilter filter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO,
      CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("false")));
    filter.setFilterIfMissing(true);
    scanFilter = filter;
  }

  @AfterClass
  public static void tearDown() throws Exception {
    htable.close();
    // Shut down the mini cluster started in setUp so the test does not leak processes.
    util.shutdownMiniCluster();
  }

  /**
   * Runs the scan and asserts that exactly {@code expected} rows are returned.
   */
  private void verify(Scan scan) throws IOException {
    ResultScanner scanner = htable.getScanner(scan);
    Iterator<Result> it = scanner.iterator();

    /* Then */
    int count = 0;
    try {
      while (it.hasNext()) {
        it.next();
        count++;
      }
    } finally {
      scanner.close();
    }
    assertEquals(expected, count);
  }

  /**
   * Test the filter by adding all columns of family A in the scan. (OK)
   */
  @Test
  public void scanWithAllQualifiersOfFamilyA() throws IOException {
    /* Given */
    Scan scan = new Scan();
    scan.addFamily(FAMILY_A);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding all columns of families A and B in the scan. (KO: row '3', which
   * has no 'a:foo' qualifier, is returned)
   */
  @Test
  public void scanWithAllQualifiersOfBothFamilies() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A and 2 columns of family B in the scan.
   * (KO: row '3', which has no 'a:foo' qualifier, is returned)
   */
  @Test
  public void scanWithSpecificQualifiers1() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_FOO);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 1 column of family A (the one used in the filter) and 1 column of
   * family B in the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers2() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A in the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers3() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Creates the table with the given families; an already existing table is left as-is.
   */
  private static void create(Admin admin, TableName tableName, byte[]... families)
    throws IOException {
    HTableDescriptor desc = new HTableDescriptor(tableName);
    for (byte[] family : families) {
      HColumnDescriptor colDesc = new HColumnDescriptor(family);
      colDesc.setMaxVersions(1);
      colDesc.setCompressionType(Algorithm.GZ);
      desc.addFamily(colDesc);
    }
    try {
      admin.createTable(desc);
    } catch (TableExistsException tee) {
      /* Ignore */
    }
  }

  /**
   * Disables and deletes the table if it exists.
   */
  private static void destroy(Admin admin, TableName tableName) throws IOException {
    try {
      admin.disableTable(tableName);
      admin.deleteTable(tableName);
    } catch (TableNotFoundException tnfe) {
      /* Ignore */
    }
  }
}