/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * This test verifies that the scenarios illustrated by HBASE-10850 work w.r.t. the essential
 * column family optimization: family 'a' is essential for evaluating the SingleColumnValueFilter,
 * while family 'b' is only joined back in for rows that pass the filter.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestSCVFWithMiniCluster {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSCVFWithMiniCluster.class);

  private static final TableName HBASE_TABLE_NAME = TableName.valueOf("TestSCVFWithMiniCluster");

  private static final byte[] FAMILY_A = Bytes.toBytes("a");
  private static final byte[] FAMILY_B = Bytes.toBytes("b");

  private static final byte[] QUALIFIER_FOO = Bytes.toBytes("foo");
  private static final byte[] QUALIFIER_BAR = Bytes.toBytes("bar");

  private static HBaseTestingUtility util;

  private static Table htable;

  private static Filter scanFilter;

  private int expected = 1;

  @BeforeClass
  public static void setUp() throws Exception {
    util = new HBaseTestingUtility();

    util.startMiniCluster(1);

    Admin admin = util.getAdmin();
    destroy(admin, HBASE_TABLE_NAME);
    create(admin, HBASE_TABLE_NAME, FAMILY_A, FAMILY_B);
    admin.close();
    htable = util.getConnection().getTable(HBASE_TABLE_NAME);

    /* Add some values */
    List<Put> puts = new ArrayList<>();

    /* Add a row with 'a:foo' = false */
    Put put = new Put(Bytes.toBytes("1"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("false"));
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row with 'a:foo' = true */
    put = new Put(Bytes.toBytes("2"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("true"));
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    /* Add a row without the 'a:foo' qualifier */
    put = new Put(Bytes.toBytes("3"));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
    put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
    puts.add(put);

    htable.put(puts);
    /*
     * We want to filter out of the scan all rows that do not have the column 'a:foo' with value
     * 'false'. Only the row with key '1' should be returned by the scan.
     */
    scanFilter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO, CompareOp.EQUAL,
        new BinaryComparator(Bytes.toBytes("false")));
    ((SingleColumnValueFilter) scanFilter).setFilterIfMissing(true);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    htable.close();
    // Shut down the mini cluster started in setUp.
    util.shutdownMiniCluster();
  }

  private void verify(Scan scan) throws IOException {
    ResultScanner scanner = htable.getScanner(scan);
    Iterator<Result> it = scanner.iterator();

    /* Then */
    int count = 0;
    try {
      while (it.hasNext()) {
        it.next();
        count++;
      }
    } finally {
      scanner.close();
    }
    assertEquals(expected, count);
  }

  /**
   * Test the filter by adding all columns of family A to the scan. (OK)
   */
  @Test
  public void scanWithAllQualifiersOfFamilyA() throws IOException {
    /* Given */
    Scan scan = new Scan();
    scan.addFamily(FAMILY_A);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding all columns of families A and B to the scan. (KO: row '3', which
   * lacks the 'a:foo' qualifier, is returned)
   */
  @Test
  public void scanWithAllQualifiersOfBothFamilies() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A and 2 columns of family B to the scan. (KO:
   * row '3', which lacks the 'a:foo' qualifier, is returned)
   */
  @Test
  public void scanWithSpecificQualifiers1() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.addColumn(FAMILY_B, QUALIFIER_FOO);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 1 column of family A (the one used in the filter) and 1 column of
   * family B to the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers2() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_B, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  /**
   * Test the filter by adding 2 columns of family A to the scan. (OK)
   */
  @Test
  public void scanWithSpecificQualifiers3() throws IOException {
    /* When */
    Scan scan = new Scan();
    scan.addColumn(FAMILY_A, QUALIFIER_FOO);
    scan.addColumn(FAMILY_A, QUALIFIER_BAR);
    scan.setFilter(scanFilter);

    verify(scan);
  }

  private static void create(Admin admin, TableName tableName, byte[]... families)
      throws IOException {
    HTableDescriptor desc = new HTableDescriptor(tableName);
    for (byte[] family : families) {
      HColumnDescriptor colDesc = new HColumnDescriptor(family);
      colDesc.setMaxVersions(1);
      colDesc.setCompressionType(Algorithm.GZ);
      desc.addFamily(colDesc);
    }
    try {
      admin.createTable(desc);
    } catch (TableExistsException tee) {
      /* Ignore */
    }
  }

  private static void destroy(Admin admin, TableName tableName) throws IOException {
    try {
      admin.disableTable(tableName);
      admin.deleteTable(tableName);
    } catch (TableNotFoundException tnfe) {
      /* Ignore */
    }
  }
}