/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.junit.experimental.categories.Category;

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

/**
 * A large test which loads a lot of data with cell visibility and then verifies it. The test
 * creates two users, each authorized for a different set of visibility labels. Every row (and thus
 * every cell in it) is written with a visibility expression. In the load step, 200 map tasks are
 * launched, which in turn write loadmapper.num_to_write (default 100K) rows to an HBase table.
 * Rows are written in blocks, for a total of 100 blocks. The verify step scans the table as each
 * user, with that user's Authorizations, and asserts that a user sees only those rows (and cells)
 * whose visibility expressions match the labels the user is authorized for. This class can be run
 * as a unit test, as an integration test, or from the command line. Originally taken from Apache
 * Bigtop. Pass user names as a comma-separated list:
 * ./hbase IntegrationTestWithCellVisibilityLoadAndVerify -u usera,userb
 */
@Category(IntegrationTests.class)
public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationTestLoadAndVerify {
  private static final String ERROR_STR =
    "Two user names must be specified, separated by a ',' like 'usera,userb'";
  private static final char NOT = '!';
  private static final char OR = '|';
  private static final char AND = '&';
  private static final String TEST_NAME = "IntegrationTestCellVisibilityLoadAndVerify";
  private static final String CONFIDENTIAL = "confidential";
  private static final String TOPSECRET = "topsecret";
  private static final String SECRET = "secret";
  private static final String PUBLIC = "public";
  private static final String PRIVATE = "private";
  private static final String[] LABELS = { CONFIDENTIAL, TOPSECRET, SECRET, PRIVATE, PUBLIC };
  private static final String[] VISIBILITY_EXPS =
    { CONFIDENTIAL + AND + TOPSECRET + AND + PRIVATE, CONFIDENTIAL + OR + TOPSECRET, PUBLIC,
      '(' + SECRET + OR + PRIVATE + ')' + AND + NOT + CONFIDENTIAL };
  private static final int VISIBILITY_EXPS_COUNT = VISIBILITY_EXPS.length;
  private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
  private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
  private static final String NUM_TO_WRITE_KEY = "loadmapper.num_to_write";
  private static final long NUM_TO_WRITE_DEFAULT = 100 * 1000;
  private static final int SCANNER_CACHING = 500;
  private static String USER_OPT = "users";
  private static String userNames = "user1,user2";

  private long numRowsLoadedWithExp1, numRowsLoadedWithExp2, numRowsLoadedWithExp3,
    numRowsLoadedWithExp4;
  private long numRowsReadWithExp1, numRowsReadWithExp2, numRowsReadWithExp3, numRowsReadWithExp4;

  private static User USER1, USER2;

  private enum Counters {
    ROWS_VIS_EXP_1,
    ROWS_VIS_EXP_2,
    ROWS_VIS_EXP_3,
    ROWS_VIS_EXP_4
  }

  @Override
  public void setUpCluster() throws Exception {
    util = getTestingUtil(null);
    Configuration conf = util.getConfiguration();
    VisibilityTestUtil.enableVisiblityLabels(conf);
    conf.set("hbase.superuser", User.getCurrent().getName());
    conf.setBoolean("dfs.permissions", false);
    super.setUpCluster();
    String[] users = userNames.split(",");
    if (users.length != 2) {
      System.err.println(ERROR_STR);
      throw new IOException(ERROR_STR);
    }
    System.out.println(userNames + " " + users[0] + " " + users[1]);
    USER1 = User.createUserForTesting(conf, users[0], new String[] {});
    USER2 = User.createUserForTesting(conf, users[1], new String[] {});
    addLabelsAndAuths();
  }

  @Override
  protected void addOptions() {
    super.addOptions();
    addOptWithArg("u", USER_OPT, "User names to be passed");
  }

  private void addLabelsAndAuths() throws Exception {
    try {
      VisibilityClient.addLabels(util.getConnection(), LABELS);
      VisibilityClient.setAuths(util.getConnection(),
        new String[] { CONFIDENTIAL, TOPSECRET, SECRET, PRIVATE }, USER1.getName());
      VisibilityClient.setAuths(util.getConnection(), new String[] { PUBLIC }, USER2.getName());
    } catch (Throwable t) {
      throw new IOException(t);
    }
  }

  public static class LoadWithCellVisibilityMapper extends LoadMapper {
    private Counter rowsExp1, rowsExp2, rowsExp3, rowsExp4;

    @Override
    public void setup(Context context) throws IOException {
      super.setup(context);
      rowsExp1 = context.getCounter(Counters.ROWS_VIS_EXP_1);
      rowsExp2 = context.getCounter(Counters.ROWS_VIS_EXP_2);
      rowsExp3 = context.getCounter(Counters.ROWS_VIS_EXP_3);
      rowsExp4 = context.getCounter(Counters.ROWS_VIS_EXP_4);
    }

    @Override
    protected void map(NullWritable key, NullWritable value, Context context)
      throws IOException, InterruptedException {
      String suffix = "/" + shortTaskId;
      int BLOCK_SIZE = (int) (recordsToWrite / 100);
      Random rand = ThreadLocalRandom.current();
      for (long i = 0; i < recordsToWrite;) {
        for (long idx = 0; idx < BLOCK_SIZE && i < recordsToWrite; idx++, i++) {
          int expIdx = rand.nextInt(VISIBILITY_EXPS_COUNT);
          String exp = VISIBILITY_EXPS[expIdx];
          byte[] row = Bytes.add(Bytes.toBytes(i), Bytes.toBytes(suffix), Bytes.toBytes(exp));
          Put p = new Put(row);
          p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
          p.setCellVisibility(new CellVisibility(exp));
          getCounter(expIdx).increment(1);
          mutator.mutate(p);

          if (i % 100 == 0) {
            context.setStatus("Written " + i + "/" + recordsToWrite + " records");
            context.progress();
          }
        }
        // End of a block; flush the buffered mutations before starting the next block.
        mutator.flush();
      }
    }

    private Counter getCounter(int idx) {
      switch (idx) {
        case 0:
          return rowsExp1;
        case 1:
          return rowsExp2;
        case 2:
          return rowsExp3;
        case 3:
          return rowsExp4;
        default:
          return null;
      }
    }
  }

  public static class VerifyMapper extends TableMapper<BytesWritable, BytesWritable> {
    private Counter rowsExp1, rowsExp2, rowsExp3, rowsExp4;

    @Override
    public void setup(Context context) throws IOException {
      rowsExp1 = context.getCounter(Counters.ROWS_VIS_EXP_1);
      rowsExp2 = context.getCounter(Counters.ROWS_VIS_EXP_2);
      rowsExp3 = context.getCounter(Counters.ROWS_VIS_EXP_3);
      rowsExp4 = context.getCounter(Counters.ROWS_VIS_EXP_4);
    }

    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
      throws IOException, InterruptedException {
      byte[] row = value.getRow();
      Counter c = getCounter(row);
      c.increment(1);
    }

    private Counter getCounter(byte[] row) {
      // The row key embeds the visibility expression it was written with; count by expression.
      Counter c = null;
      if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[0])) != -1) {
        c = rowsExp1;
      } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[1])) != -1) {
        c = rowsExp2;
      } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[2])) != -1) {
        c = rowsExp3;
      } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[3])) != -1) {
        c = rowsExp4;
      }
      return c;
    }
  }

  @Override
  protected Job doLoad(Configuration conf, TableDescriptor htd) throws Exception {
    Job job = super.doLoad(conf, htd);
    this.numRowsLoadedWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
    this.numRowsLoadedWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
    this.numRowsLoadedWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
    this.numRowsLoadedWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
    System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[0] + " : "
      + this.numRowsLoadedWithExp1);
    System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[1] + " : "
      + this.numRowsLoadedWithExp2);
    System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[2] + " : "
      + this.numRowsLoadedWithExp3);
    System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[3] + " : "
      + this.numRowsLoadedWithExp4);
    return job;
  }

  @Override
  protected void setMapperClass(Job job) {
    job.setMapperClass(LoadWithCellVisibilityMapper.class);
  }

  @Override
  protected void doVerify(final Configuration conf, final TableDescriptor tableDescriptor)
    throws Exception {
    // USER1 scans with all of its auths; only the rows written with expressions 1 and 2 are
    // visible to it.
    System.out.println(String.format("Verifying for auths %s, %s, %s, %s", CONFIDENTIAL, TOPSECRET,
      SECRET, PRIVATE));
    PrivilegedExceptionAction<Job> scanAction = new PrivilegedExceptionAction<Job>() {
      @Override
      public Job run() throws Exception {
        return doVerify(conf, tableDescriptor, CONFIDENTIAL, TOPSECRET, SECRET, PRIVATE);
      }
    };
    Job job = USER1.runAs(scanAction);
    this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
    this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
    this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
    this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
    assertEquals(this.numRowsLoadedWithExp1, this.numRowsReadWithExp1);
    assertEquals(this.numRowsLoadedWithExp2, this.numRowsReadWithExp2);
    assertEquals(0, this.numRowsReadWithExp3);
    assertEquals(0, this.numRowsReadWithExp4);

    // The PUBLIC label auth is not provided for USER1, so only PRIVATE is effective here and only
    // the rows written with expression 4 are visible.
    System.out.println(String.format("Verifying for auths %s, %s", PRIVATE, PUBLIC));
    scanAction = new PrivilegedExceptionAction<Job>() {
      @Override
      public Job run() throws Exception {
        return doVerify(conf, tableDescriptor, PRIVATE, PUBLIC);
      }
    };
    job = USER1.runAs(scanAction);
    this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
    this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
    this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
    this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
    assertEquals(0, this.numRowsReadWithExp1);
    assertEquals(0, this.numRowsReadWithExp2);
    assertEquals(0, this.numRowsReadWithExp3);
    assertEquals(this.numRowsLoadedWithExp4, this.numRowsReadWithExp4);

    // USER2 has only the PUBLIC label auth and can see only the rows written with expression 3.
    System.out.println(String.format("Verifying for auths %s, %s", PRIVATE, PUBLIC));
    scanAction = new PrivilegedExceptionAction<Job>() {
      @Override
      public Job run() throws Exception {
        return doVerify(conf, tableDescriptor, PRIVATE, PUBLIC);
      }
    };
    job = USER2.runAs(scanAction);
    this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
    this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
    this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
    this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
    assertEquals(0, this.numRowsReadWithExp1);
    assertEquals(0, this.numRowsReadWithExp2);
    assertEquals(this.numRowsLoadedWithExp3, this.numRowsReadWithExp3);
    assertEquals(0, this.numRowsReadWithExp4);
  }

  private Job doVerify(Configuration conf, TableDescriptor tableDescriptor, String... auths)
    throws IOException, InterruptedException, ClassNotFoundException {
    Path outputDir = getTestDir(TEST_NAME, "verify-output");
    Job job = Job.getInstance(conf);
    job.setJarByClass(this.getClass());
    job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
    setJobScannerConf(job);
    Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations(auths));
    TableMapReduceUtil.initTableMapperJob(tableDescriptor.getTableName().getNameAsString(), scan,
      VerifyMapper.class, NullWritable.class, NullWritable.class, job);
    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
    int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
    TableMapReduceUtil.setScannerCaching(job, scannerCaching);
    job.setNumReduceTasks(0);
    FileOutputFormat.setOutputPath(job, outputDir);
    assertTrue(job.waitForCompletion(true));
    return job;
  }

  private static void setJobScannerConf(Job job) {
    // Log scan progress once per 1% of the number of rows each load mapper wrote.
    long lpr = job.getConfiguration().getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT) / 100;
    job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, (int) lpr);
  }

  @Override
  public void printUsage() {
    System.err.println(this.getClass().getSimpleName() + " -u usera,userb [-Doptions]");
    System.err.println(" Loads a table with cell visibilities and verifies with Authorizations");
    System.err.println("Options");
    System.err
      .println("  -Dloadmapper.table=<name>      Table to write/verify (default autogen)");
    System.err.println("  -Dloadmapper.num_to_write=<n>  "
      + "Number of rows per mapper (default 100,000 per mapper)");
    System.err.println("  -Dloadmapper.numPresplits=<n>  "
      + "Number of presplit regions to start with (default 5)");
    System.err
      .println("  -Dloadmapper.map.tasks=<n>     Number of map tasks for load (default 200)");
    System.err.println("  -Dverify.scannercaching=<n>    "
      + "Number of rows for HBase scanner caching during verify (default 500)");
  }

  @Override
  public int runTestFromCommandLine() throws Exception {
    IntegrationTestingUtility.setUseDistributedCluster(getConf());
    int numPresplits = getConf().getInt("loadmapper.numPresplits", 5);
    // Create the TableDescriptor for the specified table
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(getTablename())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();

    try (Connection conn = ConnectionFactory.createConnection(getConf());
      Admin admin = conn.getAdmin()) {
      admin.createTable(tableDescriptor, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPresplits);
    }
    doLoad(getConf(), tableDescriptor);
    doVerify(getConf(), tableDescriptor);
    getTestingUtil(getConf()).deleteTable(getTablename());
    return 0;
  }

  @SuppressWarnings("unchecked")
  @Override
  protected void processOptions(CommandLine cmd) {
    List args = cmd.getArgList();
    if (args.size() > 0) {
      printUsage();
      throw new RuntimeException("No args expected.");
    }
    // We always want the loadAndVerify action
    args.add("loadAndVerify");
    if (cmd.hasOption(USER_OPT)) {
      userNames = cmd.getOptionValue(USER_OPT);
    }
    super.processOptions(cmd);
  }

  public static void main(String argv[]) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    int ret = ToolRunner.run(conf, new IntegrationTestWithCellVisibilityLoadAndVerify(), argv);
    System.exit(ret);
  }
}