/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.UniformReservoir;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.Queue;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
import org.apache.hadoop.hbase.PerformanceEvaluation.Status;
import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.GsonUtil;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import org.apache.hbase.thirdparty.com.google.gson.Gson;

/**
 * Unit tests for {@link PerformanceEvaluation}: option parsing, row/size calculation, MR input
 * file generation, and value-size distributions. No cluster is started; only local helpers of
 * {@code PerformanceEvaluation} are exercised.
 */
@Tag(MiscTests.TAG)
@Tag(SmallTests.TAG)
public class TestPerformanceEvaluation {

  private static final HBaseTestingUtil HTU = new HBaseTestingUtil();

  /**
   * The default in-memory compaction policy of {@link TestOptions} must match the cluster-wide
   * default and be propagated to every column family of the generated table descriptor.
   */
  @Test
  public void testDefaultInMemoryCompaction() {
    PerformanceEvaluation.TestOptions defaultOpts = new PerformanceEvaluation.TestOptions();
    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
      defaultOpts.getInMemoryCompaction().toString());
    TableDescriptor tableDescriptor = PerformanceEvaluation.getTableDescriptor(defaultOpts);
    for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {
      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
        familyDescriptor.getInMemoryCompaction().toString());
    }
  }

  /**
   * {@link TestOptions} must survive a Gson round trip; PE ships options to map tasks as JSON.
   */
  @Test
  public void testSerialization() {
    PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
    assertFalse(options.isAutoFlush());
    options.setAutoFlush(true);
    Gson gson = GsonUtil.createGson().create();
    String optionsString = gson.toJson(options);
    PerformanceEvaluation.TestOptions optionsDeserialized =
      gson.fromJson(optionsString, PerformanceEvaluation.TestOptions.class);
    assertTrue(optionsDeserialized.isAutoFlush());
  }

  /**
   * Exercise the mr spec writing. Simple assertions to make sure it is basically working.
   */
  @Test
  public void testWriteInputFile() throws IOException {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    final int clients = 10;
    opts.setNumClientThreads(clients);
    opts.setPerClientRunRows(10);
    Path dir =
      PerformanceEvaluation.writeInputFile(HTU.getConfiguration(), opts, HTU.getDataTestDir());
    FileSystem fs = FileSystem.get(HTU.getConfiguration());
    Path p = new Path(dir, PerformanceEvaluation.JOB_INPUT_FILENAME);
    long len = fs.getFileStatus(p).getLen();
    assertTrue(len > 0);
    byte[] content = new byte[(int) len];
    // One input line is written per client thread; count the lines back.
    try (FSDataInputStream dis = fs.open(p)) {
      dis.readFully(content);
      try (BufferedReader br = new BufferedReader(
        new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8))) {
        int count = 0;
        while (br.readLine() != null) {
          count++;
        }
        assertEquals(clients, count);
      }
    }
  }

  /**
   * Row count must scale linearly with requested size and split evenly across client threads;
   * a random value size halves the effective value size so the row count doubles.
   */
  @Test
  public void testSizeCalculation() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    long rows = opts.getPerClientRunRows();
    // Default row count
    final int defaultPerClientRunRows = 1024 * 1024;
    assertEquals(defaultPerClientRunRows, rows);
    // If size is 2G, then twice the row count.
    opts.setSize(2.0f);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows * 2, opts.getPerClientRunRows());
    // If two clients, then they get half the rows each.
    opts.setNumClientThreads(2);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows, opts.getPerClientRunRows());
    // What if valueSize is 'random'? Then half of the valueSize so twice the rows.
    opts.valueRandom = true;
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows * 2, opts.getPerClientRunRows());
  }

  /**
   * For the randomRead command an explicit perClientRunRows must be kept as-is (not recomputed
   * from size), and generated random rows must range over totalRows, not just the per-client rows.
   */
  @Test
  public void testRandomReadCalculation() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    long rows = opts.getPerClientRunRows();
    // Default row count
    final int defaultPerClientRunRows = 1024 * 1024;
    assertEquals(defaultPerClientRunRows, rows);
    // If size is 2G, then twice the row count.
    opts.setSize(2.0f);
    opts.setPerClientRunRows(1000);
    opts.setCmdName(PerformanceEvaluation.RANDOM_READ);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(1000, opts.getPerClientRunRows());
    // If two clients, then they get half the rows each.
    opts.setNumClientThreads(2);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(1000, opts.getPerClientRunRows());
    // assuming we will get one before this loop expires
    boolean foundValue = false;
    for (int i = 0; i < 10000000; i++) {
      long randomRow = PerformanceEvaluation.generateRandomRow(opts.totalRows);
      if (randomRow > 1000) {
        foundValue = true;
        break;
      }
    }
    assertTrue(foundValue, "We need to get a value more than 1000");
  }

  /**
   * With valueZipf enabled, generated value lengths should follow a zipfian-like distribution:
   * non-degenerate spread (stddev neither 0 nor 1) and a median away from the extremes.
   * Histogram has no public constructor taking a Reservoir here, hence the reflection.
   */
  @Test
  public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
    IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts.setValueZipf(true);
    final int valueSize = 1024;
    opts.setValueSize(valueSize);
    RandomReadTest rrt = new RandomReadTest(null, opts, null);
    Constructor<?> ctor =
      Histogram.class.getDeclaredConstructor(com.codahale.metrics.Reservoir.class);
    ctor.setAccessible(true);
    Histogram histogram = (Histogram) ctor.newInstance(new UniformReservoir(1024 * 500));
    for (int i = 0; i < 100; i++) {
      histogram.update(rrt.getValueLength());
    }
    Snapshot snapshot = histogram.getSnapshot();
    double stddev = snapshot.getStdDev();
    assertTrue(stddev != 0 && stddev != 1.0);
    double median = snapshot.getMedian();
    assertTrue(median != 0 && median != 1 && median != valueSize);
  }

  /** Default buffer size is 2MB and the setter overrides it. */
  @Test
  public void testSetBufferSizeOption() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    // JUnit convention: expected value first.
    assertEquals(2L * 1024L * 1024L, opts.getBufferSize());
    opts.setBufferSize(64L * 1024L);
    assertEquals(64L * 1024L, opts.getBufferSize());
  }

  /** "cmd nThreads" is the minimal valid command line. */
  @Test
  public void testParseOptsWithThreads() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    int threads = 1;
    opts.offer(cmdName);
    opts.offer(String.valueOf(threads));
    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(threads, options.getNumClientThreads());
  }

  /** A non-numeric thread count must fail with a NumberFormatException cause. */
  @Test
  public void testParseOptsWrongThreads() {
    final Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer(cmdName);
    opts.offer("qq");
    // assertThrows (rather than try/catch) so the test fails if no exception is thrown at all.
    IllegalArgumentException e =
      assertThrows(IllegalArgumentException.class, () -> PerformanceEvaluation.parseOpts(opts));
    System.out.println(e.getMessage());
    assertEquals("Command " + cmdName + " does not have threads number", e.getMessage());
    assertTrue(e.getCause() instanceof NumberFormatException);
  }

  /** A command with no thread count at all must fail with a NoSuchElementException cause. */
  @Test
  public void testParseOptsNoThreads() {
    final Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    // The command name must be on the queue: the expected message and NoSuchElementException
    // cause come from consuming the command and then finding the queue empty.
    opts.offer(cmdName);
    IllegalArgumentException e =
      assertThrows(IllegalArgumentException.class, () -> PerformanceEvaluation.parseOpts(opts));
    System.out.println(e.getMessage());
    assertEquals("Command " + cmdName + " does not have threads number", e.getMessage());
    assertTrue(e.getCause() instanceof NoSuchElementException);
  }

  /** --multiPut requires --autoFlush=true; with it, the value is accepted. */
  @Test
  public void testParseOptsMultiPuts() {
    String cmdName = "sequentialWrite";
    final Queue<String> badOpts = new LinkedList<>();
    badOpts.offer("--multiPut=10");
    badOpts.offer(cmdName);
    badOpts.offer("64");
    IllegalArgumentException e =
      assertThrows(IllegalArgumentException.class, () -> PerformanceEvaluation.parseOpts(badOpts));
    System.out.println(e.getMessage());

    // Re-create options
    Queue<String> opts = new LinkedList<>();
    opts.offer("--autoFlush=true");
    opts.offer("--multiPut=10");
    opts.offer(cmdName);
    opts.offer("64");

    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(10, options.getMultiPut());
  }

  /** --autoFlush and --multiPut must parse correctly in either order. */
  @Test
  public void testParseOptsMultiPutsAndAutoFlushOrder() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    String cmdMultiPut = "--multiPut=10";
    String cmdAutoFlush = "--autoFlush=true";
    opts.offer(cmdAutoFlush);
    opts.offer(cmdMultiPut);
    opts.offer(cmdName);
    opts.offer("64");
    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertTrue(options.autoFlush);
    assertEquals(10, options.getMultiPut());

    // Change the order of AutoFlush and Multiput
    opts = new LinkedList<>();
    opts.offer(cmdMultiPut);
    opts.offer(cmdAutoFlush);
    opts.offer(cmdName);
    opts.offer("64");

    options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertEquals(10, options.getMultiPut());
    assertTrue(options.autoFlush);
  }

  /** --connCount conflicts with --oneCon=true; alone it sets the connection count. */
  @Test
  public void testParseOptsConnCount() {
    String cmdName = "sequentialWrite";
    final Queue<String> badOpts = new LinkedList<>();
    badOpts.offer("--oneCon=true");
    badOpts.offer("--connCount=10");
    badOpts.offer(cmdName);
    badOpts.offer("64");
    IllegalArgumentException e =
      assertThrows(IllegalArgumentException.class, () -> PerformanceEvaluation.parseOpts(badOpts));
    System.out.println(e.getMessage());

    Queue<String> opts = new LinkedList<>();
    opts.offer("--connCount=10");
    opts.offer(cmdName);
    opts.offer("64");

    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(10, options.getConnCount());
  }

  /** --valueRandom and --valueZipf are mutually exclusive; --valueRandom alone is accepted. */
  @Test
  public void testParseOptsValueRandom() {
    String cmdName = "sequentialWrite";
    final Queue<String> badOpts = new LinkedList<>();
    badOpts.offer("--valueRandom");
    badOpts.offer("--valueZipf");
    badOpts.offer(cmdName);
    badOpts.offer("64");
    IllegalStateException e =
      assertThrows(IllegalStateException.class, () -> PerformanceEvaluation.parseOpts(badOpts));
    System.out.println(e.getMessage());

    Queue<String> opts = new LinkedList<>();
    opts.offer("--valueRandom");
    opts.offer(cmdName);
    opts.offer("64");

    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);

    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertTrue(options.valueRandom);
  }

  /**
   * --commandPropertiesFile loads a properties file from the classpath and exposes its entries
   * via {@link TestOptions#getCommandProperties()} for custom test classes.
   */
  @Test
  public void testCustomTestClassOptions() throws IOException {
    Queue<String> opts = new LinkedList<>();
    // create custom properties that can be used for a custom test class
    Properties commandProps = new Properties();
    commandProps.put("prop1", "val1");
    String cmdPropsFilePath =
      this.getClass().getClassLoader().getResource("").getPath() + "cmd_properties.txt";
    // try-with-resources: the writer must be closed so the file is flushed before parseOpts
    // reads it from the classpath.
    try (FileWriter writer = new FileWriter(new File(cmdPropsFilePath))) {
      commandProps.store(writer, null);
    }
    // create opts for the custom test class - commandPropertiesFile, testClassName
    opts.offer("--commandPropertiesFile=" + "cmd_properties.txt");
    String testClassName = "org.apache.hadoop.hbase.TestPerformanceEvaluation$PESampleTestImpl";
    opts.offer(testClassName);
    opts.offer("1");
    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(testClassName, options.getCmdName());
    assertNotNull(options.getCommandProperties());
    assertEquals("val1", options.getCommandProperties().get("prop1"));
  }

  /** Minimal no-op PE test implementation used by {@link #testCustomTestClassOptions()}. */
  static class PESampleTestImpl extends PerformanceEvaluation.Test {

    PESampleTestImpl(Connection con, TestOptions options, Status status) {
      super(con, options, status);
    }

    @Override
    void onStartup() throws IOException {
    }

    @Override
    void onTakedown() throws IOException {
    }

    @Override
    boolean testRow(long i, long startTime) throws IOException, InterruptedException {
      return false;
    }
  }

  /** Boolean flags parse bare (implying true) and with explicit =true/=false values. */
  @Test
  public void testParseBooleanFlags() {
    final Queue<String> opts = new LinkedList<>();
    opts.offer("--valueRandom");
    opts.offer("--autoFlush"); // default: false
    opts.offer("--inmemory=true"); // default: false
    opts.offer("--writeToWAL=false"); // default: true
    opts.offer(PerformanceEvaluation.RANDOM_READ);
    opts.offer("1");

    final PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertTrue(options.valueRandom);
    assertTrue(options.autoFlush);
    assertTrue(options.inMemoryCF);
    assertFalse(options.writeToWAL);
    assertEquals(PerformanceEvaluation.RANDOM_READ, options.getCmdName());
    assertEquals(1, options.getNumClientThreads());
  }

  /** An option that requires a value but is given none must be rejected. */
  @Test
  public void testOptionMissingValue() {
    final Queue<String> opts = new LinkedList<>();
    opts.offer("--presplit");
    assertThrows(IllegalArgumentException.class, () -> PerformanceEvaluation.parseOpts(opts));
  }
}