001/** 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.io.hfile; 019 020import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP; 021import static org.junit.Assert.assertEquals; 022import static org.junit.Assert.assertNotNull; 023import static org.junit.Assert.assertNull; 024import static org.junit.Assert.assertTrue; 025 026import java.nio.ByteBuffer; 027import java.util.Random; 028import java.util.concurrent.ExecutorService; 029import java.util.concurrent.Executors; 030import java.util.concurrent.TimeUnit; 031import java.util.concurrent.atomic.AtomicBoolean; 032import java.util.concurrent.atomic.AtomicInteger; 033import org.apache.hadoop.conf.Configuration; 034import org.apache.hadoop.hbase.HBaseClassTestRule; 035import org.apache.hadoop.hbase.HBaseConfiguration; 036import org.apache.hadoop.hbase.HConstants; 037import org.apache.hadoop.hbase.Waiter; 038import org.apache.hadoop.hbase.Waiter.ExplainingPredicate; 039import org.apache.hadoop.hbase.io.HeapSize; 040import org.apache.hadoop.hbase.io.hfile.LruBlockCache.EvictionThread; 041import org.apache.hadoop.hbase.nio.ByteBuff; 042import org.apache.hadoop.hbase.testclassification.IOTests; 
043import org.apache.hadoop.hbase.testclassification.SmallTests; 044import org.apache.hadoop.hbase.util.ClassSize; 045import org.junit.Assert; 046import org.junit.ClassRule; 047import org.junit.Test; 048import org.junit.experimental.categories.Category; 049import org.slf4j.Logger; 050import org.slf4j.LoggerFactory; 051 052/** 053 * Tests the concurrent LruBlockCache.<p> 054 * 055 * Tests will ensure it grows and shrinks in size properly, 056 * evictions run when they're supposed to and do what they should, 057 * and that cached blocks are accessible when expected to be. 058 */ 059@Category({IOTests.class, SmallTests.class}) 060public class TestLruBlockCache { 061 062 @ClassRule 063 public static final HBaseClassTestRule CLASS_RULE = 064 HBaseClassTestRule.forClass(TestLruBlockCache.class); 065 066 private static final Logger LOG = LoggerFactory.getLogger(TestLruBlockCache.class); 067 068 @Test 069 public void testCacheEvictionThreadSafe() throws Exception { 070 long maxSize = 100000; 071 int numBlocks = 9; 072 int testRuns = 10; 073 final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); 074 assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); 075 076 final Configuration conf = HBaseConfiguration.create(); 077 final LruBlockCache cache = new LruBlockCache(maxSize, blockSize); 078 EvictionThread evictionThread = cache.getEvictionThread(); 079 assertTrue(evictionThread != null); 080 while (!evictionThread.isEnteringRun()) { 081 Thread.sleep(1); 082 } 083 final String hfileName = "hfile"; 084 int threads = 10; 085 final int blocksPerThread = 5 * numBlocks; 086 for (int run = 0; run != testRuns; ++run) { 087 final AtomicInteger blockCount = new AtomicInteger(0); 088 ExecutorService service = Executors.newFixedThreadPool(threads); 089 for (int i = 0; i != threads; ++i) { 090 service.execute(new Runnable() { 091 @Override 092 public void run() { 093 for (int blockIndex = 0; blockIndex < blocksPerThread || 
(!cache.isEvictionInProgress()); ++blockIndex) { 094 CachedItem block = new CachedItem(hfileName, (int) blockSize, blockCount.getAndIncrement()); 095 boolean inMemory = Math.random() > 0.5; 096 cache.cacheBlock(block.cacheKey, block, inMemory); 097 } 098 cache.evictBlocksByHfileName(hfileName); 099 } 100 }); 101 } 102 service.shutdown(); 103 // The test may fail here if the evict thread frees the blocks too fast 104 service.awaitTermination(10, TimeUnit.MINUTES); 105 Waiter.waitFor(conf, 10000, 100, new ExplainingPredicate<Exception>() { 106 @Override 107 public boolean evaluate() throws Exception { 108 return cache.getBlockCount() == 0; 109 } 110 111 @Override 112 public String explainFailure() throws Exception { 113 return "Cache block count failed to return to 0"; 114 } 115 }); 116 assertEquals(0, cache.getBlockCount()); 117 assertEquals(cache.getOverhead(), cache.getCurrentSize()); 118 } 119 } 120 @Test 121 public void testBackgroundEvictionThread() throws Exception { 122 long maxSize = 100000; 123 int numBlocks = 9; 124 long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); 125 assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); 126 127 LruBlockCache cache = new LruBlockCache(maxSize,blockSize); 128 EvictionThread evictionThread = cache.getEvictionThread(); 129 assertTrue(evictionThread != null); 130 131 CachedItem[] blocks = generateFixedBlocks(numBlocks + 1, blockSize, "block"); 132 133 // Make sure eviction thread has entered run method 134 while (!evictionThread.isEnteringRun()) { 135 Thread.sleep(1); 136 } 137 138 // Add all the blocks 139 for (CachedItem block : blocks) { 140 cache.cacheBlock(block.cacheKey, block); 141 } 142 143 // wait until at least one eviction has run 144 int n = 0; 145 while(cache.getStats().getEvictionCount() == 0) { 146 Thread.sleep(200); 147 assertTrue("Eviction never happened.", n++ < 20); 148 } 149 150 // let cache stabilize 151 // On some systems, the cache will run multiple evictions 
before it attains 152 // steady-state. For instance, after populating the cache with 10 blocks, 153 // the first eviction evicts a single block and then a second eviction 154 // evicts another. I think this is due to the delta between minSize and 155 // acceptableSize, combined with variance between object overhead on 156 // different environments. 157 n = 0; 158 for (long prevCnt = 0 /* < number of blocks added */, 159 curCnt = cache.getBlockCount(); 160 prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) { 161 Thread.sleep(200); 162 assertTrue("Cache never stabilized.", n++ < 20); 163 } 164 165 long evictionCount = cache.getStats().getEvictionCount(); 166 assertTrue(evictionCount >= 1); 167 System.out.println("Background Evictions run: " + evictionCount); 168 } 169 170 @Test 171 public void testCacheSimple() throws Exception { 172 173 long maxSize = 1000000; 174 long blockSize = calculateBlockSizeDefault(maxSize, 101); 175 176 LruBlockCache cache = new LruBlockCache(maxSize, blockSize); 177 178 CachedItem [] blocks = generateRandomBlocks(100, blockSize); 179 180 long expectedCacheSize = cache.heapSize(); 181 182 // Confirm empty 183 for (CachedItem block : blocks) { 184 assertTrue(cache.getBlock(block.cacheKey, true, false, true) == null); 185 } 186 187 // Add blocks 188 for (CachedItem block : blocks) { 189 cache.cacheBlock(block.cacheKey, block); 190 expectedCacheSize += block.cacheBlockHeapSize(); 191 } 192 193 // Verify correctly calculated cache heap size 194 assertEquals(expectedCacheSize, cache.heapSize()); 195 196 // Check if all blocks are properly cached and retrieved 197 for (CachedItem block : blocks) { 198 HeapSize buf = cache.getBlock(block.cacheKey, true, false, true); 199 assertTrue(buf != null); 200 assertEquals(buf.heapSize(), block.heapSize()); 201 } 202 203 // Re-add same blocks and ensure nothing has changed 204 long expectedBlockCount = cache.getBlockCount(); 205 for (CachedItem block : blocks) { 206 
cache.cacheBlock(block.cacheKey, block); 207 } 208 assertEquals( 209 "Cache should ignore cache requests for blocks already in cache", 210 expectedBlockCount, cache.getBlockCount()); 211 212 // Verify correctly calculated cache heap size 213 assertEquals(expectedCacheSize, cache.heapSize()); 214 215 // Check if all blocks are properly cached and retrieved 216 for (CachedItem block : blocks) { 217 HeapSize buf = cache.getBlock(block.cacheKey, true, false, true); 218 assertTrue(buf != null); 219 assertEquals(buf.heapSize(), block.heapSize()); 220 } 221 222 // Expect no evictions 223 assertEquals(0, cache.getStats().getEvictionCount()); 224 Thread t = new LruBlockCache.StatisticsThread(cache); 225 t.start(); 226 t.join(); 227 } 228 229 @Test 230 public void testCacheEvictionSimple() throws Exception { 231 232 long maxSize = 100000; 233 long blockSize = calculateBlockSizeDefault(maxSize, 10); 234 235 LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false); 236 237 CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block"); 238 239 long expectedCacheSize = cache.heapSize(); 240 241 // Add all the blocks 242 for (CachedItem block : blocks) { 243 cache.cacheBlock(block.cacheKey, block); 244 expectedCacheSize += block.cacheBlockHeapSize(); 245 } 246 247 // A single eviction run should have occurred 248 assertEquals(1, cache.getStats().getEvictionCount()); 249 250 // Our expected size overruns acceptable limit 251 assertTrue(expectedCacheSize > 252 (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); 253 254 // But the cache did not grow beyond max 255 assertTrue(cache.heapSize() < maxSize); 256 257 // And is still below the acceptable limit 258 assertTrue(cache.heapSize() < 259 (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); 260 261 // All blocks except block 0 should be in the cache 262 assertTrue(cache.getBlock(blocks[0].cacheKey, true, false, true) == null); 263 for(int i=1;i<blocks.length;i++) { 264 
assertEquals(cache.getBlock(blocks[i].cacheKey, true, false, true), 265 blocks[i]); 266 } 267 } 268 269 @Test 270 public void testCacheEvictionTwoPriorities() throws Exception { 271 272 long maxSize = 100000; 273 long blockSize = calculateBlockSizeDefault(maxSize, 10); 274 275 LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false); 276 277 CachedItem [] singleBlocks = generateFixedBlocks(5, 10000, "single"); 278 CachedItem [] multiBlocks = generateFixedBlocks(5, 10000, "multi"); 279 280 long expectedCacheSize = cache.heapSize(); 281 282 // Add and get the multi blocks 283 for (CachedItem block : multiBlocks) { 284 cache.cacheBlock(block.cacheKey, block); 285 expectedCacheSize += block.cacheBlockHeapSize(); 286 assertEquals(cache.getBlock(block.cacheKey, true, false, true), block); 287 } 288 289 // Add the single blocks (no get) 290 for (CachedItem block : singleBlocks) { 291 cache.cacheBlock(block.cacheKey, block); 292 expectedCacheSize += block.heapSize(); 293 } 294 295 // A single eviction run should have occurred 296 assertEquals(1, cache.getStats().getEvictionCount()); 297 298 // We expect two entries evicted 299 assertEquals(2, cache.getStats().getEvictedCount()); 300 301 // Our expected size overruns acceptable limit 302 assertTrue(expectedCacheSize > 303 (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); 304 305 // But the cache did not grow beyond max 306 assertTrue(cache.heapSize() <= maxSize); 307 308 // And is now below the acceptable limit 309 assertTrue(cache.heapSize() <= 310 (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); 311 312 // We expect fairness across the two priorities. 313 // This test makes multi go barely over its limit, in-memory 314 // empty, and the rest in single. Two single evictions and 315 // one multi eviction expected. 
316 assertTrue(cache.getBlock(singleBlocks[0].cacheKey, true, false, true) == null); 317 assertTrue(cache.getBlock(multiBlocks[0].cacheKey, true, false, true) == null); 318 319 // And all others to be cached 320 for(int i=1;i<4;i++) { 321 assertEquals(cache.getBlock(singleBlocks[i].cacheKey, true, false, true), 322 singleBlocks[i]); 323 assertEquals(cache.getBlock(multiBlocks[i].cacheKey, true, false, true), 324 multiBlocks[i]); 325 } 326 } 327 328 @Test 329 public void testCacheEvictionThreePriorities() throws Exception { 330 331 long maxSize = 100000; 332 long blockSize = calculateBlockSize(maxSize, 10); 333 334 LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, 335 (int)Math.ceil(1.2*maxSize/blockSize), 336 LruBlockCache.DEFAULT_LOAD_FACTOR, 337 LruBlockCache.DEFAULT_CONCURRENCY_LEVEL, 338 0.98f, // min 339 0.99f, // acceptable 340 0.33f, // single 341 0.33f, // multi 342 0.34f, // memory 343 1.2f, // limit 344 false, 345 16 * 1024 * 1024); 346 347 CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single"); 348 CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); 349 CachedItem [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory"); 350 351 long expectedCacheSize = cache.heapSize(); 352 353 // Add 3 blocks from each priority 354 for(int i=0;i<3;i++) { 355 356 // Just add single blocks 357 cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); 358 expectedCacheSize += singleBlocks[i].cacheBlockHeapSize(); 359 360 // Add and get multi blocks 361 cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]); 362 expectedCacheSize += multiBlocks[i].cacheBlockHeapSize(); 363 cache.getBlock(multiBlocks[i].cacheKey, true, false, true); 364 365 // Add memory blocks as such 366 cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true); 367 expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize(); 368 369 } 370 371 // Do not expect any evictions yet 372 assertEquals(0, cache.getStats().getEvictionCount()); 
373 374 // Verify cache size 375 assertEquals(expectedCacheSize, cache.heapSize()); 376 377 // Insert a single block, oldest single should be evicted 378 cache.cacheBlock(singleBlocks[3].cacheKey, singleBlocks[3]); 379 380 // Single eviction, one thing evicted 381 assertEquals(1, cache.getStats().getEvictionCount()); 382 assertEquals(1, cache.getStats().getEvictedCount()); 383 384 // Verify oldest single block is the one evicted 385 assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true)); 386 387 // Change the oldest remaining single block to a multi 388 cache.getBlock(singleBlocks[1].cacheKey, true, false, true); 389 390 // Insert another single block 391 cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]); 392 393 // Two evictions, two evicted. 394 assertEquals(2, cache.getStats().getEvictionCount()); 395 assertEquals(2, cache.getStats().getEvictedCount()); 396 397 // Oldest multi block should be evicted now 398 assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true)); 399 400 // Insert another memory block 401 cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true); 402 403 // Three evictions, three evicted. 
404 assertEquals(3, cache.getStats().getEvictionCount()); 405 assertEquals(3, cache.getStats().getEvictedCount()); 406 407 // Oldest memory block should be evicted now 408 assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false, true)); 409 410 // Add a block that is twice as big (should force two evictions) 411 CachedItem [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big"); 412 cache.cacheBlock(bigBlocks[0].cacheKey, bigBlocks[0]); 413 414 // Four evictions, six evicted (inserted block 3X size, expect +3 evicted) 415 assertEquals(4, cache.getStats().getEvictionCount()); 416 assertEquals(6, cache.getStats().getEvictedCount()); 417 418 // Expect three remaining singles to be evicted 419 assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false, true)); 420 assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false, true)); 421 assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false, true)); 422 423 // Make the big block a multi block 424 cache.getBlock(bigBlocks[0].cacheKey, true, false, true); 425 426 // Cache another single big block 427 cache.cacheBlock(bigBlocks[1].cacheKey, bigBlocks[1]); 428 429 // Five evictions, nine evicted (3 new) 430 assertEquals(5, cache.getStats().getEvictionCount()); 431 assertEquals(9, cache.getStats().getEvictedCount()); 432 433 // Expect three remaining multis to be evicted 434 assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true)); 435 assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true)); 436 assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false, true)); 437 438 // Cache a big memory block 439 cache.cacheBlock(bigBlocks[2].cacheKey, bigBlocks[2], true); 440 441 // Six evictions, twelve evicted (3 new) 442 assertEquals(6, cache.getStats().getEvictionCount()); 443 assertEquals(12, cache.getStats().getEvictedCount()); 444 445 // Expect three remaining in-memory to be evicted 446 assertEquals(null, 
cache.getBlock(memoryBlocks[1].cacheKey, true, false, true)); 447 assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false, true)); 448 assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false, true)); 449 } 450 451 @Test 452 public void testCacheEvictionInMemoryForceMode() throws Exception { 453 long maxSize = 100000; 454 long blockSize = calculateBlockSize(maxSize, 10); 455 456 LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, 457 (int)Math.ceil(1.2*maxSize/blockSize), 458 LruBlockCache.DEFAULT_LOAD_FACTOR, 459 LruBlockCache.DEFAULT_CONCURRENCY_LEVEL, 460 0.98f, // min 461 0.99f, // acceptable 462 0.2f, // single 463 0.3f, // multi 464 0.5f, // memory 465 1.2f, // limit 466 true, 467 16 * 1024 * 1024); 468 469 CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); 470 CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); 471 CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory"); 472 473 long expectedCacheSize = cache.heapSize(); 474 475 // 0. Add 5 single blocks and 4 multi blocks to make cache full, si:mu:me = 5:4:0 476 for(int i = 0; i < 4; i++) { 477 // Just add single blocks 478 cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); 479 expectedCacheSize += singleBlocks[i].cacheBlockHeapSize(); 480 // Add and get multi blocks 481 cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]); 482 expectedCacheSize += multiBlocks[i].cacheBlockHeapSize(); 483 cache.getBlock(multiBlocks[i].cacheKey, true, false, true); 484 } 485 // 5th single block 486 cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]); 487 expectedCacheSize += singleBlocks[4].cacheBlockHeapSize(); 488 // Do not expect any evictions yet 489 assertEquals(0, cache.getStats().getEvictionCount()); 490 // Verify cache size 491 assertEquals(expectedCacheSize, cache.heapSize()); 492 493 // 1. 
Insert a memory block, oldest single should be evicted, si:mu:me = 4:4:1 494 cache.cacheBlock(memoryBlocks[0].cacheKey, memoryBlocks[0], true); 495 // Single eviction, one block evicted 496 assertEquals(1, cache.getStats().getEvictionCount()); 497 assertEquals(1, cache.getStats().getEvictedCount()); 498 // Verify oldest single block (index = 0) is the one evicted 499 assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true)); 500 501 // 2. Insert another memory block, another single evicted, si:mu:me = 3:4:2 502 cache.cacheBlock(memoryBlocks[1].cacheKey, memoryBlocks[1], true); 503 // Two evictions, two evicted. 504 assertEquals(2, cache.getStats().getEvictionCount()); 505 assertEquals(2, cache.getStats().getEvictedCount()); 506 // Current oldest single block (index = 1) should be evicted now 507 assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true)); 508 509 // 3. Insert 4 memory blocks, 2 single and 2 multi evicted, si:mu:me = 1:2:6 510 cache.cacheBlock(memoryBlocks[2].cacheKey, memoryBlocks[2], true); 511 cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true); 512 cache.cacheBlock(memoryBlocks[4].cacheKey, memoryBlocks[4], true); 513 cache.cacheBlock(memoryBlocks[5].cacheKey, memoryBlocks[5], true); 514 // Three evictions, three evicted. 515 assertEquals(6, cache.getStats().getEvictionCount()); 516 assertEquals(6, cache.getStats().getEvictedCount()); 517 // two oldest single blocks and two oldest multi blocks evicted 518 assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false, true)); 519 assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false, true)); 520 assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true)); 521 assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true)); 522 523 // 4. 
Insert 3 memory blocks, the remaining 1 single and 2 multi evicted 524 // si:mu:me = 0:0:9 525 cache.cacheBlock(memoryBlocks[6].cacheKey, memoryBlocks[6], true); 526 cache.cacheBlock(memoryBlocks[7].cacheKey, memoryBlocks[7], true); 527 cache.cacheBlock(memoryBlocks[8].cacheKey, memoryBlocks[8], true); 528 // Three evictions, three evicted. 529 assertEquals(9, cache.getStats().getEvictionCount()); 530 assertEquals(9, cache.getStats().getEvictedCount()); 531 // one oldest single block and two oldest multi blocks evicted 532 assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false, true)); 533 assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false, true)); 534 assertEquals(null, cache.getBlock(multiBlocks[3].cacheKey, true, false, true)); 535 536 // 5. Insert one memory block, the oldest memory evicted 537 // si:mu:me = 0:0:9 538 cache.cacheBlock(memoryBlocks[9].cacheKey, memoryBlocks[9], true); 539 // one eviction, one evicted. 540 assertEquals(10, cache.getStats().getEvictionCount()); 541 assertEquals(10, cache.getStats().getEvictedCount()); 542 // oldest memory block evicted 543 assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false, true)); 544 545 // 6. Insert one new single block, itself evicted immediately since 546 // all blocks in cache are memory-type which have higher priority 547 // si:mu:me = 0:0:9 (no change) 548 cache.cacheBlock(singleBlocks[9].cacheKey, singleBlocks[9]); 549 // one eviction, one evicted. 
550 assertEquals(11, cache.getStats().getEvictionCount()); 551 assertEquals(11, cache.getStats().getEvictedCount()); 552 // the single block just cached now evicted (can't evict memory) 553 assertEquals(null, cache.getBlock(singleBlocks[9].cacheKey, true, false, true)); 554 } 555 556 // test scan resistance 557 @Test 558 public void testScanResistance() throws Exception { 559 560 long maxSize = 100000; 561 long blockSize = calculateBlockSize(maxSize, 10); 562 563 LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, 564 (int)Math.ceil(1.2*maxSize/blockSize), 565 LruBlockCache.DEFAULT_LOAD_FACTOR, 566 LruBlockCache.DEFAULT_CONCURRENCY_LEVEL, 567 0.66f, // min 568 0.99f, // acceptable 569 0.33f, // single 570 0.33f, // multi 571 0.34f, // memory 572 1.2f, // limit 573 false, 574 16 * 1024 * 1024); 575 576 CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single"); 577 CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); 578 579 // Add 5 multi blocks 580 for (CachedItem block : multiBlocks) { 581 cache.cacheBlock(block.cacheKey, block); 582 cache.getBlock(block.cacheKey, true, false, true); 583 } 584 585 // Add 5 single blocks 586 for(int i=0;i<5;i++) { 587 cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); 588 } 589 590 // An eviction ran 591 assertEquals(1, cache.getStats().getEvictionCount()); 592 593 // To drop down to 2/3 capacity, we'll need to evict 4 blocks 594 assertEquals(4, cache.getStats().getEvictedCount()); 595 596 // Should have been taken off equally from single and multi 597 assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true)); 598 assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true)); 599 assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true)); 600 assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true)); 601 602 // Let's keep "scanning" by adding single blocks. 
From here on we only 603 // expect evictions from the single bucket. 604 605 // Every time we reach 10 total blocks (every 4 inserts) we get 4 single 606 // blocks evicted. Inserting 13 blocks should yield 3 more evictions and 607 // 12 more evicted. 608 609 for(int i=5;i<18;i++) { 610 cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); 611 } 612 613 // 4 total evictions, 16 total evicted 614 assertEquals(4, cache.getStats().getEvictionCount()); 615 assertEquals(16, cache.getStats().getEvictedCount()); 616 617 // Should now have 7 total blocks 618 assertEquals(7, cache.getBlockCount()); 619 620 } 621 622 @Test 623 public void testMaxBlockSize() throws Exception { 624 long maxSize = 100000; 625 long blockSize = calculateBlockSize(maxSize, 10); 626 627 LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, 628 (int)Math.ceil(1.2*maxSize/blockSize), 629 LruBlockCache.DEFAULT_LOAD_FACTOR, 630 LruBlockCache.DEFAULT_CONCURRENCY_LEVEL, 631 0.66f, // min 632 0.99f, // acceptable 633 0.33f, // single 634 0.33f, // multi 635 0.34f, // memory 636 1.2f, // limit 637 false, 638 1024); 639 CachedItem [] tooLong = generateFixedBlocks(10, 1024+5, "long"); 640 CachedItem [] small = generateFixedBlocks(15, 600, "small"); 641 642 643 for (CachedItem i:tooLong) { 644 cache.cacheBlock(i.cacheKey, i); 645 } 646 for (CachedItem i:small) { 647 cache.cacheBlock(i.cacheKey, i); 648 } 649 assertEquals(15,cache.getBlockCount()); 650 for (CachedItem i:small) { 651 assertNotNull(cache.getBlock(i.cacheKey, true, false, false)); 652 } 653 for (CachedItem i:tooLong) { 654 assertNull(cache.getBlock(i.cacheKey, true, false, false)); 655 } 656 657 assertEquals(10, cache.getStats().getFailedInserts()); 658 } 659 660 // test setMaxSize 661 @Test 662 public void testResizeBlockCache() throws Exception { 663 664 long maxSize = 300000; 665 long blockSize = calculateBlockSize(maxSize, 31); 666 667 LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, 668 
(int)Math.ceil(1.2*maxSize/blockSize), 669 LruBlockCache.DEFAULT_LOAD_FACTOR, 670 LruBlockCache.DEFAULT_CONCURRENCY_LEVEL, 671 0.98f, // min 672 0.99f, // acceptable 673 0.33f, // single 674 0.33f, // multi 675 0.34f, // memory 676 1.2f, // limit 677 false, 678 16 * 1024 * 1024); 679 680 CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); 681 CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); 682 CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory"); 683 684 // Add all blocks from all priorities 685 for(int i=0;i<10;i++) { 686 687 // Just add single blocks 688 cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); 689 690 // Add and get multi blocks 691 cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]); 692 cache.getBlock(multiBlocks[i].cacheKey, true, false, true); 693 694 // Add memory blocks as such 695 cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true); 696 } 697 698 // Do not expect any evictions yet 699 assertEquals(0, cache.getStats().getEvictionCount()); 700 701 // Resize to half capacity plus an extra block (otherwise we evict an extra) 702 cache.setMaxSize((long)(maxSize * 0.5f)); 703 704 // Should have run a single eviction 705 assertEquals(1, cache.getStats().getEvictionCount()); 706 707 // And we expect 1/2 of the blocks to be evicted 708 assertEquals(15, cache.getStats().getEvictedCount()); 709 710 // And the oldest 5 blocks from each category should be gone 711 for(int i=0;i<5;i++) { 712 assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false, true)); 713 assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false, true)); 714 assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false, true)); 715 } 716 717 // And the newest 5 blocks should still be accessible 718 for(int i=5;i<10;i++) { 719 assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false, true)); 720 assertEquals(multiBlocks[i], 
cache.getBlock(multiBlocks[i].cacheKey, true, false, true));
      assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false, true));
    }
  }

  /**
   * Tests the rolling-window hit/miss ratio metrics of {@link CacheStats} over the
   * last N periods (N=3 here). Each {@code rollMetricsPeriod()} call closes the
   * current period; once more than N periods exist, the oldest one is evicted from
   * the window and stops contributing to the ratios.
   * <p>
   * NOTE(review): based on the tallies asserted below, the first boolean argument of
   * {@code hit}/{@code miss} appears to be the "caching" flag (request made with
   * cache-on-read) — confirm against the CacheStats API.
   */
  // test metricsPastNPeriods
  @Test
  public void testPastNPeriodsMetrics() throws Exception {
    // Tolerance for the double ratio comparisons below.
    double delta = 0.01;

    // 3 total periods
    CacheStats stats = new CacheStats("test", 3);

    // No accesses, should be 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 1, 1 hit caching, 1 hit non-caching, 2 miss non-caching
    // should be (2/4)=0.5 and (1/1)=1
    stats.hit(false, true, BlockType.DATA);
    stats.hit(true, true, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 2, 1 miss caching, 3 miss non-caching
    // should be (2/8)=0.25 and (1/2)=0.5
    stats.miss(true, false, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 3, 2 hits of each type
    // should be (6/12)=0.5 and (3/4)=0.75
    stats.hit(false, true, BlockType.DATA);
    stats.hit(true, true, BlockType.DATA);
    stats.hit(false, true, BlockType.DATA);
    stats.hit(true, true, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.75, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 4, evict period 1, two caching misses
    // should be (4/10)=0.4 and (2/5)=0.4
    stats.miss(true, false, BlockType.DATA);
    stats.miss(true, false, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 5, evict period 2, 2 caching misses, 2 non-caching hit
    // should be (6/10)=0.6 and (2/6)=1/3
    stats.miss(true, false, BlockType.DATA);
    stats.miss(true, false, BlockType.DATA);
    stats.hit(false, true, BlockType.DATA);
    stats.hit(false, true, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
    assertEquals((double)1/3, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 6, evict period 3
    // should be (2/6)=1/3 and (0/4)=0
    stats.rollMetricsPeriod();
    assertEquals((double)1/3, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 7, evict period 4
    // should be (2/4)=0.5 and (0/2)=0
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 8, evict period 5
    // should be 0 and 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 9, one of each
    // should be (2/4)=0.5 and (1/2)=0.5
    stats.miss(true, false, BlockType.DATA);
    stats.miss(false, false, BlockType.DATA);
    stats.hit(true, true, BlockType.DATA);
    stats.hit(false, true, BlockType.DATA);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
  }

  /**
   * Verifies how the cache resolves a repeated insert under the same key when one
   * copy of the block carries next-block metadata and the other does not: the copy
   * WITH metadata must win regardless of insertion order (it is kept when already
   * present, and it replaces a metadata-less copy when cached afterwards).
   * <p>
   * NOTE(review): the only constructor difference between the two blocks is the
   * {@code 52} vs {@code -1} argument — judging by the variable names this is the
   * next-block-on-disk size, with -1 meaning "metadata missing"; confirm against
   * the HFileBlock constructor.
   */
  @Test
  public void testCacheBlockNextBlockMetadataMissing() {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);
    int size = 100;
    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
    byte[] byteArr = new byte[length];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileContext meta = new HFileContextBuilder().build();
    // Identical payloads; they differ only in the presence of next-block metadata.
    HFileBlock blockWithNextBlockMetadata = new HFileBlock(BlockType.DATA, size, size, -1,
        ByteBuff.wrap(buf), HFileBlock.FILL_HEADER, -1, 52, -1, meta, HEAP);
    HFileBlock blockWithoutNextBlockMetadata = new HFileBlock(BlockType.DATA, size, size, -1,
        ByteBuff.wrap(buf), HFileBlock.FILL_HEADER, -1, -1, -1, meta, HEAP);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.66f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f, // memory
        1.2f, // limit
        false,
        1024);

    BlockCacheKey key = new BlockCacheKey("key1", 0);
    ByteBuffer actualBuffer = ByteBuffer.allocate(length);
    ByteBuffer block1Buffer = ByteBuffer.allocate(length);
    ByteBuffer block2Buffer = ByteBuffer.allocate(length);
    blockWithNextBlockMetadata.serialize(block1Buffer, true);
    blockWithoutNextBlockMetadata.serialize(block2Buffer, true);

    //Add blockWithNextBlockMetadata, expect blockWithNextBlockMetadata back.
    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithNextBlockMetadata, actualBuffer,
        block1Buffer);

    //Add blockWithoutNextBlockMetadata, expect blockWithNextBlockMetadata back.
    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,
        block1Buffer);

    //Clear and add blockWithoutNextBlockMetadata
    cache.clearCache();
    assertNull(cache.getBlock(key, false, false, false));
    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,
        block2Buffer);

    //Add blockWithNextBlockMetadata, expect blockWithNextBlockMetadata to replace.
    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithNextBlockMetadata, actualBuffer,
        block1Buffer);
  }

  /** Creates {@code numBlocks} items of identical {@code size}, keyed "{pfx}{i}" at offset 0. */
  private CachedItem [] generateFixedBlocks(int numBlocks, int size, String pfx) {
    CachedItem [] blocks = new CachedItem[numBlocks];
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem(pfx + i, size);
    }
    return blocks;
  }

  /** Convenience overload: narrows {@code size} to int and delegates. */
  private CachedItem [] generateFixedBlocks(int numBlocks, long size, String pfx) {
    return generateFixedBlocks(numBlocks, (int)size, pfx);
  }

  /**
   * Creates {@code numBlocks} items with random sizes in [1, maxSize], keyed "block{i}".
   * Note the unseeded Random: sizes differ between runs by design.
   */
  private CachedItem [] generateRandomBlocks(int numBlocks, long maxSize) {
    CachedItem [] blocks = new CachedItem[numBlocks];
    Random r = new Random();
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
    }
    return blocks;
  }

  /**
   * Computes a per-block size such that {@code numBlocks} blocks (plus the cache's
   * fixed and per-entry overhead) fit under {@code maxSize}, using a hard-coded
   * 0.99 acceptable factor. Mirrors the accounting LruBlockCache itself performs.
   */
  private long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    // Cache is sized for 1.2x the entry count, matching the map-size argument
    // passed to the LruBlockCache constructors in the tests above.
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    // Amortized overhead charged against each block.
    long negateBlockSize = (long)(totalOverhead/numEntries);
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
  }

  /**
   * Same overhead accounting as {@link #calculateBlockSize(long, int)} but using
   * {@link LruBlockCache#DEFAULT_ACCEPTABLE_FACTOR} instead of the hard-coded 0.99,
   * for tests that construct the cache with default factors.
   */
  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
  }

  /**
   * Minimal {@link Cacheable} stub used to fill the cache in these tests: it has a
   * name, an offset and a declared heap size, but no serialized form
   * (serialize is a no-op and the deserializer is null).
   */
  private static class CachedItem implements Cacheable {
    // Key under which this item is cached.
    BlockCacheKey cacheKey;
    // Heap size this item reports, before alignment.
    int size;

    CachedItem(String blockName, int size, int offset) {
      this.cacheKey = new BlockCacheKey(blockName, offset);
      this.size = size;
    }

    // Convenience constructor: offset defaults to 0.
    CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;
    }

    /** The size of this item reported to the block cache layer */
    @Override
    public long heapSize() {
      return ClassSize.align(size);
    }

    /** Size of the cache block holding this item. Used for verification. */
    public long cacheBlockHeapSize() {
      return LruCachedBlock.PER_BLOCK_OVERHEAD
          + ClassSize.align(cacheKey.heapSize())
          + ClassSize.align(size);
    }

    // No serialized form: this stub only exercises the in-memory cache paths.
    @Override
    public int getSerializedLength() {
      return 0;
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return null;
    }

    @Override
    public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }
  }

  /**
   * Hammers a single key from three threads concurrently — one getting, one
   * evicting, one caching — 10,000 iterations each, and fails if any thread sees
   * an exception. A race in the cache's internal bookkeeping would surface here.
   * Static so other cache tests can reuse it against any {@link BlockCache}.
   */
  static void testMultiThreadGetAndEvictBlockInternal(BlockCache cache) throws Exception {
    int size = 100;
    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
    byte[] byteArr = new byte[length];
    HFileContext meta = new HFileContextBuilder().build();
    BlockCacheKey key = new BlockCacheKey("key1", 0);
    HFileBlock blk = new HFileBlock(BlockType.DATA, size, size, -1,
        ByteBuff.wrap(ByteBuffer.wrap(byteArr, 0, size)), HFileBlock.FILL_HEADER, -1, 52, -1, meta,
        HEAP);
    // Each thread stops early if it (or a prior iteration) hit a failure.
    AtomicBoolean err1 = new AtomicBoolean(false);
    Thread t1 = new Thread(() -> {
      for (int i = 0; i < 10000 && !err1.get(); i++) {
        try {
          cache.getBlock(key, false, false, true);
        } catch (Exception e) {
          err1.set(true);
          LOG.info("Cache block or get block failure: ", e);
        }
      }
    });

    AtomicBoolean err2 = new AtomicBoolean(false);
    Thread t2 = new Thread(() -> {
      for (int i = 0; i < 10000 && !err2.get(); i++) {
        try {
          cache.evictBlock(key);
        } catch (Exception e) {
          err2.set(true);
          LOG.info("Evict block failure: ", e);
        }
      }
    });

    AtomicBoolean err3 = new AtomicBoolean(false);
    Thread t3 = new Thread(() -> {
      for (int i = 0; i < 10000 && !err3.get(); i++) {
        try {
          cache.cacheBlock(key, blk);
        } catch (Exception e) {
          err3.set(true);
          LOG.info("Cache block failure: ", e);
        }
      }
    });
    t1.start();
    t2.start();
    t3.start();
    t1.join();
    t2.join();
    t3.join();
    Assert.assertFalse(err1.get());
    Assert.assertFalse(err2.get());
    Assert.assertFalse(err3.get());
  }

  /**
   * Runs the concurrent get/evict/cache stress test against an LruBlockCache
   * built with the same explicit factor configuration used elsewhere in this file.
   */
  @Test
  public void testMultiThreadGetAndEvictBlock() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);
    LruBlockCache cache =
        new LruBlockCache(maxSize, blockSize, false, (int) Math.ceil(1.2 * maxSize / blockSize),
            LruBlockCache.DEFAULT_LOAD_FACTOR, LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
            0.66f, // min
            0.99f, // acceptable
            0.33f, // single
            0.33f, // multi
            0.34f, // memory
            1.2f, // limit
            false, 1024);
    testMultiThreadGetAndEvictBlockInternal(cache);
  }
}