/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.nio.ByteBuffer;
import java.util.Random;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests the concurrent TinyLfuBlockCache.
 */
@Category({IOTests.class, SmallTests.class})
public class TestTinyLfuBlockCache {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestTinyLfuBlockCache.class);

  @Test
  public void testCacheSimple() throws Exception {
    long maxSize = 1000000;
    long blockSize = calculateBlockSizeDefault(maxSize, 101);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateRandomBlocks(100, blockSize);

    long expectedCacheSize = cache.heapSize();

    // Confirm empty
    for (CachedItem block : blocks) {
      assertNull(cache.getBlock(block.cacheKey, true, false, true));
    }

    // Add blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.heapSize();
    }

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Re-add same blocks and ensure nothing has changed
    long expectedBlockCount = cache.getBlockCount();
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }
    assertEquals(
        "Cache should ignore cache requests for blocks already in cache",
        expectedBlockCount, cache.getBlockCount());

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Expect no evictions
    assertEquals(0, cache.getStats().getEvictionCount());
  }

  @Test
  public void testCacheEvictionSimple() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateFixedBlocks(11, blockSize, "block");

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // A single eviction run should have occurred
    assertEquals(1, cache.getStats().getEvictionCount());

    // The cache did not grow beyond max
    assertTrue(cache.heapSize() < maxSize);

    // All blocks except one should be in the cache
    assertEquals(10, cache.getBlockCount());
  }

  @Test
  public void testScanResistance() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] singleBlocks = generateFixedBlocks(20, blockSize, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(5, blockSize, "multi");

    // Add 5 blocks from each
    for (int i = 0; i < 5; i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
    }

    // Add frequency
    for (int i = 0; i < 5; i++) {
      for (int j = 0; j < 10; j++) {
        CachedItem block = multiBlocks[i];
        cache.getBlock(block.cacheKey, true, false, true);
      }
    }

    // Let's keep "scanning" by adding single blocks. From here on we only
    // expect the infrequently accessed "single" blocks to be evicted.
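    // 13 new blocks arrive into a cache that holds 10, so 13 victims must be
    // evicted in total, while the frequently read "multi" blocks all survive.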

    for (int i = 5; i < 18; i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    for (CachedItem block : multiBlocks) {
      assertTrue(cache.cache.asMap().containsKey(block.cacheKey));
    }

    assertEquals(10, cache.getBlockCount());
    assertEquals(13, cache.getStats().getEvictionCount());
  }

  @Test
  public void testMaxBlockSize() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);
    CachedItem[] tooLong = generateFixedBlocks(10, 2 * blockSize, "long");
    CachedItem[] small = generateFixedBlocks(15, blockSize / 2, "small");

    for (CachedItem i : tooLong) {
      cache.cacheBlock(i.cacheKey, i);
    }
    for (CachedItem i : small) {
      cache.cacheBlock(i.cacheKey, i);
    }
    assertEquals(15, cache.getBlockCount());
    for (CachedItem i : small) {
      assertNotNull(cache.getBlock(i.cacheKey, true, false, false));
    }
    for (CachedItem i : tooLong) {
      assertNull(cache.getBlock(i.cacheKey, true, false, false));
    }

    assertEquals(10, cache.getStats().getFailedInserts());
  }

  @Test
  public void testResizeBlockCache() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateFixedBlocks(10, blockSize, "block");

    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // Do not expect any evictions yet
    assertEquals(10, cache.getBlockCount());
    assertEquals(0, cache.getStats().getEvictionCount());

    // Resize to half capacity
    cache.setMaxSize(maxSize / 2);

    // And we expect 1/2 of the blocks to be evicted
    assertEquals(5, cache.getBlockCount());
    assertEquals(5, cache.getStats().getEvictedCount());
  }

  private CachedItem[] generateFixedBlocks(int numBlocks, int size, String pfx) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    for (int i = 0; i < numBlocks; i++) {
      blocks[i] = new CachedItem(pfx + i, size);
    }
    return blocks;
  }

  private CachedItem[] generateFixedBlocks(int numBlocks, long size, String pfx) {
    return generateFixedBlocks(numBlocks, (int) size, pfx);
  }

  private CachedItem[] generateRandomBlocks(int numBlocks, long maxSize) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    Random r = new Random();
    for (int i = 0; i < numBlocks; i++) {
      blocks[i] = new CachedItem("block" + i, r.nextInt((int) maxSize) + 1);
    }
    return blocks;
  }

  /**
   * Computes a block size such that roughly {@code numBlocks} blocks fit in
   * {@code maxSize} once the cache's fixed and per-entry overhead is deducted.
   */
  private long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int) Math.ceil((1.2) * maxSize / roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long) Math.floor((roughBlockSize - negateBlockSize) * 0.99f));
  }

  /**
   * Same as {@link #calculateBlockSize} but scales the result by the cache's
   * default acceptable factor rather than a fixed 0.99.
   */
  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
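    // Estimate the entry count with 20% headroom, then subtract the cache's
    // fixed and per-entry overhead from the rough per-block budget.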
    int numEntries = (int) Math.ceil((1.2) * maxSize / roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long) Math.floor((roughBlockSize - negateBlockSize) *
        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
  }

  private static class CachedItem implements Cacheable {
    BlockCacheKey cacheKey;
    int size;

    CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;
    }

    /** The size of this item reported to the block cache layer */
    @Override
    public long heapSize() {
      return ClassSize.align(size);
    }

    @Override
    public int getSerializedLength() {
      return 0;
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return null;
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }

    @Override
    public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
      // No-op: test blocks carry no payload to serialize.
    }
  }

}