001/**
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.io.hfile;
019
020import static org.junit.Assert.assertEquals;
021import static org.junit.Assert.assertNotNull;
022import static org.junit.Assert.assertNull;
023import static org.junit.Assert.assertTrue;
024
025import java.nio.ByteBuffer;
026import java.util.Random;
027import java.util.concurrent.ExecutorService;
028import java.util.concurrent.Executors;
029import java.util.concurrent.TimeUnit;
030import java.util.concurrent.atomic.AtomicInteger;
031import org.apache.hadoop.conf.Configuration;
032import org.apache.hadoop.hbase.HBaseClassTestRule;
033import org.apache.hadoop.hbase.HBaseConfiguration;
034import org.apache.hadoop.hbase.HConstants;
035import org.apache.hadoop.hbase.Waiter;
036import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
037import org.apache.hadoop.hbase.io.HeapSize;
038import org.apache.hadoop.hbase.io.hfile.LruBlockCache.EvictionThread;
039import org.apache.hadoop.hbase.testclassification.IOTests;
040import org.apache.hadoop.hbase.testclassification.SmallTests;
041import org.apache.hadoop.hbase.util.ClassSize;
042import org.junit.ClassRule;
043import org.junit.Test;
044import org.junit.experimental.categories.Category;
045
046/**
 * Tests the concurrent LruBlockCache.
 * <p>
 * Tests ensure that the cache grows and shrinks in size properly, that evictions run when they
 * are supposed to and evict the blocks they should, and that cached blocks are accessible when
 * they are expected to be.
052 */
053@Category({IOTests.class, SmallTests.class})
054public class TestLruBlockCache {
055
056  @ClassRule
057  public static final HBaseClassTestRule CLASS_RULE =
058      HBaseClassTestRule.forClass(TestLruBlockCache.class);
059
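  /**
   * Caches and evicts blocks from multiple threads concurrently, then verifies the cache drains
   * back to empty and its size returns to the bare overhead.
   */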
060  @Test
061  public void testCacheEvictionThreadSafe() throws Exception {
062    long maxSize = 100000;
063    int numBlocks = 9;
064    int testRuns = 10;
065    final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
066    assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
067
068    final Configuration conf = HBaseConfiguration.create();
069    final LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
070    EvictionThread evictionThread = cache.getEvictionThread();
    assertNotNull(evictionThread);
072    while (!evictionThread.isEnteringRun()) {
073      Thread.sleep(1);
074    }
075    final String hfileName = "hfile";
076    int threads = 10;
077    final int blocksPerThread = 5 * numBlocks;
078    for (int run = 0; run != testRuns; ++run) {
079      final AtomicInteger blockCount = new AtomicInteger(0);
080      ExecutorService service = Executors.newFixedThreadPool(threads);
081      for (int i = 0; i != threads; ++i) {
082        service.execute(new Runnable() {
083          @Override
084          public void run() {
            for (int blockIndex = 0; blockIndex < blocksPerThread
                || (!cache.isEvictionInProgress()); ++blockIndex) {
              CachedItem block = new CachedItem(hfileName, (int) blockSize,
                  blockCount.getAndIncrement());
087              boolean inMemory = Math.random() > 0.5;
088              cache.cacheBlock(block.cacheKey, block, inMemory);
089            }
090            cache.evictBlocksByHfileName(hfileName);
091          }
092        });
093      }
094      service.shutdown();
095      // The test may fail here if the evict thread frees the blocks too fast
096      service.awaitTermination(10, TimeUnit.MINUTES);
097      Waiter.waitFor(conf, 10000, 100, new ExplainingPredicate<Exception>() {
098        @Override
099        public boolean evaluate() throws Exception {
100          return cache.getBlockCount() == 0;
101        }
102
103        @Override
104        public String explainFailure() throws Exception {
105          return "Cache block count failed to return to 0";
106        }
107      });
108      assertEquals(0, cache.getBlockCount());
109      assertEquals(cache.getOverhead(), cache.getCurrentSize());
110    }
111  }
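
  /**
   * Verifies that the background eviction thread wakes up and evicts blocks once the cache has
   * been filled past its limit.
   */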
112  @Test
113  public void testBackgroundEvictionThread() throws Exception {
114    long maxSize = 100000;
115    int numBlocks = 9;
116    long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
117    assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
118
    LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
    EvictionThread evictionThread = cache.getEvictionThread();
    assertNotNull(evictionThread);
122
123    CachedItem[] blocks = generateFixedBlocks(numBlocks + 1, blockSize, "block");
124
125    // Make sure eviction thread has entered run method
126    while (!evictionThread.isEnteringRun()) {
127      Thread.sleep(1);
128    }
129
130    // Add all the blocks
131    for (CachedItem block : blocks) {
132      cache.cacheBlock(block.cacheKey, block);
133    }
134
135    // wait until at least one eviction has run
136    int n = 0;
137    while(cache.getStats().getEvictionCount() == 0) {
138      Thread.sleep(200);
139      assertTrue("Eviction never happened.", n++ < 20);
140    }
141
142    // let cache stabilize
143    // On some systems, the cache will run multiple evictions before it attains
144    // steady-state. For instance, after populating the cache with 10 blocks,
145    // the first eviction evicts a single block and then a second eviction
146    // evicts another. I think this is due to the delta between minSize and
147    // acceptableSize, combined with variance between object overhead on
148    // different environments.
149    n = 0;
150    for (long prevCnt = 0 /* < number of blocks added */,
151              curCnt = cache.getBlockCount();
152        prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) {
153      Thread.sleep(200);
154      assertTrue("Cache never stabilized.", n++ < 20);
155    }
156
157    long evictionCount = cache.getStats().getEvictionCount();
158    assertTrue(evictionCount >= 1);
159    System.out.println("Background Evictions run: " + evictionCount);
160  }
161
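  /**
   * Exercises basic caching, retrieval, and duplicate insertion; no evictions are expected.
   */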
162  @Test
163  public void testCacheSimple() throws Exception {
164
165    long maxSize = 1000000;
166    long blockSize = calculateBlockSizeDefault(maxSize, 101);
167
168    LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
169
170    CachedItem [] blocks = generateRandomBlocks(100, blockSize);
171
172    long expectedCacheSize = cache.heapSize();
173
174    // Confirm empty
175    for (CachedItem block : blocks) {
      assertNull(cache.getBlock(block.cacheKey, true, false, true));
177    }
178
179    // Add blocks
180    for (CachedItem block : blocks) {
181      cache.cacheBlock(block.cacheKey, block);
182      expectedCacheSize += block.cacheBlockHeapSize();
183    }
184
185    // Verify correctly calculated cache heap size
186    assertEquals(expectedCacheSize, cache.heapSize());
187
188    // Check if all blocks are properly cached and retrieved
189    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
193    }
194
195    // Re-add same blocks and ensure nothing has changed
196    long expectedBlockCount = cache.getBlockCount();
197    for (CachedItem block : blocks) {
198      cache.cacheBlock(block.cacheKey, block);
199    }
200    assertEquals(
201            "Cache should ignore cache requests for blocks already in cache",
202            expectedBlockCount, cache.getBlockCount());
203
204    // Verify correctly calculated cache heap size
205    assertEquals(expectedCacheSize, cache.heapSize());
206
207    // Check if all blocks are properly cached and retrieved
208    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
212    }
213
214    // Expect no evictions
215    assertEquals(0, cache.getStats().getEvictionCount());
216    Thread t = new LruBlockCache.StatisticsThread(cache);
217    t.start();
218    t.join();
219  }
220
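  /**
   * Verifies that overfilling the cache triggers a single eviction run that brings the heap size
   * back under the acceptable limit.
   */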
221  @Test
222  public void testCacheEvictionSimple() throws Exception {
223
224    long maxSize = 100000;
225    long blockSize = calculateBlockSizeDefault(maxSize, 10);
226
    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false);
228
229    CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");
230
231    long expectedCacheSize = cache.heapSize();
232
233    // Add all the blocks
234    for (CachedItem block : blocks) {
235      cache.cacheBlock(block.cacheKey, block);
236      expectedCacheSize += block.cacheBlockHeapSize();
237    }
238
239    // A single eviction run should have occurred
240    assertEquals(1, cache.getStats().getEvictionCount());
241
242    // Our expected size overruns acceptable limit
243    assertTrue(expectedCacheSize >
244      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
245
246    // But the cache did not grow beyond max
247    assertTrue(cache.heapSize() < maxSize);
248
249    // And is still below the acceptable limit
250    assertTrue(cache.heapSize() <
251        (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
252
    // All blocks except block 0 should be in the cache
    assertNull(cache.getBlock(blocks[0].cacheKey, true, false, true));
255    for(int i=1;i<blocks.length;i++) {
256      assertEquals(cache.getBlock(blocks[i].cacheKey, true, false, true),
257          blocks[i]);
258    }
259  }
260
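  /**
   * Verifies that an eviction is spread across the single-access and multi-access priority
   * buckets rather than coming from one bucket only.
   */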
261  @Test
262  public void testCacheEvictionTwoPriorities() throws Exception {
263
264    long maxSize = 100000;
265    long blockSize = calculateBlockSizeDefault(maxSize, 10);
266
    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false);
268
269    CachedItem [] singleBlocks = generateFixedBlocks(5, 10000, "single");
270    CachedItem [] multiBlocks = generateFixedBlocks(5, 10000, "multi");
271
272    long expectedCacheSize = cache.heapSize();
273
274    // Add and get the multi blocks
275    for (CachedItem block : multiBlocks) {
276      cache.cacheBlock(block.cacheKey, block);
277      expectedCacheSize += block.cacheBlockHeapSize();
278      assertEquals(cache.getBlock(block.cacheKey, true, false, true), block);
279    }
280
281    // Add the single blocks (no get)
282    for (CachedItem block : singleBlocks) {
283      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
285    }
286
287    // A single eviction run should have occurred
288    assertEquals(1, cache.getStats().getEvictionCount());
289
290    // We expect two entries evicted
291    assertEquals(2, cache.getStats().getEvictedCount());
292
293    // Our expected size overruns acceptable limit
294    assertTrue(expectedCacheSize >
295      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
296
297    // But the cache did not grow beyond max
298    assertTrue(cache.heapSize() <= maxSize);
299
300    // And is now below the acceptable limit
301    assertTrue(cache.heapSize() <=
302        (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
303
    // We expect fairness across the two priorities.
    // This test makes multi go barely over its limit, in-memory
    // empty, and the rest in single. The oldest single block and
    // the oldest multi block should each have been evicted.
    assertNull(cache.getBlock(singleBlocks[0].cacheKey, true, false, true));
    assertNull(cache.getBlock(multiBlocks[0].cacheKey, true, false, true));
310
311    // And all others to be cached
312    for(int i=1;i<4;i++) {
313      assertEquals(cache.getBlock(singleBlocks[i].cacheKey, true, false, true),
314          singleBlocks[i]);
315      assertEquals(cache.getBlock(multiBlocks[i].cacheKey, true, false, true),
316          multiBlocks[i]);
317    }
318  }
319
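  /**
   * Steps through evictions across the single, multi, and in-memory priority buckets and checks
   * that the oldest block of the appropriate bucket is evicted each time.
   */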
320  @Test
321  public void testCacheEvictionThreePriorities() throws Exception {
322
323    long maxSize = 100000;
324    long blockSize = calculateBlockSize(maxSize, 10);
325
326    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
327        (int)Math.ceil(1.2*maxSize/blockSize),
328        LruBlockCache.DEFAULT_LOAD_FACTOR,
329        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
330        0.98f, // min
331        0.99f, // acceptable
332        0.33f, // single
333        0.33f, // multi
334        0.34f, // memory
335        1.2f,  // limit
336        false,
337        16 * 1024 * 1024);
338
339    CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
340    CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
341    CachedItem [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
342
343    long expectedCacheSize = cache.heapSize();
344
345    // Add 3 blocks from each priority
346    for(int i=0;i<3;i++) {
347
348      // Just add single blocks
349      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
350      expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();
351
352      // Add and get multi blocks
353      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
354      expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
355      cache.getBlock(multiBlocks[i].cacheKey, true, false, true);
356
357      // Add memory blocks as such
358      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
359      expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize();
360
361    }
362
363    // Do not expect any evictions yet
364    assertEquals(0, cache.getStats().getEvictionCount());
365
366    // Verify cache size
367    assertEquals(expectedCacheSize, cache.heapSize());
368
369    // Insert a single block, oldest single should be evicted
370    cache.cacheBlock(singleBlocks[3].cacheKey, singleBlocks[3]);
371
372    // Single eviction, one thing evicted
373    assertEquals(1, cache.getStats().getEvictionCount());
374    assertEquals(1, cache.getStats().getEvictedCount());
375
376    // Verify oldest single block is the one evicted
377    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true));
378
379    // Change the oldest remaining single block to a multi
380    cache.getBlock(singleBlocks[1].cacheKey, true, false, true);
381
382    // Insert another single block
383    cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);
384
385    // Two evictions, two evicted.
386    assertEquals(2, cache.getStats().getEvictionCount());
387    assertEquals(2, cache.getStats().getEvictedCount());
388
389    // Oldest multi block should be evicted now
390    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true));
391
392    // Insert another memory block
393    cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);
394
395    // Three evictions, three evicted.
396    assertEquals(3, cache.getStats().getEvictionCount());
397    assertEquals(3, cache.getStats().getEvictedCount());
398
399    // Oldest memory block should be evicted now
400    assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false, true));
401
    // Add a block that is three times as big (should force three evictions)
403    CachedItem [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
404    cache.cacheBlock(bigBlocks[0].cacheKey, bigBlocks[0]);
405
406    // Four evictions, six evicted (inserted block 3X size, expect +3 evicted)
407    assertEquals(4, cache.getStats().getEvictionCount());
408    assertEquals(6, cache.getStats().getEvictedCount());
409
410    // Expect three remaining singles to be evicted
411    assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false, true));
412    assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false, true));
413    assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false, true));
414
415    // Make the big block a multi block
416    cache.getBlock(bigBlocks[0].cacheKey, true, false, true);
417
418    // Cache another single big block
419    cache.cacheBlock(bigBlocks[1].cacheKey, bigBlocks[1]);
420
421    // Five evictions, nine evicted (3 new)
422    assertEquals(5, cache.getStats().getEvictionCount());
423    assertEquals(9, cache.getStats().getEvictedCount());
424
425    // Expect three remaining multis to be evicted
426    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true));
427    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true));
428    assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false, true));
429
430    // Cache a big memory block
431    cache.cacheBlock(bigBlocks[2].cacheKey, bigBlocks[2], true);
432
433    // Six evictions, twelve evicted (3 new)
434    assertEquals(6, cache.getStats().getEvictionCount());
435    assertEquals(12, cache.getStats().getEvictedCount());
436
437    // Expect three remaining in-memory to be evicted
438    assertEquals(null, cache.getBlock(memoryBlocks[1].cacheKey, true, false, true));
439    assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false, true));
440    assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false, true));
441  }
442
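  /**
   * Verifies eviction behaviour when the in-memory force mode is enabled, where in-memory blocks
   * may displace single and multi access blocks beyond the memory bucket's share.
   */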
443  @Test
444  public void testCacheEvictionInMemoryForceMode() throws Exception {
445    long maxSize = 100000;
446    long blockSize = calculateBlockSize(maxSize, 10);
447
448    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
449        (int)Math.ceil(1.2*maxSize/blockSize),
450        LruBlockCache.DEFAULT_LOAD_FACTOR,
451        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
452        0.98f, // min
453        0.99f, // acceptable
454        0.2f, // single
455        0.3f, // multi
456        0.5f, // memory
457        1.2f, // limit
458        true,
459        16 * 1024 * 1024);
460
461    CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
462    CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
463    CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
464
465    long expectedCacheSize = cache.heapSize();
466
467    // 0. Add 5 single blocks and 4 multi blocks to make cache full, si:mu:me = 5:4:0
468    for(int i = 0; i < 4; i++) {
469      // Just add single blocks
470      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
471      expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();
472      // Add and get multi blocks
473      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
474      expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
475      cache.getBlock(multiBlocks[i].cacheKey, true, false, true);
476    }
477    // 5th single block
478    cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);
479    expectedCacheSize += singleBlocks[4].cacheBlockHeapSize();
480    // Do not expect any evictions yet
481    assertEquals(0, cache.getStats().getEvictionCount());
482    // Verify cache size
483    assertEquals(expectedCacheSize, cache.heapSize());
484
485    // 1. Insert a memory block, oldest single should be evicted, si:mu:me = 4:4:1
486    cache.cacheBlock(memoryBlocks[0].cacheKey, memoryBlocks[0], true);
487    // Single eviction, one block evicted
488    assertEquals(1, cache.getStats().getEvictionCount());
489    assertEquals(1, cache.getStats().getEvictedCount());
490    // Verify oldest single block (index = 0) is the one evicted
491    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true));
492
493    // 2. Insert another memory block, another single evicted, si:mu:me = 3:4:2
494    cache.cacheBlock(memoryBlocks[1].cacheKey, memoryBlocks[1], true);
495    // Two evictions, two evicted.
496    assertEquals(2, cache.getStats().getEvictionCount());
497    assertEquals(2, cache.getStats().getEvictedCount());
498    // Current oldest single block (index = 1) should be evicted now
499    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true));
500
501    // 3. Insert 4 memory blocks, 2 single and 2 multi evicted, si:mu:me = 1:2:6
502    cache.cacheBlock(memoryBlocks[2].cacheKey, memoryBlocks[2], true);
503    cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);
504    cache.cacheBlock(memoryBlocks[4].cacheKey, memoryBlocks[4], true);
505    cache.cacheBlock(memoryBlocks[5].cacheKey, memoryBlocks[5], true);
    // Four more evictions, four more blocks evicted (6 total).
507    assertEquals(6, cache.getStats().getEvictionCount());
508    assertEquals(6, cache.getStats().getEvictedCount());
509    // two oldest single blocks and two oldest multi blocks evicted
510    assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false, true));
511    assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false, true));
512    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true));
513    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true));
514
515    // 4. Insert 3 memory blocks, the remaining 1 single and 2 multi evicted
516    // si:mu:me = 0:0:9
517    cache.cacheBlock(memoryBlocks[6].cacheKey, memoryBlocks[6], true);
518    cache.cacheBlock(memoryBlocks[7].cacheKey, memoryBlocks[7], true);
519    cache.cacheBlock(memoryBlocks[8].cacheKey, memoryBlocks[8], true);
520    // Three evictions, three evicted.
521    assertEquals(9, cache.getStats().getEvictionCount());
522    assertEquals(9, cache.getStats().getEvictedCount());
523    // one oldest single block and two oldest multi blocks evicted
524    assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false, true));
525    assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false, true));
526    assertEquals(null, cache.getBlock(multiBlocks[3].cacheKey, true, false, true));
527
528    // 5. Insert one memory block, the oldest memory evicted
529    // si:mu:me = 0:0:9
530    cache.cacheBlock(memoryBlocks[9].cacheKey, memoryBlocks[9], true);
531    // one eviction, one evicted.
532    assertEquals(10, cache.getStats().getEvictionCount());
533    assertEquals(10, cache.getStats().getEvictedCount());
534    // oldest memory block evicted
535    assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false, true));
536
537    // 6. Insert one new single block, itself evicted immediately since
538    //    all blocks in cache are memory-type which have higher priority
539    // si:mu:me = 0:0:9 (no change)
540    cache.cacheBlock(singleBlocks[9].cacheKey, singleBlocks[9]);
541    // one eviction, one evicted.
542    assertEquals(11, cache.getStats().getEvictionCount());
543    assertEquals(11, cache.getStats().getEvictedCount());
544    // the single block just cached now evicted (can't evict memory)
545    assertEquals(null, cache.getBlock(singleBlocks[9].cacheKey, true, false, true));
546  }
547
548  // test scan resistance
549  @Test
550  public void testScanResistance() throws Exception {
551
552    long maxSize = 100000;
553    long blockSize = calculateBlockSize(maxSize, 10);
554
555    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
556        (int)Math.ceil(1.2*maxSize/blockSize),
557        LruBlockCache.DEFAULT_LOAD_FACTOR,
558        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
559        0.66f, // min
560        0.99f, // acceptable
561        0.33f, // single
562        0.33f, // multi
563        0.34f, // memory
564        1.2f,  // limit
565        false,
566        16 * 1024 * 1024);
567
568    CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
569    CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
570
571    // Add 5 multi blocks
572    for (CachedItem block : multiBlocks) {
573      cache.cacheBlock(block.cacheKey, block);
574      cache.getBlock(block.cacheKey, true, false, true);
575    }
576
577    // Add 5 single blocks
578    for(int i=0;i<5;i++) {
579      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
580    }
581
582    // An eviction ran
583    assertEquals(1, cache.getStats().getEvictionCount());
584
585    // To drop down to 2/3 capacity, we'll need to evict 4 blocks
586    assertEquals(4, cache.getStats().getEvictedCount());
587
588    // Should have been taken off equally from single and multi
589    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false, true));
590    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false, true));
591    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false, true));
592    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false, true));
593
594    // Let's keep "scanning" by adding single blocks.  From here on we only
595    // expect evictions from the single bucket.
596
597    // Every time we reach 10 total blocks (every 4 inserts) we get 4 single
598    // blocks evicted.  Inserting 13 blocks should yield 3 more evictions and
599    // 12 more evicted.
600
601    for(int i=5;i<18;i++) {
602      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
603    }
604
605    // 4 total evictions, 16 total evicted
606    assertEquals(4, cache.getStats().getEvictionCount());
607    assertEquals(16, cache.getStats().getEvictedCount());
608
609    // Should now have 7 total blocks
610    assertEquals(7, cache.getBlockCount());
611
612  }
613
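  /**
   * Verifies that blocks larger than the configured maximum block size are rejected and counted
   * as failed inserts.
   */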
614  @Test
615  public void testMaxBlockSize() throws Exception {
616    long maxSize = 100000;
617    long blockSize = calculateBlockSize(maxSize, 10);
618
619    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
620        (int)Math.ceil(1.2*maxSize/blockSize),
621        LruBlockCache.DEFAULT_LOAD_FACTOR,
622        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
623        0.66f, // min
624        0.99f, // acceptable
625        0.33f, // single
626        0.33f, // multi
627        0.34f, // memory
628        1.2f,  // limit
629        false,
630        1024);
631    CachedItem [] tooLong = generateFixedBlocks(10, 1024+5, "long");
632    CachedItem [] small = generateFixedBlocks(15, 600, "small");
633
634
635    for (CachedItem i:tooLong) {
636      cache.cacheBlock(i.cacheKey, i);
637    }
638    for (CachedItem i:small) {
639      cache.cacheBlock(i.cacheKey, i);
640    }
    assertEquals(15, cache.getBlockCount());
642    for (CachedItem i:small) {
643      assertNotNull(cache.getBlock(i.cacheKey, true, false, false));
644    }
645    for (CachedItem i:tooLong) {
646      assertNull(cache.getBlock(i.cacheKey, true, false, false));
647    }
648
649    assertEquals(10, cache.getStats().getFailedInserts());
650  }
651
652  // test setMaxSize
653  @Test
654  public void testResizeBlockCache() throws Exception {
655
656    long maxSize = 300000;
657    long blockSize = calculateBlockSize(maxSize, 31);
658
659    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
660        (int)Math.ceil(1.2*maxSize/blockSize),
661        LruBlockCache.DEFAULT_LOAD_FACTOR,
662        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
663        0.98f, // min
664        0.99f, // acceptable
665        0.33f, // single
666        0.33f, // multi
667        0.34f, // memory
668        1.2f,  // limit
669        false,
670        16 * 1024 * 1024);
671
672    CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
673    CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
674    CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
675
676    // Add all blocks from all priorities
677    for(int i=0;i<10;i++) {
678
679      // Just add single blocks
680      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
681
682      // Add and get multi blocks
683      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
684      cache.getBlock(multiBlocks[i].cacheKey, true, false, true);
685
686      // Add memory blocks as such
687      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
688    }
689
690    // Do not expect any evictions yet
691    assertEquals(0, cache.getStats().getEvictionCount());
692
693    // Resize to half capacity plus an extra block (otherwise we evict an extra)
694    cache.setMaxSize((long)(maxSize * 0.5f));
695
696    // Should have run a single eviction
697    assertEquals(1, cache.getStats().getEvictionCount());
698
699    // And we expect 1/2 of the blocks to be evicted
700    assertEquals(15, cache.getStats().getEvictedCount());
701
702    // And the oldest 5 blocks from each category should be gone
703    for(int i=0;i<5;i++) {
704      assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false, true));
705      assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false, true));
706      assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false, true));
707    }
708
709    // And the newest 5 blocks should still be accessible
710    for(int i=5;i<10;i++) {
711      assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false, true));
712      assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].cacheKey, true, false, true));
713      assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false, true));
714    }
715  }
716
717  // test metricsPastNPeriods
718  @Test
719  public void testPastNPeriodsMetrics() throws Exception {
    double delta = 0.01;
721
722    // 3 total periods
723    CacheStats stats = new CacheStats("test", 3);
724
725    // No accesses, should be 0
726    stats.rollMetricsPeriod();
727    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
728    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
729
730    // period 1, 1 hit caching, 1 hit non-caching, 2 miss non-caching
731    // should be (2/4)=0.5 and (1/1)=1
732    stats.hit(false, true, BlockType.DATA);
733    stats.hit(true, true, BlockType.DATA);
734    stats.miss(false, false, BlockType.DATA);
735    stats.miss(false, false, BlockType.DATA);
736    stats.rollMetricsPeriod();
737    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
738    assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta);
739
740    // period 2, 1 miss caching, 3 miss non-caching
741    // should be (2/8)=0.25 and (1/2)=0.5
742    stats.miss(true, false, BlockType.DATA);
743    stats.miss(false, false, BlockType.DATA);
744    stats.miss(false, false, BlockType.DATA);
745    stats.miss(false, false, BlockType.DATA);
746    stats.rollMetricsPeriod();
747    assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta);
748    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
749
750    // period 3, 2 hits of each type
751    // should be (6/12)=0.5 and (3/4)=0.75
752    stats.hit(false, true, BlockType.DATA);
753    stats.hit(true, true, BlockType.DATA);
754    stats.hit(false, true, BlockType.DATA);
755    stats.hit(true, true, BlockType.DATA);
756    stats.rollMetricsPeriod();
757    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
758    assertEquals(0.75, stats.getHitCachingRatioPastNPeriods(), delta);
759
760    // period 4, evict period 1, two caching misses
761    // should be (4/10)=0.4 and (2/5)=0.4
762    stats.miss(true, false, BlockType.DATA);
763    stats.miss(true, false, BlockType.DATA);
764    stats.rollMetricsPeriod();
765    assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta);
766    assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta);
767
768    // period 5, evict period 2, 2 caching misses, 2 non-caching hit
769    // should be (6/10)=0.6 and (2/6)=1/3
770    stats.miss(true, false, BlockType.DATA);
771    stats.miss(true, false, BlockType.DATA);
772    stats.hit(false, true, BlockType.DATA);
773    stats.hit(false, true, BlockType.DATA);
774    stats.rollMetricsPeriod();
775    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
776    assertEquals((double)1/3, stats.getHitCachingRatioPastNPeriods(), delta);
777
778    // period 6, evict period 3
779    // should be (2/6)=1/3 and (0/4)=0
780    stats.rollMetricsPeriod();
781    assertEquals((double)1/3, stats.getHitRatioPastNPeriods(), delta);
782    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
783
784    // period 7, evict period 4
785    // should be (2/4)=0.5 and (0/2)=0
786    stats.rollMetricsPeriod();
787    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
788    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
789
790    // period 8, evict period 5
791    // should be 0 and 0
792    stats.rollMetricsPeriod();
793    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
794    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
795
796    // period 9, one of each
797    // should be (2/4)=0.5 and (1/2)=0.5
798    stats.miss(true, false, BlockType.DATA);
799    stats.miss(false, false, BlockType.DATA);
800    stats.hit(true, true, BlockType.DATA);
801    stats.hit(false, true, BlockType.DATA);
802    stats.rollMetricsPeriod();
803    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
804    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
805  }
806
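  /**
   * Verifies that a cached block carrying nextBlockMetadata is kept in preference to the same
   * block without it, and that it replaces a previously cached block lacking the metadata.
   */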
807  @Test
808  public void testCacheBlockNextBlockMetadataMissing() {
809    long maxSize = 100000;
810    long blockSize = calculateBlockSize(maxSize, 10);
811    int size = 100;
812    int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
813    byte[] byteArr = new byte[length];
814    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
815    HFileContext meta = new HFileContextBuilder().build();
816    HFileBlock blockWithNextBlockMetadata = new HFileBlock(BlockType.DATA, size, size, -1, buf,
817        HFileBlock.FILL_HEADER, -1, 52, -1, meta);
818    HFileBlock blockWithoutNextBlockMetadata = new HFileBlock(BlockType.DATA, size, size, -1, buf,
819        HFileBlock.FILL_HEADER, -1, -1, -1, meta);
820
821    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
822        (int)Math.ceil(1.2*maxSize/blockSize),
823        LruBlockCache.DEFAULT_LOAD_FACTOR,
824        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
825        0.66f, // min
826        0.99f, // acceptable
827        0.33f, // single
828        0.33f, // multi
829        0.34f, // memory
830        1.2f,  // limit
831        false,
832        1024);
833
834    BlockCacheKey key = new BlockCacheKey("key1", 0);
835    ByteBuffer actualBuffer = ByteBuffer.allocate(length);
836    ByteBuffer block1Buffer = ByteBuffer.allocate(length);
837    ByteBuffer block2Buffer = ByteBuffer.allocate(length);
838    blockWithNextBlockMetadata.serialize(block1Buffer, true);
839    blockWithoutNextBlockMetadata.serialize(block2Buffer, true);
840
841    //Add blockWithNextBlockMetadata, expect blockWithNextBlockMetadata back.
842    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithNextBlockMetadata, actualBuffer,
843        block1Buffer);
844
    //Add blockWithoutNextBlockMetadata, expect blockWithNextBlockMetadata back.
846    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,
847        block1Buffer);
848
849    //Clear and add blockWithoutNextBlockMetadata
850    cache.clearCache();
851    assertNull(cache.getBlock(key, false, false, false));
852    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,
853        block2Buffer);
854
855    //Add blockWithNextBlockMetadata, expect blockWithNextBlockMetadata to replace.
856    CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithNextBlockMetadata, actualBuffer,
857        block1Buffer);
858  }
859
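  /** Creates {@code numBlocks} cache items of the given size, keyed by the given name prefix. */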
860  private CachedItem [] generateFixedBlocks(int numBlocks, int size, String pfx) {
861    CachedItem [] blocks = new CachedItem[numBlocks];
862    for(int i=0;i<numBlocks;i++) {
863      blocks[i] = new CachedItem(pfx + i, size);
864    }
865    return blocks;
866  }
867
868  private CachedItem [] generateFixedBlocks(int numBlocks, long size, String pfx) {
869    return generateFixedBlocks(numBlocks, (int)size, pfx);
870  }
871
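  /** Creates cache items with random sizes between 1 and {@code maxSize} bytes. */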
872  private CachedItem [] generateRandomBlocks(int numBlocks, long maxSize) {
873    CachedItem [] blocks = new CachedItem[numBlocks];
874    Random r = new Random();
875    for(int i=0;i<numBlocks;i++) {
876      blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
877    }
878    return blocks;
879  }
880
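  /**
   * Computes a block size such that {@code numBlocks} blocks, including per-block and fixed cache
   * overhead, fit just inside {@code maxSize} (scaled by 0.99).
   */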
881  private long calculateBlockSize(long maxSize, int numBlocks) {
882    long roughBlockSize = maxSize / numBlocks;
883    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
884    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
885        ClassSize.CONCURRENT_HASHMAP +
886        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
887        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
888    long negateBlockSize = (long)(totalOverhead/numEntries);
889    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
890    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
891  }
892
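  /**
   * Like {@link #calculateBlockSize(long, int)}, but scales by the default acceptable factor so
   * that {@code numBlocks} blocks fit below the acceptable watermark.
   */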
893  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
894    long roughBlockSize = maxSize / numBlocks;
895    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
896    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
897        ClassSize.CONCURRENT_HASHMAP +
898        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
899        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
900    long negateBlockSize = totalOverhead / numEntries;
901    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
902    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
903        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
904  }
905
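  /** A minimal {@link Cacheable} with a fixed reported heap size, standing in for real blocks. */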
906  private static class CachedItem implements Cacheable {
907    BlockCacheKey cacheKey;
908    int size;
909
910    CachedItem(String blockName, int size, int offset) {
911      this.cacheKey = new BlockCacheKey(blockName, offset);
912      this.size = size;
913    }
914
915    CachedItem(String blockName, int size) {
916      this.cacheKey = new BlockCacheKey(blockName, 0);
917      this.size = size;
918    }
919
920    /** The size of this item reported to the block cache layer */
921    @Override
922    public long heapSize() {
923      return ClassSize.align(size);
924    }
925
926    /** Size of the cache block holding this item. Used for verification. */
927    public long cacheBlockHeapSize() {
928      return LruCachedBlock.PER_BLOCK_OVERHEAD
929          + ClassSize.align(cacheKey.heapSize())
930          + ClassSize.align(size);
931    }
932
933    @Override
934    public int getSerializedLength() {
935      return 0;
936    }
937
938    @Override
939    public CacheableDeserializer<Cacheable> getDeserializer() {
940      return null;
941    }
942
943    @Override
944    public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
945    }
946
947    @Override
948    public BlockType getBlockType() {
949      return BlockType.DATA;
950    }
951
952    @Override
953    public MemoryType getMemoryType() {
954      return MemoryType.EXCLUSIVE;
955    }
956
957  }
958
959}
960