/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

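/**
 * Sanity checks for {@link LruCachedBlockQueue}: verifies which blocks are retained once the
 * queue reaches its maximum heap size and that blocks are polled oldest-first.
 */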
@Category({ IOTests.class, SmallTests.class })
public class TestCachedBlockQueue {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestCachedBlockQueue.class);

  @Test
  public void testQueue() throws Exception {
    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

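    // maxSize of 10000 is the total heap size worth of blocks the queue should retain;
    // 1000 is the expected average block size, used only to size the backing queue.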
    LruCachedBlockQueue queue = new LruCachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

    // We expect cb1 through cb8 to be in the queue: by the time cb9 and cb10 are added, the
    // queue already holds at least maxSize worth of blocks and both are newer than everything
    // queued, so they are rejected.
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() + cb4.heapSize()
      + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() + cb8.heapSize();

    assertEquals(expectedSize, queue.heapSize());

    // pollLast() should hand back the remaining blocks oldest-first.
    for (int i = 1; i <= 8; i++) {
      assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
    }
  }

  @Test
  public void testQueueSmallBlockEdgeCase() throws Exception {
    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

    LruCachedBlockQueue queue = new LruCachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

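    // cb0 is a tiny block (only 10 bytes of payload beyond the per-block overhead) and, with
    // access time 0, is older than everything already in the queue.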
    CachedBlock cb0 = new CachedBlock(10 + CachedBlock.PER_BLOCK_OVERHEAD, "cb0", 0);
    queue.add(cb0);

    // cb0 is older than everything in the queue, so it must be included, but it will not end up
    // kicking anything out because (heapSize - cb8.heapSize + cb0.heapSize < maxSize)
    // and we must always maintain heapSize >= maxSize once we achieve it.

    // We expect cb0 through cb8 to be in the queue
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() + cb4.heapSize()
      + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() + cb8.heapSize() + cb0.heapSize();

    assertEquals(expectedSize, queue.heapSize());

    // pollLast() should hand back the remaining blocks oldest-first, so cb0 comes out first.
    for (int i = 0; i <= 8; i++) {
      assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
    }
  }

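  /**
   * Test helper that wraps a no-op {@link Cacheable} reporting a chosen heap size, so the
   * enclosing {@link LruCachedBlock} ends up with roughly the requested heap size and the given
   * access time.
   */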
  private static class CachedBlock extends org.apache.hadoop.hbase.io.hfile.LruCachedBlock {
    public CachedBlock(final long heapSize, String name, long accessTime) {
      super(new BlockCacheKey(name, 0), new Cacheable() {
        @Override
        public long heapSize() {
          // Subtract the per-block overhead that LruCachedBlock adds back, so the wrapped
          // block reports approximately the heapSize requested by the caller.
          return ((int) (heapSize - CachedBlock.PER_BLOCK_OVERHEAD));
        }

        @Override
        public int getSerializedLength() {
          return 0;
        }

        @Override
        public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
        }

        @Override
        public CacheableDeserializer<Cacheable> getDeserializer() {
          return null;
        }

        @Override
        public BlockType getBlockType() {
          return BlockType.DATA;
        }

      }, accessTime, false);
    }
  }
}