/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

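/**
 * Tests for {@link LruCachedBlockQueue}: the queue should keep the blocks that sort highest
 * (here, the ones with the oldest access times) until their combined heap size reaches the
 * configured maximum, leaving out or displacing lower-sorted blocks after that point.
 */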
@Category({ IOTests.class, SmallTests.class })
public class TestCachedBlockQueue {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestCachedBlockQueue.class);

  @Test
  public void testQueue() throws Exception {
    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

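    // Bound the queue at 10000 bytes of heap; the second argument (1000) is a block-size hint
    // used to pre-size the backing priority queue.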
    LruCachedBlockQueue queue = new LruCachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

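    // The ten blocks together exceed the 10000 byte cap. cb9 and cb10 carry the newest access
    // times, which sort lowest, so they are the two blocks that do not make it into the queue.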
    // We expect cb1 through cb8 to be in the queue
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
      cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
      cb8.heapSize();

    assertEquals(expectedSize, queue.heapSize());

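    // pollLast() returns the highest-sorted entry, i.e. the block with the oldest access time,
    // so the blocks come back in order cb1, cb2, ..., cb8.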
    for (int i = 1; i <= 8; i++) {
      assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
    }
  }

  @Test
  public void testQueueSmallBlockEdgeCase() throws Exception {
    CachedBlock cb1 = new CachedBlock(1000, "cb1", 1);
    CachedBlock cb2 = new CachedBlock(1500, "cb2", 2);
    CachedBlock cb3 = new CachedBlock(1000, "cb3", 3);
    CachedBlock cb4 = new CachedBlock(1500, "cb4", 4);
    CachedBlock cb5 = new CachedBlock(1000, "cb5", 5);
    CachedBlock cb6 = new CachedBlock(1750, "cb6", 6);
    CachedBlock cb7 = new CachedBlock(1000, "cb7", 7);
    CachedBlock cb8 = new CachedBlock(1500, "cb8", 8);
    CachedBlock cb9 = new CachedBlock(1000, "cb9", 9);
    CachedBlock cb10 = new CachedBlock(1500, "cb10", 10);

    LruCachedBlockQueue queue = new LruCachedBlockQueue(10000, 1000);

    queue.add(cb1);
    queue.add(cb2);
    queue.add(cb3);
    queue.add(cb4);
    queue.add(cb5);
    queue.add(cb6);
    queue.add(cb7);
    queue.add(cb8);
    queue.add(cb9);
    queue.add(cb10);

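    // cb0 is a tiny block, barely larger than the per-block overhead, and it has the oldest
    // access time of all, so it sorts above every block already in the queue.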
    CachedBlock cb0 = new CachedBlock(10 + CachedBlock.PER_BLOCK_OVERHEAD, "cb0", 0);
    queue.add(cb0);

    // This is older so we must include it, but it will not end up kicking
    // anything out, because (heapSize - cb8.heapSize + cb0.heapSize < maxSize)
    // and we must always maintain heapSize >= maxSize once we achieve it.
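    // Roughly, ignoring per-block and key overhead: 10250 - 1500 + 10 is well under 10000, so
    // cb8 is kept and cb0 simply joins the queue.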
    // We expect cb0 through cb8 to be in the queue
    long expectedSize = cb1.heapSize() + cb2.heapSize() + cb3.heapSize() +
      cb4.heapSize() + cb5.heapSize() + cb6.heapSize() + cb7.heapSize() +
      cb8.heapSize() + cb0.heapSize();

    assertEquals(expectedSize, queue.heapSize());

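    // cb0 has the oldest access time, so it comes out of pollLast() first, followed by cb1
    // through cb8.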
    for (int i = 0; i <= 8; i++) {
      assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
    }
  }

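  /**
   * Test double that pins the reported heap size and access time of an {@link LruCachedBlock},
   * so the queue's ordering and size accounting can be asserted deterministically.
   */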
  private static class CachedBlock extends LruCachedBlock {
    public CachedBlock(final long heapSize, String name, long accessTime) {
      super(new BlockCacheKey(name, 0),
          new Cacheable() {
            @Override
            public long heapSize() {
              return heapSize - CachedBlock.PER_BLOCK_OVERHEAD;
            }

            @Override
            public int getSerializedLength() {
              return 0;
            }

            @Override
            public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
            }

            @Override
            public CacheableDeserializer<Cacheable> getDeserializer() {
              // Deserialization is never exercised by these tests.
              return null;
            }

            @Override
            public BlockType getBlockType() {
              return BlockType.DATA;
            }
          }, accessTime, false);
    }
  }
}