/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.nio.ByteBuffer;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests the concurrent TinyLfuBlockCache.
 */
@Category({ IOTests.class, SmallTests.class })
public class TestTinyLfuBlockCache {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestTinyLfuBlockCache.class);

  @Test
  public void testCacheSimple() throws Exception {

    long maxSize = 1000000;
    long blockSize = calculateBlockSizeDefault(maxSize, 101);

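    // The last constructor argument is the executor used for the cache's maintenance work;
    // Runnable::run performs that work synchronously on the caller's thread, which should keep
    // eviction behavior deterministic in these tests.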
    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateRandomBlocks(100, blockSize);

    long expectedCacheSize = cache.heapSize();

    // Confirm the cache starts empty
    for (CachedItem block : blocks) {
      assertNull(cache.getBlock(block.cacheKey, true, false, true));
    }


    // Add blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.heapSize();
    }

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check that all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Re-add the same blocks and ensure nothing has changed
    long expectedBlockCount = cache.getBlockCount();
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }
    assertEquals("Cache should ignore cache requests for blocks already in cache",
      expectedBlockCount, cache.getBlockCount());

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check that all blocks are still properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false, true);
      assertNotNull(buf);
      assertEquals(block.heapSize(), buf.heapSize());
    }

    // Expect no evictions
    assertEquals(0, cache.getStats().getEvictionCount());

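    // Finally, sanity-check that the cache can be rendered as JSON (the exact assertions live
    // in CacheTestUtils).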
    CacheTestUtils.testConvertToJSON(cache);
  }

  @Test
  public void testCacheEvictionSimple() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateFixedBlocks(11, blockSize, "block");

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // A single eviction run should have occurred
    assertEquals(1, cache.getStats().getEvictionCount());

    // The cache did not grow beyond max
    assertTrue(cache.heapSize() < maxSize);

    // All blocks except one should be in the cache
    assertEquals(10, cache.getBlockCount());
  }

  @Test
  public void testScanResistance() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] singleBlocks = generateFixedBlocks(20, blockSize, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(5, blockSize, "multi");

    // Add 5 blocks from each
    for (int i = 0; i < 5; i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
    }

    // Access the multi blocks repeatedly so the frequency sketch records them as hot
    for (int i = 0; i < 5; i++) {
      for (int j = 0; j < 10; j++) {
        CachedItem block = multiBlocks[i];
        cache.getBlock(block.cacheKey, true, false, true);
      }
    }

    // Keep "scanning" by adding more single-use blocks. From here on we expect only the cold
    // "single" blocks to be evicted, since the frequency sketch should protect the heavily
    // accessed "multi" blocks.

    for (int i = 5; i < 18; i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    for (CachedItem block : multiBlocks) {
      assertTrue(cache.cache.asMap().containsKey(block.cacheKey));
    }

    assertEquals(10, cache.getBlockCount());
    assertEquals(13, cache.getStats().getEvictionCount());
  }

  @Test
  public void testMaxBlockSize() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);
    CachedItem[] tooLong = generateFixedBlocks(10, 2 * blockSize, "long");
    CachedItem[] small = generateFixedBlocks(15, blockSize / 2, "small");

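    // Blocks above the max block size should be rejected at insert time and counted as failed
    // inserts, while the half-size blocks are admitted normally; the assertions below verify
    // both behaviors.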
    for (CachedItem i : tooLong) {
      cache.cacheBlock(i.cacheKey, i);
    }
    for (CachedItem i : small) {
      cache.cacheBlock(i.cacheKey, i);
    }
    assertEquals(15, cache.getBlockCount());
    for (CachedItem i : small) {
      assertNotNull(cache.getBlock(i.cacheKey, true, false, false));
    }
    for (CachedItem i : tooLong) {
      assertNull(cache.getBlock(i.cacheKey, true, false, false));
    }

    assertEquals(10, cache.getStats().getFailedInserts());
  }

  @Test
  public void testResizeBlockCache() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run);

    CachedItem[] blocks = generateFixedBlocks(10, blockSize, "block");

    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // Do not expect any evictions yet
    assertEquals(10, cache.getBlockCount());
    assertEquals(0, cache.getStats().getEvictionCount());

    // Resize to half capacity
    cache.setMaxSize(maxSize / 2);

    // And we expect 1/2 of the blocks to be evicted
    assertEquals(5, cache.getBlockCount());
    assertEquals(5, cache.getStats().getEvictedCount());
  }

  private CachedItem[] generateFixedBlocks(int numBlocks, int size, String pfx) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    for (int i = 0; i < numBlocks; i++) {
      blocks[i] = new CachedItem(pfx + i, size);
    }
    return blocks;
  }

  private CachedItem[] generateFixedBlocks(int numBlocks, long size, String pfx) {
    return generateFixedBlocks(numBlocks, (int) size, pfx);
  }

  private CachedItem[] generateRandomBlocks(int numBlocks, long maxSize) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    Random rand = ThreadLocalRandom.current();
    for (int i = 0; i < numBlocks; i++) {
      blocks[i] = new CachedItem("block" + i, rand.nextInt((int) maxSize) + 1);
    }
    return blocks;
  }

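  /**
   * Derives a per-block payload size such that roughly {@code numBlocks} cached blocks, plus the
   * fixed and per-entry overheads, fit just under {@code maxSize}. The overhead constants are
   * borrowed from {@link LruBlockCache}; this is a sizing approximation for test data, not an
   * exact model of TinyLfuBlockCache's accounting.
   */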
  private long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int) Math.ceil((1.2) * maxSize / roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP
      + (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY)
      + (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long) Math.floor((roughBlockSize - negateBlockSize) * 0.99f));
  }

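  /**
   * Same derivation as {@link #calculateBlockSize}, but scaled by
   * {@link LruBlockCache#DEFAULT_ACCEPTABLE_FACTOR} rather than a fixed 0.99.
   */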
  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int) Math.ceil((1.2) * maxSize / roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP
      + (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY)
      + (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += LruCachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long) Math
      .floor((roughBlockSize - negateBlockSize) * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
  }

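  /**
   * A minimal {@link Cacheable} stub: only the cache key and the reported heap size matter to the
   * block cache under test, so the serialization-related methods are deliberate no-ops.
   */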
  private static class CachedItem implements Cacheable {
    BlockCacheKey cacheKey;
    int size;

    CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;
    }

    /** The size of this item reported to the block cache layer */
    @Override
    public long heapSize() {
      return ClassSize.align(size);
    }

    @Override
    public int getSerializedLength() {
      return 0;
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return null;
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }

    @Override
    public void serialize(ByteBuffer destination, boolean includeNextBlockMetadata) {
    }
  }

}