/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HTestConst.addContent;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

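/**
 * Verify that a scan over a freshly flushed store file reads the expected number of HFile blocks
 * (block cache hits plus misses), both with and without data block encoding.
 */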
@SuppressWarnings("deprecation")
@Category({ RegionServerTests.class, SmallTests.class })
public class TestBlocksScanned {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBlocksScanned.class);

  private static final byte[] FAMILY = Bytes.toBytes("family");
  private static final byte[] COL = Bytes.toBytes("col");
  private static final byte[] START_KEY = Bytes.toBytes("aaa");
  private static final byte[] END_KEY = Bytes.toBytes("zzz");
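  // A deliberately tiny block size so even a small flush produces multiple HFile blocks.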
  private static final int BLOCK_SIZE = 70;

  private static HBaseTestingUtil TEST_UTIL = null;
  private Configuration conf;
  private Path testDir;

  @Before
  public void setUp() throws Exception {
    TEST_UTIL = new HBaseTestingUtil();
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir("TestBlocksScanned");
  }

  @Test
  public void testBlocksScanned() throws Exception {
    byte[] tableName = Bytes.toBytes("TestBlocksScanned");
    TableDescriptor tableDescriptor =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(10)
          .setBlockCacheEnabled(true).setBlocksize(BLOCK_SIZE)
          .setCompressionType(Compression.Algorithm.NONE).build())
        .build();
    _testBlocksScanned(tableDescriptor);
  }

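  // Same scenario as above, but with FAST_DIFF data block encoding; the block-read accounting
  // should be unchanged.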
  @Test
  public void testBlocksScannedWithEncoding() throws Exception {
    byte[] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
    TableDescriptor tableDescriptor =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(10)
          .setBlockCacheEnabled(true).setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
          .setBlocksize(BLOCK_SIZE).setCompressionType(Compression.Algorithm.NONE).build())
        .build();
    _testBlocksScanned(tableDescriptor);
  }

  private void _testBlocksScanned(TableDescriptor td) throws Exception {
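    // Use a dedicated block cache so the hit/miss counters reflect only this test's reads.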
    BlockCache blockCache = BlockCacheFactory.createBlockCache(conf);
    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(td.getTableName()).setStartKey(START_KEY)
      .setEndKey(END_KEY).build();
    HRegion r = HBaseTestingUtil.createRegionAndWAL(regionInfo, testDir, conf, td, blockCache);
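    // Load rows across the region's key range, then flush so the scan below reads from HFile
    // blocks and is counted by the block cache.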
    addContent(r, FAMILY, COL);
    r.flush(true);

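    // Record a baseline of cache activity so we can measure only the blocks touched by the scan.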
    CacheStats stats = blockCache.getStats();
    long before = stats.getHitCount() + stats.getMissCount();
    // Scan a narrow slice of the key space: rows "aaa" (inclusive) through "aaz" (exclusive),
    // reading a single version of one column.
    Scan scan = new Scan().withStartRow(Bytes.toBytes("aaa")).withStopRow(Bytes.toBytes("aaz"))
      .setReadType(Scan.ReadType.PREAD);
    scan.addColumn(FAMILY, COL);
    scan.readVersions(1);

    InternalScanner s = r.getScanner(scan);
    List<ExtendedCell> results = new ArrayList<>();
    // Drain the scanner; each next() call appends the current row's cells to results.
    while (s.next(results)) {
      continue;
    }
    s.close();

    // Rows "aaa" through "aay", one cell per row: 'z' - 'a' == 25 cells.
    int expectResultSize = 'z' - 'a';
    assertEquals(expectResultSize, results.size());

    // Cells are uniformly sized, so derive how many fit in a single block; with this data and
    // BLOCK_SIZE that works out to two KeyValues per block.
    int kvPerBlock = (int) Math
      .ceil(BLOCK_SIZE / (double) KeyValueUtil.ensureKeyValue(results.get(0)).getLength());
    assertEquals(2, kvPerBlock);

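    // With pread, each data block access also consults the block index, so the test expects one
    // index block read per data block read.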
    long expectDataBlockRead = (long) Math.ceil(expectResultSize / (double) kvPerBlock);
    long expectIndexBlockRead = expectDataBlockRead;

    assertEquals(expectIndexBlockRead + expectDataBlockRead,
      stats.getHitCount() + stats.getMissCount() - before);

    // Close the region and its WAL so each test method leaves no open resources behind.
    HBaseTestingUtil.closeRegionAndWAL(r);
  }
}