/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Test the optimization that does not scan files where all key ranges are excluded.
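 * Rows "row0" through "row7" are written into multiple store files, then scanned with a key range
 * ("aaa" to "aaz") that sorts before every row key, so no store file blocks should end up in the
 * block cache for any of the parameterized bloom filter types.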
 */
@RunWith(Parameterized.class)
@Category({ IOTests.class, SmallTests.class })
public class TestScannerSelectionUsingKeyRange {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestScannerSelectionUsingKeyRange.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static TableName TABLE = TableName.valueOf("myTable");
  private static String FAMILY = "myCF";
  private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
  private static final int NUM_ROWS = 8;
  private static final int NUM_COLS_PER_ROW = 5;
  private static final int NUM_FILES = 2;

  /** Expected number of cached files per bloom type: zero, since the scan range misses all keys. */
  private static final Map<Object, Integer> TYPE_COUNT = new HashMap<>(3);
  static {
    TYPE_COUNT.put(BloomType.ROWCOL, 0);
    TYPE_COUNT.put(BloomType.ROW, 0);
    TYPE_COUNT.put(BloomType.NONE, 0);
  }

  private BloomType bloomType;
  private int expectedCount;

  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> params = new ArrayList<>();
    for (Object type : TYPE_COUNT.keySet()) {
      params.add(new Object[] { type, TYPE_COUNT.get(type) });
    }
    return params;
  }

  public TestScannerSelectionUsingKeyRange(Object type, Object count) {
    bloomType = (BloomType) type;
    expectedCount = (Integer) count;
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.cleanupTestDir();
  }

  @Test
  public void testScannerSelection() throws IOException {
    Configuration conf = TEST_UTIL.getConfiguration();
    // Keep the compaction threshold high so the flushed store files are not compacted away.
    conf.setInt("hbase.hstore.compactionThreshold", 10000);
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TABLE)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_BYTES)
        .setBlockCacheEnabled(true).setBloomFilterType(bloomType).build())
      .build();

    RegionInfo info = RegionInfoBuilder.newBuilder(TABLE).build();
    HRegion region =
      HBaseTestingUtil.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), conf, tableDescriptor);

    // Write NUM_FILES batches of rows, flushing after each batch to create separate store files.
    for (int iFile = 0; iFile < NUM_FILES; ++iFile) {
      for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
        Put put = new Put(Bytes.toBytes("row" + iRow));
        for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
          put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
            Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
        }
        region.put(put);
      }
      region.flush(true);
    }

    // Scan a range that falls before every written row, so all store files should be skipped.
    Scan scan = new Scan().withStartRow(Bytes.toBytes("aaa")).withStopRow(Bytes.toBytes("aaz"));
    BlockCache cache = BlockCacheFactory.createBlockCache(conf);
    InternalScanner scanner = region.getScanner(scan);
    List<Cell> results = new ArrayList<>();
    // Drain the scanner; the key range excludes every row, so no cells should come back.
    while (scanner.next(results)) {
    }
    scanner.close();
    assertEquals(0, results.size());
    if (cache instanceof LruBlockCache) {
      Set<String> accessedFiles = ((LruBlockCache) cache).getCachedFileNamesForTest();
      assertEquals(expectedCount, accessedFiles.size());
    }
    HBaseTestingUtil.closeRegionAndWAL(region);
  }
}