Package org.apache.hadoop.hbase.io.hfile
Class TestPrefetchWithBucketCache
java.lang.Object
org.apache.hadoop.hbase.io.hfile.TestPrefetchWithBucketCache
-
Field Summary
Modifier and Type  Field
private org.apache.hadoop.hbase.io.hfile.BlockCache  blockCache
private org.apache.hadoop.hbase.io.hfile.CacheConfig  cacheConf
static final HBaseClassTestRule  CLASS_RULE
private org.apache.hadoop.conf.Configuration  conf
private static final int  DATA_BLOCK_SIZE
private org.apache.hadoop.fs.FileSystem  fs
private static final org.slf4j.Logger  LOG
org.junit.rules.TestName  name
private static final int  NUM_VALID_KEY_TYPES
private static final HBaseTestingUtility  TEST_UTIL
-
Constructor Summary
TestPrefetchWithBucketCache()
-
Method Summary
Modifier and Type  Method
private org.apache.hadoop.hbase.io.hfile.HFile.Reader  createReaderAndWaitForPrefetchInterruption(org.apache.hadoop.fs.Path storeFilePath)
static org.apache.hadoop.hbase.KeyValue.Type  generateKeyType(Random rand)
private void  readStoreFile(org.apache.hadoop.fs.Path storeFilePath)
private void  readStoreFile(org.apache.hadoop.fs.Path storeFilePath, BiFunction<org.apache.hadoop.hbase.io.hfile.HFile.Reader, Long, org.apache.hadoop.hbase.io.hfile.HFileBlock> readFunction, BiConsumer<org.apache.hadoop.hbase.io.hfile.BlockCacheKey, org.apache.hadoop.hbase.io.hfile.HFileBlock> validationFunction)
void  setUp()
void  tearDown()
void  testPrefetchDoesntInterruptInMemoryOnCapacity()
void  testPrefetchDoesntOverwork()
void  testPrefetchInterruptOnCapacity()
private org.apache.hadoop.fs.Path  writeStoreFile(String fname, int numKVs)
private org.apache.hadoop.fs.Path  writeStoreFile(String fname, org.apache.hadoop.hbase.io.hfile.HFileContext context, int numKVs)
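Taken together, these helpers follow a write-then-read pattern: writeStoreFile produces an HFile on the test filesystem, readStoreFile opens it with prefetch-on-open enabled so the prefetch thread pushes its blocks into the bucket cache, and the test methods assert on what ended up cached. The fragment below is a minimal sketch of that pattern, not this class's actual code; it assumes the TEST_UTIL and blockCache fields listed below are in scope and that blockCache refers directly to the BucketCache instance (in a combined-cache setup you would first unwrap the L2 cache), and it relies on the standard PrefetchExecutor.isCompleted and BucketCache.getBackingMap accessors.

  // Sketch of the typical flow; the file name and KV count are illustrative.
  org.apache.hadoop.fs.Path storeFile = writeStoreFile("testPrefetchFlow", 100);
  readStoreFile(storeFile); // opens the HFile with prefetch-on-open; blocks are queued for the bucket cache

  // Wait until the prefetch for this file has finished, then inspect the cache contents.
  TEST_UTIL.waitFor(30000,
    () -> org.apache.hadoop.hbase.io.hfile.PrefetchExecutor.isCompleted(storeFile));
  org.apache.hadoop.hbase.io.hfile.bucket.BucketCache bucketCache =
    (org.apache.hadoop.hbase.io.hfile.bucket.BucketCache) blockCache;
  // Every prefetched block should now have an entry in the bucket cache's backing map.
  org.junit.Assert.assertTrue(bucketCache.getBackingMap().size() > 0);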
-
Field Details
-
LOG
private static final org.slf4j.Logger LOG
-
CLASS_RULE
public static final HBaseClassTestRule CLASS_RULE
-
name
public org.junit.rules.TestName name
-
TEST_UTIL
private static final HBaseTestingUtility TEST_UTIL
-
NUM_VALID_KEY_TYPES
private static final int NUM_VALID_KEY_TYPES
-
DATA_BLOCK_SIZE
private static final int DATA_BLOCK_SIZE
- See Also:
Constant Field Values
-
conf
private org.apache.hadoop.conf.Configuration conf
-
cacheConf
private org.apache.hadoop.hbase.io.hfile.CacheConfig cacheConf
-
fs
private org.apache.hadoop.fs.FileSystem fs
-
blockCache
private org.apache.hadoop.hbase.io.hfile.BlockCache blockCache
-
-
Constructor Details
-
TestPrefetchWithBucketCache
public TestPrefetchWithBucketCache()
-
-
Method Details
-
setUp
public void setUp() throws IOException
- Throws:
IOException
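setUp runs before each test and, given the fields above, presumably wires a bucket cache and prefetch-on-open into the test configuration. The sketch below shows one plausible way to do that; the engine and size values are illustrative only, while the property names and factory calls are the standard HBase configuration surface.

  // Illustrative setup only; the values used by the real test may differ.
  conf = TEST_UTIL.getConfiguration();
  // Prefetch every block as soon as an HFile is opened.
  conf.setBoolean(org.apache.hadoop.hbase.io.hfile.CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
  // Back the L2 cache with an off-heap bucket cache of 32 MB (illustrative size).
  conf.set("hbase.bucketcache.ioengine", "offheap");
  conf.setInt("hbase.bucketcache.size", 32);
  fs = org.apache.hadoop.hbase.fs.HFileSystem.get(conf);
  blockCache = org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.createBlockCache(conf);
  cacheConf = new org.apache.hadoop.hbase.io.hfile.CacheConfig(conf, blockCache);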
-
tearDown
public void tearDown()
-
testPrefetchDoesntOverwork
public void testPrefetchDoesntOverwork() throws Exception
- Throws:
Exception
-
testPrefetchInterruptOnCapacity
public void testPrefetchInterruptOnCapacity() throws Exception
- Throws:
Exception
-
testPrefetchDoesntInterruptInMemoryOnCapacity
public void testPrefetchDoesntInterruptInMemoryOnCapacity() throws Exception
- Throws:
Exception
-
readStoreFile
private void readStoreFile(org.apache.hadoop.fs.Path storeFilePath) throws Exception
- Throws:
Exception
-
readStoreFile
private void readStoreFile(org.apache.hadoop.fs.Path storeFilePath, BiFunction<org.apache.hadoop.hbase.io.hfile.HFile.Reader, Long, org.apache.hadoop.hbase.io.hfile.HFileBlock> readFunction, BiConsumer<org.apache.hadoop.hbase.io.hfile.BlockCacheKey, org.apache.hadoop.hbase.io.hfile.HFileBlock> validationFunction) throws Exception
- Throws:
Exception
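The two functional parameters let each test decide how a block is fetched and what to assert about its cached counterpart. A hypothetical invocation is sketched below; the lambdas are illustrative rather than the arguments used by the actual tests, and the readBlock call is the standard HFile.Reader API.

  // Hypothetical caller: read each block through the reader and assert it is a DATA block.
  readStoreFile(storeFilePath,
    (reader, offset) -> {
      try {
        // offset, onDiskSize=-1 (unknown), cacheBlock, pread, isCompaction, updateCacheMetrics
        return reader.readBlock(offset, -1, true, false, false, true, null, null);
      } catch (java.io.IOException e) {
        throw new java.io.UncheckedIOException(e);
      }
    },
    (cacheKey, block) -> org.junit.Assert.assertEquals(
      org.apache.hadoop.hbase.io.hfile.BlockType.DATA, block.getBlockType()));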
-
createReaderAndWaitForPrefetchInterruption
private org.apache.hadoop.hbase.io.hfile.HFile.Reader createReaderAndWaitForPrefetchInterruption(org.apache.hadoop.fs.Path storeFilePath) throws Exception
- Throws:
Exception
-
writeStoreFile
private org.apache.hadoop.fs.Path writeStoreFile(String fname, int numKVs) throws IOException
- Throws:
IOException
-
writeStoreFile
private org.apache.hadoop.fs.Path writeStoreFile(String fname, org.apache.hadoop.hbase.io.hfile.HFileContext context, int numKVs) throws IOException
- Throws:
IOException
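Both overloads presumably build an HFile of random KeyValues through the StoreFileWriter builder, with this overload taking the HFileContext explicitly. The following is a minimal sketch of such a helper (the name writeStoreFileSketch is hypothetical) under the assumption that conf, cacheConf, fs, DATA_BLOCK_SIZE and TEST_UTIL are the fields above; it uses the standard StoreFileWriter.Builder and RandomKeyValueUtil test APIs, and is not this class's actual implementation.

  // Sketch only: write numKVs random KeyValues into a new store file and return its path.
  private org.apache.hadoop.fs.Path writeStoreFileSketch(String fname, int numKVs) throws java.io.IOException {
    org.apache.hadoop.fs.Path outputDir = new org.apache.hadoop.fs.Path(TEST_UTIL.getDataTestDir(), fname);
    org.apache.hadoop.hbase.io.hfile.HFileContext context =
      new org.apache.hadoop.hbase.io.hfile.HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();
    org.apache.hadoop.hbase.regionserver.StoreFileWriter writer =
      new org.apache.hadoop.hbase.regionserver.StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(outputDir).withFileContext(context).build();
    java.util.Random rand = java.util.concurrent.ThreadLocalRandom.current();
    for (int i = 0; i < numKVs; i++) {
      // Ordered keys so the writer receives cells in sort order.
      byte[] key = org.apache.hadoop.hbase.io.hfile.RandomKeyValueUtil.randomOrderedKey(rand, i);
      byte[] value = org.apache.hadoop.hbase.io.hfile.RandomKeyValueUtil.randomValue(rand);
      writer.append(new org.apache.hadoop.hbase.KeyValue(key,
        org.apache.hadoop.hbase.util.Bytes.toBytes("f"), org.apache.hadoop.hbase.util.Bytes.toBytes("q"),
        System.currentTimeMillis(), generateKeyType(rand), value));
    }
    writer.close();
    return writer.getPath();
  }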
-
generateKeyType
public static org.apache.hadoop.hbase.KeyValue.Type generateKeyType(Random rand)
-
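generateKeyType returns a random KeyValue.Type that is legal inside a store file, i.e. it never returns the Minimum/Maximum sentinel types. A hedged sketch of such a helper, assuming NUM_VALID_KEY_TYPES is KeyValue.Type.values().length - 2:

  // Sketch: half of the time return Put; otherwise pick one of the remaining valid types.
  public static org.apache.hadoop.hbase.KeyValue.Type generateKeyType(java.util.Random rand) {
    if (rand.nextBoolean()) {
      return org.apache.hadoop.hbase.KeyValue.Type.Put;
    }
    // Index 0 is the Minimum sentinel, so start at 1 and stay within the valid range.
    org.apache.hadoop.hbase.KeyValue.Type keyType =
      org.apache.hadoop.hbase.KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
    if (keyType == org.apache.hadoop.hbase.KeyValue.Type.Minimum
        || keyType == org.apache.hadoop.hbase.KeyValue.Type.Maximum) {
      throw new RuntimeException("Generated an invalid key type: " + keyType);
    }
    return keyType;
  }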