/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test a case when an inline index chunk is converted to a root one. This reproduces the bug in
 * HBASE-6871. We write a carefully selected number of relatively large keys so that we accumulate
 * a leaf index chunk that only goes over the configured index chunk size after adding the last
 * key/value. The bug is that when we close the file, we convert that inline (leaf-level) chunk
 * into a root chunk, but then look at the size of that root chunk, find that it is greater than
 * the configured chunk size, and split it into a number of intermediate index blocks that should
 * really be leaf-level blocks. If more keys were added, we would flush the leaf-level block, add
 * another entry to the root-level block, and that would prevent us from upgrading the leaf-level
 * chunk to the root chunk, thus not triggering the bug.
 */
@Category({ IOTests.class, SmallTests.class })
public class TestHFileInlineToRootChunkConversion {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHFileInlineToRootChunkConversion.class);

  private final HBaseTestingUtil testUtil = new HBaseTestingUtil();
  private final Configuration conf = testUtil.getConfiguration();

  @Test
  public void testWriteHFile() throws Exception {
    Path hfPath = new Path(testUtil.getDataTestDir(),
      TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
    int maxChunkSize = 1024;
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();
    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf).withFileContext(context)
      .withPath(fs, hfPath).create();
    List<byte[]> keys = new ArrayList<>();
    StringBuilder sb = new StringBuilder();

    // Append a few relatively large keys so that the leaf-level index chunk only exceeds
    // maxChunkSize after the last key/value has been added (see the class javadoc).
    for (int i = 0; i < 4; ++i) {
      sb.append("key" + String.format("%05d", i));
      sb.append("_");
      for (int j = 0; j < 100; ++j) {
        // Note: '0' + j is int arithmetic, so this appends the decimal value 48 + j;
        // the point is simply to pad each key out to roughly 250 bytes.
        sb.append('0' + j);
      }
      String keyStr = sb.toString();
      sb.setLength(0);

      byte[] k = Bytes.toBytes(keyStr);
      keys.add(k);
      byte[] v = Bytes.toBytes("value" + i);
      hfw.append(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(k)
        .setFamily(HConstants.EMPTY_BYTE_ARRAY).setQualifier(HConstants.EMPTY_BYTE_ARRAY)
        .setTimestamp(HConstants.LATEST_TIMESTAMP).setType(KeyValue.Type.Maximum.getCode())
        .setValue(v).build());
    }
    hfw.close();

    // Re-open the file and seek to every key we wrote, exercising the block index that was
    // written when the file was closed.
    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
    // Scanner doesn't do Cells yet. Fix.
    HFileScanner scanner = reader.getScanner(conf, true, true);
    for (int i = 0; i < keys.size(); ++i) {
      scanner.seekTo(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(keys.get(i)).setFamily(HConstants.EMPTY_BYTE_ARRAY)
        .setQualifier(HConstants.EMPTY_BYTE_ARRAY).setTimestamp(HConstants.LATEST_TIMESTAMP)
        .setType(KeyValue.Type.Maximum.getCode()).setValue(HConstants.EMPTY_BYTE_ARRAY).build());
    }
    reader.close();
  }
}