/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.InnerStoreCellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MetaCellComparator;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos;

@RunWith(Parameterized.class)
@Category({ IOTests.class, SmallTests.class })
public class TestFixedFileTrailer {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestFixedFileTrailer.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class);
  private static final int MAX_COMPARATOR_NAME_LENGTH = 128;
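
  // The FixedFileTrailer is the fixed-size record at the very end of every HFile. The tests
  // below round-trip it through serialize()/deserialize() and through its protobuf form to
  // guard on-disk compatibility across format versions.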

  /**
   * The number of fields used by each version, indexed by version minus two (the minimum version
   * we support is V2).
   */
  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };

  private HBaseTestingUtility util = new HBaseTestingUtility();
  private FileSystem fs;
  private ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private int version;

  static {
    assert NUM_FIELDS_BY_VERSION.length == HFile.MAX_FORMAT_VERSION - HFile.MIN_FORMAT_VERSION + 1;
  }

  public TestFixedFileTrailer(int version) {
    this.version = version;
  }

  @Rule
  public ExpectedException expectedEx = ExpectedException.none();

  @Parameters
  public static Collection<Object[]> getParameters() {
    List<Object[]> versionsToTest = new ArrayList<>();
    for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v) {
      versionsToTest.add(new Integer[] { v });
    }
    return versionsToTest;
  }

  @Before
  public void setUp() throws IOException {
    fs = FileSystem.get(util.getConfiguration());
  }

  @Test
  public void testComparatorIsHBase1Compatible() {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
    assertEquals(CellComparatorImpl.COMPARATOR.getClass().getName(), t.getComparatorClassName());
    HFileProtos.FileTrailerProto pb = t.toProtobuf();
    assertEquals(KeyValue.COMPARATOR.getClass().getName(), pb.getComparatorClassName());
    t.setComparatorClass(MetaCellComparator.META_COMPARATOR.getClass());
    pb = t.toProtobuf();
    assertEquals(KeyValue.META_COMPARATOR.getClass().getName(), pb.getComparatorClassName());
  }
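
  // createComparator() must accept both the legacy (pre-2.0) comparator names persisted in old
  // HFiles and the current class names; the test below walks through both sets of spellings.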

  @Test
  public void testCreateComparator() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    try {
      assertEquals(InnerStoreCellComparator.class,
        t.createComparator(KeyValue.COMPARATOR.getLegacyKeyComparatorName()).getClass());
      assertEquals(InnerStoreCellComparator.class,
        t.createComparator(KeyValue.COMPARATOR.getClass().getName()).getClass());
      assertEquals(InnerStoreCellComparator.class,
        t.createComparator(CellComparator.class.getName()).getClass());
      assertEquals(MetaCellComparator.class,
        t.createComparator(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName()).getClass());
      assertEquals(MetaCellComparator.class,
        t.createComparator(KeyValue.META_COMPARATOR.getClass().getName()).getClass());
      assertEquals(MetaCellComparator.class,
        t.createComparator("org.apache.hadoop.hbase.CellComparator$MetaCellComparator").getClass());
      assertEquals(MetaCellComparator.class,
        t.createComparator("org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator")
          .getClass());
      assertEquals(MetaCellComparator.class,
        t.createComparator(MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass());
      assertEquals(MetaCellComparator.META_COMPARATOR.getClass(),
        t.createComparator(MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass());
      assertEquals(CellComparatorImpl.COMPARATOR.getClass(),
        t.createComparator(MetaCellComparator.COMPARATOR.getClass().getName()).getClass());
      assertNull(t.createComparator(Bytes.BYTES_RAWCOMPARATOR.getClass().getName()));
      assertNull(t.createComparator("org.apache.hadoop.hbase.KeyValue$RawBytesComparator"));
    } catch (IOException e) {
      fail("Unexpected exception while testing FixedFileTrailer#createComparator(), "
        + e.getMessage());
    }

    // Test an invalid comparatorClassName
    expectedEx.expect(IOException.class);
    t.createComparator("");
  }
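
  /**
   * Illustrative only (not called by the tests): a minimal sketch of the in-memory
   * serialize/deserialize round trip that testTrailer() below performs inline, using only the
   * FixedFileTrailer API already exercised in this class.
   */
  @SuppressWarnings("unused")
  private FixedFileTrailer roundTripTrailer(FixedFileTrailer t) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(out);
    t.serialize(dos); // Writes exactly FixedFileTrailer.getTrailerSize(majorVersion) bytes.
    dos.flush();
    FixedFileTrailer copy = new FixedFileTrailer(t.getMajorVersion(), t.getMinorVersion());
    copy.deserialize(new DataInputStream(new ByteArrayInputStream(out.toByteArray())));
    return copy;
  }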

  @Test
  public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1);

    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(InnerStoreCellComparator.INNER_STORE_COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.

    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);

    t.setTotalUncompressedBytes(129731987);

    {
      DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
      t.serialize(dos);
      dos.flush();
      assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();

    assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing, trying to read.
    {
      DataInputStream dis = new DataInputStream(bais);
      FixedFileTrailer t2 =
        new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
      t2.deserialize(dis);
      assertEquals(-1, bais.read()); // Ensure we have read everything.
      checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);

    {
      for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1,
        HFile.MAX_FORMAT_VERSION + 1 }) {
        bytes[bytes.length - 1] = invalidVersion;
        writeTrailer(trailerPath, null, bytes);
        try {
          readTrailer(trailerPath);
          fail("Exception expected");
        } catch (IllegalArgumentException ex) {
          // Make it easy to debug this.
          String msg = ex.getMessage();
          String cleanMsg = msg.replaceAll("^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
          // will be followed by " expected: ..."
          assertEquals("Actual exception message is \"" + msg + "\".\nCleaned-up message",
            "Invalid HFile version: " + invalidVersion, cleanMsg);
          LOG.info("Got an expected exception: " + msg);
        }
      }
    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals(
      "Invalid number of fields in the string representation of the trailer: " + trailerStr,
      NUM_FIELDS_BY_VERSION[version - 2], trailerStr.split(", ").length);
    assertEquals(trailerStr, t4.toString());
  }

  @Test
  public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
      FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
      t.setDataIndexCount(3);
      t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
      t.setLastDataBlockOffset(291);
      t.setNumDataIndexLevels(3);
      t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
      t.setUncompressedDataIndexSize(827398717L); // Something random.
      t.setLoadOnOpenOffset(128);
      t.setMetaIndexCount(7);
      t.setTotalUncompressedBytes(129731987);

      {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        serializeAsWritable(dos, t);
        dos.flush();
        assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
      }

      byte[] bytes = baos.toByteArray();
      baos.reset();
      assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

      ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
      {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 =
          new FixedFileTrailer(version, HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
      }
    }
  }

  // Copied from FixedFileTrailer so we can exercise the reading path against trailers in the
  // old non-PB (Writable) serialization.
  private void serializeAsWritable(DataOutputStream output, FixedFileTrailer fft)
    throws IOException {
    BlockType.TRAILER.write(output);
    output.writeLong(fft.getFileInfoOffset());
    output.writeLong(fft.getLoadOnOpenDataOffset());
    output.writeInt(fft.getDataIndexCount());
    output.writeLong(fft.getUncompressedDataIndexSize());
    output.writeInt(fft.getMetaIndexCount());
    output.writeLong(fft.getTotalUncompressedBytes());
    output.writeLong(fft.getEntryCount());
    output.writeInt(fft.getCompressionCodec().ordinal());
    output.writeInt(fft.getNumDataIndexLevels());
    output.writeLong(fft.getFirstDataBlockOffset());
    output.writeLong(fft.getLastDataBlockOffset());
    Bytes.writeStringFixedSize(output, fft.getComparatorClassName(), MAX_COMPARATOR_NAME_LENGTH);
    output
      .writeInt(FixedFileTrailer.materializeVersion(fft.getMajorVersion(), fft.getMinorVersion()));
  }
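
  // FixedFileTrailer.readFromStream() locates the trailer relative to the end of the file using
  // the supplied length, which is why the junk byte that writeTrailer() prepends below is
  // harmless.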

  private FixedFileTrailer readTrailer(Path trailerPath) throws IOException {
    FSDataInputStream fsdis = fs.open(trailerPath);
    FixedFileTrailer trailerRead =
      FixedFileTrailer.readFromStream(fsdis, fs.getFileStatus(trailerPath).getLen());
    fsdis.close();
    return trailerRead;
  }

  private void writeTrailer(Path trailerPath, FixedFileTrailer t, byte[] useBytesInstead)
    throws IOException {
    assert (t == null) != (useBytesInstead == null); // Expect one non-null.

    FSDataOutputStream fsdos = fs.create(trailerPath);
    fsdos.write(135); // to make deserializer's job less trivial
    if (useBytesInstead != null) {
      fsdos.write(useBytesInstead);
    } else {
      t.serialize(fsdos);
    }
    fsdos.close();
  }

  private void checkLoadedTrailer(int version, FixedFileTrailer expected, FixedFileTrailer loaded)
    throws IOException {
    assertEquals(version, loaded.getMajorVersion());
    assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount());

    assertEquals(
      Math.min(expected.getEntryCount(), version == 1 ? Integer.MAX_VALUE : Long.MAX_VALUE),
      loaded.getEntryCount());

    if (version == 1) {
      assertEquals(expected.getFileInfoOffset(), loaded.getFileInfoOffset());
    }

    if (version == 2) {
      assertEquals(expected.getLastDataBlockOffset(), loaded.getLastDataBlockOffset());
      assertEquals(expected.getNumDataIndexLevels(), loaded.getNumDataIndexLevels());
      assertEquals(expected.createComparator().getClass().getName(),
        loaded.createComparator().getClass().getName());
      assertEquals(expected.getFirstDataBlockOffset(), loaded.getFirstDataBlockOffset());
      assertTrue(expected.createComparator() instanceof CellComparatorImpl);
      assertEquals(expected.getUncompressedDataIndexSize(), loaded.getUncompressedDataIndexSize());
    }

    assertEquals(expected.getLoadOnOpenDataOffset(), loaded.getLoadOnOpenDataOffset());
    assertEquals(expected.getMetaIndexCount(), loaded.getMetaIndexCount());

    assertEquals(expected.getTotalUncompressedBytes(), loaded.getTotalUncompressedBytes());
  }

}