/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.compress.CanReinit;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.util.BlockIOUtils;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * A default implementation of {@link HFileBlockDecodingContext}. It assumes the block data section
 * is compressed as a whole.
 * @see HFileBlockDefaultEncodingContext for the default compression context
 */
@InterfaceAudience.Private
public class HFileBlockDefaultDecodingContext implements HFileBlockDecodingContext {
  private final Configuration conf;
  private final HFileContext fileContext;
  private TagCompressionContext tagCompressionContext;

  public HFileBlockDefaultDecodingContext(Configuration conf, HFileContext fileContext) {
    this.conf = conf;
    this.fileContext = fileContext;
  }

  @Override
  public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
    ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
    final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
    InputStream dataInputStream = new DataInputStream(byteBuffInputStream);

    try {
      Encryption.Context cryptoContext = fileContext.getEncryptionContext();
      if (cryptoContext != Encryption.Context.NONE) {

        Cipher cipher = cryptoContext.getCipher();
        Decryptor decryptor = cipher.getDecryptor();
        decryptor.setKey(cryptoContext.getKey());

        // Encrypted block format:
        // +--------------------------+
        // | byte iv length           |
        // +--------------------------+
        // | iv data ...              |
        // +--------------------------+
        // | encrypted block data ... |
        // +--------------------------+

        // All encrypted blocks will have a nonzero IV length. An IV length of
        // zero means the encoding context had 0 bytes of plaintext to encode,
        // so there is nothing to decrypt.
        int ivLength = dataInputStream.read();
        if (ivLength > 0) {
          byte[] iv = new byte[ivLength];
          IOUtils.readFully(dataInputStream, iv);
          decryptor.setIv(iv);
          decryptor.reset();
          dataInputStream = decryptor.createDecryptionStream(dataInputStream);
        }
        onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
      }

      Compression.Algorithm compression = fileContext.getCompression();
      if (compression != Compression.Algorithm.NONE) {
        Decompressor decompressor = null;
        try {
          decompressor = compression.getDecompressor();
          // Some algorithms don't return a decompressor and accept null as a valid
          // argument when creating a decompression stream. We can ignore those cases
          // with respect to reinit.
          if (decompressor instanceof CanReinit) {
            ((CanReinit) decompressor).reinit(conf);
          }
          try (InputStream is =
            compression.createDecompressionStream(dataInputStream, decompressor, 0)) {
            BlockIOUtils.readFullyWithHeapBuffer(is, blockBufferWithoutHeader,
              uncompressedSizeWithoutHeader);
          }
        } finally {
          if (decompressor != null) {
            compression.returnDecompressor(decompressor);
          }
        }
      } else {
        // Uncompressed data: copy the (possibly decrypted) bytes straight through.
        BlockIOUtils.readFullyWithHeapBuffer(dataInputStream, blockBufferWithoutHeader,
          onDiskSizeWithoutHeader);
      }
    } finally {
      byteBuffInputStream.close();
      dataInputStream.close();
    }
  }

  @Override
  public HFileContext getHFileContext() {
    return this.fileContext;
  }

  public TagCompressionContext getTagCompressionContext() {
    return tagCompressionContext;
  }

  public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
    this.tagCompressionContext = tagCompressionContext;
  }
}