/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.security.Key;
import java.util.ArrayList;
import java.util.List;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Verifies that hbck's HFile quarantine check does not flag store files of an
 * encrypted column family as corrupt: the test writes data to a table whose
 * family is AES-encrypted, flushes it, confirms the on-disk HFiles carry the
 * expected encryption key, and then runs the quarantine checker expecting a
 * clean result.
 */
//revisit later
@Ignore
@Category({ MiscTests.class, MediumTests.class })
public class TestHBaseFsckEncryption {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHBaseFsckEncryption.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

  private Configuration conf;
  private TableDescriptor tableDescriptor;
  // The raw column-family encryption key; HFiles on disk must carry this key.
  private Key cfKey;

  /**
   * Configures crypto (HFile v3, mock AES key provider, master key name),
   * generates a random CF key, starts a 3-node minicluster, and creates the
   * test table with an encrypted "cf" family wrapping that key.
   */
  @Before
  public void setUp() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");

    // Create the test encryption key
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    Bytes.secureRandom(keyBytes);
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    cfKey = new SecretKeySpec(keyBytes, algorithm);

    // Start the minicluster
    TEST_UTIL.startMiniCluster(3);

    // Create the table with an encrypted column family; the CF key is wrapped
    // with the cluster master key so the region server can unwrap it.
    TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("default", "TestHBaseFsckEncryption"));
    ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).setEncryptionType(algorithm)
        .setEncryptionKey(EncryptionUtil.wrapKey(conf,
          conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
          cfKey))
        .build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    tableDescriptor = tableDescriptorBuilder.build();
    TEST_UTIL.getAdmin().createTable(tableDescriptor);
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
  }

  @After
  public void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Writes a small grid of rows, flushes, verifies every resulting store file
   * is encrypted with {@link #cfKey}, then runs the hbck HFile quarantine and
   * asserts nothing is reported corrupt, failed, quarantined, or missing.
   */
  @Test
  public void testFsckWithEncryption() throws Exception {
    // Populate the table with some data
    try (Table table = TEST_UTIL.getConnection().getTable(tableDescriptor.getTableName())) {
      byte[] values = { 'A', 'B', 'C', 'D' };
      for (int i = 0; i < values.length; i++) {
        for (int j = 0; j < values.length; j++) {
          Put put = new Put(new byte[] { values[i], values[j] });
          put.addColumn(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i], values[j] });
          table.put(put);
        }
      }
    }
    // Flush it
    TEST_UTIL.getAdmin().flush(tableDescriptor.getTableName());

    // Verify we have encrypted store files on disk
    final List<Path> paths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(paths.size() > 0);
    for (Path path : paths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(cfKey.getEncoded(), extractHFileKey(path)));
    }

    // Insure HBck doesn't consider them corrupt
    HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, tableDescriptor.getTableName());
    assertEquals(0, res.getRetCode());
    HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
    assertEquals(0, hfcc.getCorrupted().size());
    assertEquals(0, hfcc.getFailures().size());
    assertEquals(0, hfcc.getQuarantined().size());
    assertEquals(0, hfcc.getMissing().size());
  }

  /**
   * Returns the paths of all store files for {@code tableName} on the region
   * server hosting its first region.
   */
  private List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    // Use the tableName parameter consistently (previously getRegions() was
    // called with tableDescriptor.getTableName() instead).
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
      .getRegions(tableName)) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }

  /**
   * Opens the HFile at {@code path} and returns the encoded encryption key
   * from its crypto context, asserting both context and key are present.
   */
  private byte[] extractHFileKey(Path path) throws Exception {
    try (HFile.Reader reader =
      HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), true, conf)) {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull("Reader has a null crypto context", cryptoContext);
      Key key = cryptoContext.getKey();
      assertNotNull("Crypto context has no key", key);
      return key.getEncoded();
    }
  }

}