/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.security.Key;
import java.util.ArrayList;
import java.util.List;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
 * Verifies that HBck's HFile quarantine check does not mistake encrypted
 * store files for corrupt ones. A mini-cluster is started with an
 * AES-encrypted column family, data is written and flushed, the on-disk
 * store files are checked to carry the expected encryption key, and then
 * HBck is run to confirm it reports no corruption.
 */
// revisit later
@Disabled
@Tag(MiscTests.TAG)
@Tag(MediumTests.TAG)
public class TestHBaseFsckEncryption {

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

  private Configuration conf;
  private TableDescriptor tableDescriptor;
  // The raw CF encryption key; store files on disk must carry this exact key.
  private Key cfKey;

  /**
   * Configures HFile v3 (required for encryption), installs the mock key
   * provider, generates a random CF key, starts a 3-node mini-cluster, and
   * creates a table whose single column family is encrypted with that key.
   */
  @BeforeEach
  public void setUp() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");

    // Create the test encryption key
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    Bytes.secureRandom(keyBytes);
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    cfKey = new SecretKeySpec(keyBytes, algorithm);

    // Start the minicluster
    TEST_UTIL.startMiniCluster(3);

    // Create the table with an encrypted column family; the CF key is
    // wrapped with the cluster master key before being stored in the schema.
    TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("default", "TestHBaseFsckEncryption"));
    ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).setEncryptionType(algorithm)
        .setEncryptionKey(EncryptionUtil.wrapKey(conf,
          conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
          cfKey))
        .build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    tableDescriptor = tableDescriptorBuilder.build();
    TEST_UTIL.getAdmin().createTable(tableDescriptor);
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
  }

  @AfterEach
  public void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Writes a small grid of rows, flushes, asserts every resulting store file
   * is encrypted with {@link #cfKey}, then runs HBck's HFile quarantine and
   * asserts it finds nothing corrupt, failed, quarantined, or missing.
   */
  @Test
  public void testFsckWithEncryption() throws Exception {
    // Populate the table with some data; try-with-resources guarantees the
    // Table is closed even if a put fails.
    try (Table table = TEST_UTIL.getConnection().getTable(tableDescriptor.getTableName())) {
      byte[] values = { 'A', 'B', 'C', 'D' };
      for (int i = 0; i < values.length; i++) {
        for (int j = 0; j < values.length; j++) {
          Put put = new Put(new byte[] { values[i], values[j] });
          put.addColumn(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i], values[j] });
          table.put(put);
        }
      }
    }
    // Flush it so the data lands in store files on disk
    TEST_UTIL.getAdmin().flush(tableDescriptor.getTableName());

    // Verify we have encrypted store files on disk
    final List<Path> paths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(paths.size() > 0);
    for (Path path : paths) {
      assertTrue(Bytes.equals(cfKey.getEncoded(), extractHFileKey(path)),
        "Store file " + path + " has incorrect key");
    }

    // Ensure HBck doesn't consider them corrupt
    HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, tableDescriptor.getTableName());
    assertEquals(0, res.getRetCode());
    HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
    assertEquals(0, hfcc.getCorrupted().size());
    assertEquals(0, hfcc.getFailures().size());
    assertEquals(0, hfcc.getQuarantined().size());
    assertEquals(0, hfcc.getMissing().size());
  }

  /**
   * Collects the paths of all store files for {@code tableName}, walking the
   * regions hosted by the region server serving the table's first region.
   */
  private List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
      .getRegions(tableDescriptor.getTableName())) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }

  /**
   * Opens the HFile at {@code path} and returns the encoded bytes of the key
   * in its encryption context, asserting the context and key exist.
   * try-with-resources closes the reader even when an assertion fails.
   */
  private byte[] extractHFileKey(Path path) throws Exception {
    try (HFile.Reader reader =
      HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), true, conf)) {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull(cryptoContext, "Reader has a null crypto context");
      Key key = cryptoContext.getKey();
      assertNotNull(key, "Crypto context has no key");
      return key.getEncoded();
    }
  }

}