/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.security.Key;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({RegionServerTests.class, MediumTests.class})
public class TestEncryptionKeyRotation {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestEncryptionKeyRotation.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestEncryptionKeyRotation.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final Configuration conf = TEST_UTIL.getConfiguration();
  private static final Key initialCFKey;
  private static final Key secondCFKey;

  @Rule
  public TestName name = new TestName();

  static {
    // Create the test encryption keys
    SecureRandom rng = new SecureRandom();
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    rng.nextBytes(keyBytes);
    String algorithm =
        conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    initialCFKey = new SecretKeySpec(keyBytes, algorithm);
    rng.nextBytes(keyBytes);
    secondCFKey = new SecretKeySpec(keyBytes, algorithm);
  }

  @BeforeClass
  public static void setUp() throws Exception {
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");

    // Start the minicluster
    TEST_UTIL.startMiniCluster(1);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testCFKeyRotation() throws Exception {
    // Create the table schema
    HTableDescriptor htd =
        new HTableDescriptor(TableName.valueOf("default", name.getMethodName()));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    String algorithm =
        conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    hcd.setEncryptionType(algorithm);
    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
    htd.addFamily(hcd);

    // Create the table and some on disk files
    createTableAndFlush(htd);

    // Verify we have store file(s) with the initial key
    final List<Path> initialPaths = findStorefilePaths(htd.getTableName());
    assertTrue(initialPaths.size() > 0);
    for (Path path: initialPaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }

    // Update the schema with a new encryption key
    hcd = htd.getFamily(Bytes.toBytes("cf"));
    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf,
      conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
      secondCFKey));
    TEST_UTIL.getAdmin().modifyColumnFamily(htd.getTableName(), hcd);
    Thread.sleep(5000); // Need a predicate for online schema change

    // And major compact
    TEST_UTIL.getAdmin().majorCompact(htd.getTableName());
    // waiting for the major compaction to complete
    TEST_UTIL.waitFor(30000, new Waiter.Predicate<IOException>() {
      @Override
      public boolean evaluate() throws IOException {
        return TEST_UTIL.getAdmin().getCompactionState(htd.getTableName()) ==
            CompactionState.NONE;
      }
    });
    List<Path> pathsAfterCompaction = findStorefilePaths(htd.getTableName());
    assertTrue(pathsAfterCompaction.size() > 0);
    for (Path path: pathsAfterCompaction) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(secondCFKey.getEncoded(), extractHFileKey(path)));
    }
    List<Path> compactedPaths = findCompactedStorefilePaths(htd.getTableName());
    assertTrue(compactedPaths.size() > 0);
    for (Path path: compactedPaths) {
      assertTrue("Store file " + path + " retains initial key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
  }

  @Test
  public void testMasterKeyRotation() throws Exception {
    // Create the table schema
    HTableDescriptor htd =
        new HTableDescriptor(TableName.valueOf("default", name.getMethodName()));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    String algorithm =
        conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    hcd.setEncryptionType(algorithm);
    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
    htd.addFamily(hcd);

    // Create the table and some on disk files
    createTableAndFlush(htd);

    // Verify we have store file(s) with the initial key
    List<Path> storeFilePaths = findStorefilePaths(htd.getTableName());
    assertTrue(storeFilePaths.size() > 0);
    for (Path path: storeFilePaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }

    // Now shut down the HBase cluster
    TEST_UTIL.shutdownMiniHBaseCluster();

    // "Rotate" the master key
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "other");
    conf.set(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY, "hbase");

    // Start the cluster back up
    TEST_UTIL.startMiniHBaseCluster();
    // Verify the table can still be loaded
    TEST_UTIL.waitTableAvailable(htd.getTableName(), 5000);
    // Double check that the store file keys can be unwrapped
    storeFilePaths = findStorefilePaths(htd.getTableName());
    assertTrue(storeFilePaths.size() > 0);
    for (Path path: storeFilePaths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
  }

  private static List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
        .getRegions(tableName)) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }

  private static List<Path> findCompactedStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
        .getRegions(tableName)) {
      for (HStore store : ((HRegion) region).getStores()) {
        Collection<HStoreFile> compactedfiles =
            store.getStoreEngine().getStoreFileManager().getCompactedfiles();
        if (compactedfiles != null) {
          for (HStoreFile storefile : compactedfiles) {
            paths.add(storefile.getPath());
          }
        }
      }
    }
    return paths;
  }

  private void createTableAndFlush(HTableDescriptor htd) throws Exception {
    HColumnDescriptor hcd = htd.getFamilies().iterator().next();
    // Create the test table
    TEST_UTIL.getAdmin().createTable(htd);
    TEST_UTIL.waitTableAvailable(htd.getTableName(), 5000);
    // Create a store file
    Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
    try {
      table.put(new Put(Bytes.toBytes("testrow"))
        .addColumn(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
    } finally {
      table.close();
    }
    TEST_UTIL.getAdmin().flush(htd.getTableName());
  }

  private static byte[] extractHFileKey(Path path) throws Exception {
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
      new CacheConfig(conf), true, conf);
    try {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull("Reader has a null crypto context", cryptoContext);
      Key key = cryptoContext.getKey();
      assertNotNull("Crypto context has no key", key);
      return key.getEncoded();
    } finally {
      reader.close();
    }
  }

}