/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests that HFiles written with an AES encryption context can be read back
 * correctly, at both the raw-block level and the full writer/reader level,
 * across the compression and data-block-encoding combinations used by
 * {@link TestHFileBlock}.
 */
@Category({IOTests.class, SmallTests.class})
public class TestHFileEncryption {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHFileEncryption.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final SecureRandom RNG = new SecureRandom();

  private static FileSystem fs;
  private static Encryption.Context cryptoContext;

  /**
   * Configures an AES encryption context with a random key, a test key
   * provider, and HFile v3 (required for encryption metadata). Block cache is
   * disabled so every read goes to disk.
   */
  @BeforeClass
  public static void setUp() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    // Disable block cache in this test.
    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    // Encryption metadata requires HFile format version 3 or later.
    conf.setInt("hfile.format.version", 3);

    fs = FileSystem.get(conf);

    cryptoContext = Encryption.newContext(conf);
    String algorithm =
        conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    Cipher aes = Encryption.getCipher(conf, algorithm);
    assertNotNull(aes);
    cryptoContext.setCipher(aes);
    byte[] key = new byte[aes.getKeyLength()];
    RNG.nextBytes(key);
    cryptoContext.setKey(key);
  }

  /**
   * Writes a single DATA block of {@code size} sequential ints to {@code os}
   * using the given file context (which carries the encryption settings).
   * @return the on-disk size of the block including its header
   */
  private int writeBlock(FSDataOutputStream os, HFileContext fileContext, int size)
      throws IOException {
    HFileBlock.Writer hbw = new HFileBlock.Writer(null, fileContext);
    DataOutputStream dos = hbw.startWriting(BlockType.DATA);
    for (int j = 0; j < size; j++) {
      dos.writeInt(j);
    }
    hbw.writeHeaderAndData(os);
    // Note: the second label previously (and wrongly) read
    // "uncompressedSizeWithoutHeader" for the on-disk value.
    LOG.info("Wrote a block at {} with onDiskSizeWithHeader={}"
        + " onDiskSizeWithoutHeader={} uncompressedSizeWithoutHeader={}",
        os.getPos(), hbw.getOnDiskSizeWithHeader(), hbw.getOnDiskSizeWithoutHeader(),
        hbw.getUncompressedSizeWithoutHeader());
    return hbw.getOnDiskSizeWithHeader();
  }

  /**
   * Reads the block at {@code pos}, verifies checksums passed, unpacks
   * (decrypts/decompresses) it, and checks the payload is the sequence
   * {@code 0..size-1} written by {@link #writeBlock}.
   * @return the on-disk size of the block including its header, i.e. the
   *         offset delta to the next block
   */
  private long readAndVerifyBlock(long pos, HFileContext ctx, HFileBlock.FSReaderImpl hbr, int size)
      throws IOException {
    HFileBlock b = hbr.readBlockData(pos, -1, false, false);
    assertEquals(0, HFile.getAndResetChecksumFailuresCount());
    b.sanityCheck();
    // The block should arrive still packed (encrypted/compressed on disk).
    assertFalse(b.isUnpacked());
    b = b.unpack(ctx, hbr);
    LOG.info("Read a block at {} with onDiskSizeWithHeader={}"
        + " onDiskSizeWithoutHeader={} uncompressedSizeWithoutHeader={}",
        pos, b.getOnDiskSizeWithHeader(), b.getOnDiskSizeWithoutHeader(),
        b.getUncompressedSizeWithoutHeader());
    DataInputStream dis = b.getByteStream();
    for (int i = 0; i < size; i++) {
      int read = dis.readInt();
      if (read != i) {
        fail("Block data corrupt at element " + i);
      }
    }
    return b.getOnDiskSizeWithHeader();
  }

  /**
   * Round-trips raw encrypted blocks for each compression algorithm and
   * verifies every block decrypts back to the data that was written.
   */
  @Test
  public void testDataBlockEncryption() throws IOException {
    final int blocks = 10;
    final int[] blockSizes = new int[blocks];
    for (int i = 0; i < blocks; i++) {
      // Random payload between 1KB and 64KB, expressed in int elements.
      blockSizes[i] = (1024 + RNG.nextInt(1024 * 63)) / Bytes.SIZEOF_INT;
    }
    for (Compression.Algorithm compression : TestHFileBlock.COMPRESSION_ALGORITHMS) {
      Path path = new Path(TEST_UTIL.getDataTestDir(), "block_v3_" + compression + "_AES");
      LOG.info("testDataBlockEncryption: encryption=AES compression={}", compression);
      long totalSize = 0;
      HFileContext fileContext = new HFileContextBuilder()
          .withCompression(compression)
          .withEncryptionContext(cryptoContext)
          .build();
      FSDataOutputStream os = fs.create(path);
      try {
        for (int i = 0; i < blocks; i++) {
          totalSize += writeBlock(os, fileContext, blockSizes[i]);
        }
      } finally {
        os.close();
      }
      FSDataInputStream is = fs.open(path);
      try {
        HFileBlock.FSReaderImpl hbr = new HFileBlock.FSReaderImpl(is, totalSize, fileContext);
        long pos = 0;
        for (int i = 0; i < blocks; i++) {
          pos += readAndVerifyBlock(pos, fileContext, hbr, blockSizes[i]);
        }
      } finally {
        is.close();
      }
    }
  }

  /**
   * Writes one encrypted HFile and verifies the reader surfaces the expected
   * crypto metadata: an encryption key in the trailer, and a cipher/key in
   * the reader's file context matching what was used to write.
   */
  @Test
  public void testHFileEncryptionMetadata() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext fileContext = new HFileContextBuilder()
        .withEncryptionContext(cryptoContext)
        .build();

    // write a simple encrypted hfile
    Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
    FSDataOutputStream out = fs.create(path);
    HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(out)
        .withFileContext(fileContext)
        .create();
    try {
      KeyValue kv = new KeyValue("foo".getBytes(), "f1".getBytes(), null, "value".getBytes());
      writer.append(kv);
    } finally {
      writer.close();
      out.close();
    }

    // read it back in and validate correct crypto metadata
    HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
    try {
      reader.loadFileInfo();
      FixedFileTrailer trailer = reader.getTrailer();
      assertNotNull(trailer.getEncryptionKey());
      Encryption.Context readerContext = reader.getFileContext().getEncryptionContext();
      assertEquals(readerContext.getCipher().getName(), cryptoContext.getCipher().getName());
      assertTrue(Bytes.equals(readerContext.getKeyBytes(),
          cryptoContext.getKeyBytes()));
    } finally {
      reader.close();
    }
  }

  /**
   * Full writer/reader round trip of 1000 random KVs for every data block
   * encoding x compression combination, followed by 100 random pread seeks.
   */
  @Test
  public void testHFileEncryption() throws Exception {
    // Create 1000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(1000);

    // Iterate through data block encoding and compression combinations
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (Compression.Algorithm compression : TestHFileBlock.COMPRESSION_ALGORITHMS) {
        HFileContext fileContext = new HFileContextBuilder()
            .withBlockSize(4096) // small blocks
            .withEncryptionContext(cryptoContext)
            .withCompression(compression)
            .withDataBlockEncoding(encoding)
            .build();
        // write a new test HFile
        LOG.info("Writing with {}", fileContext);
        Path path = new Path(TEST_UTIL.getDataTestDir(),
            TEST_UTIL.getRandomUUID().toString() + ".hfile");
        FSDataOutputStream out = fs.create(path);
        HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
            .withOutputStream(out)
            .withFileContext(fileContext)
            .create();
        try {
          for (KeyValue kv : testKvs) {
            writer.append(kv);
          }
        } finally {
          writer.close();
          out.close();
        }

        // read it back in
        LOG.info("Reading with {}", fileContext);
        int i = 0;
        HFileScanner scanner = null;
        HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
        try {
          reader.loadFileInfo();
          FixedFileTrailer trailer = reader.getTrailer();
          assertNotNull(trailer.getEncryptionKey());
          scanner = reader.getScanner(false, false);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          do {
            Cell kv = scanner.getCell();
            assertTrue("Read back an unexpected or invalid KV",
                testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
            i++;
          } while (scanner.next());
        } finally {
          // Guard against an NPE masking the real failure when an exception
          // is thrown before the scanner is assigned; close scanner before
          // the reader it came from.
          if (scanner != null) {
            scanner.close();
          }
          reader.close();
        }

        // JUnit contract is (message, expected, actual): we expect all
        // written KVs to have been read back.
        assertEquals("Did not read back as many KVs as written", testKvs.size(), i);

        // Test random seeks with pread
        LOG.info("Random seeking with {}", fileContext);
        reader = HFile.createReader(fs, path, cacheConf, true, conf);
        try {
          scanner = reader.getScanner(false, true);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          for (i = 0; i < 100; i++) {
            KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
            assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
          }
        } finally {
          if (scanner != null) {
            scanner.close();
          }
          reader.close();
        }
      }
    }
  }

}