/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ IOTests.class, SmallTests.class })
public class TestHFileEncryption {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHFileEncryption.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  private static FileSystem fs;
  private static Encryption.Context cryptoContext;

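  /**
   * Shared test setup: disable the block cache, install a test key provider
   * with a master key named "hbase", require HFile format version 3 (the
   * first format version that supports encryption), and build an AES crypto
   * context with a randomly generated key.
   */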
  @BeforeClass
  public static void setUp() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    // Disable block cache in this test.
    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    conf.setInt("hfile.format.version", 3);

    fs = FileSystem.get(conf);

    cryptoContext = Encryption.newContext(conf);
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    Cipher aes = Encryption.getCipher(conf, algorithm);
    assertNotNull(aes);
    cryptoContext.setCipher(aes);
    byte[] key = new byte[aes.getKeyLength()];
    Bytes.secureRandom(key);
    cryptoContext.setKey(key);
  }

  private int writeBlock(Configuration conf, FSDataOutputStream os, HFileContext fileContext,
    int size) throws IOException {
    HFileBlock.Writer hbw = new HFileBlock.Writer(conf, null, fileContext);
    DataOutputStream dos = hbw.startWriting(BlockType.DATA);
    for (int j = 0; j < size; j++) {
      dos.writeInt(j);
    }
    hbw.writeHeaderAndData(os);
    LOG.info("Wrote a block at " + os.getPos() + " with onDiskSizeWithHeader="
      + hbw.getOnDiskSizeWithHeader() + " onDiskSizeWithoutHeader="
      + hbw.getOnDiskSizeWithoutHeader() + " uncompressedSizeWithoutHeader="
      + hbw.getUncompressedSizeWithoutHeader());
    return hbw.getOnDiskSizeWithHeader();
  }

  private long readAndVerifyBlock(long pos, HFileContext ctx, HFileBlock.FSReaderImpl hbr, int size)
    throws IOException {
    HFileBlock b = hbr.readBlockData(pos, -1, false, false, true);
    assertEquals(0, HFile.getAndResetChecksumFailuresCount());
    b.sanityCheck();
    // A compressed block must not present itself as already unpacked.
    assertFalse(
      (b.getHFileContext().getCompression() != Compression.Algorithm.NONE) && b.isUnpacked());
    b = b.unpack(ctx, hbr);
    LOG.info(
      "Read a block at " + pos + " with onDiskSizeWithHeader=" + b.getOnDiskSizeWithHeader()
        + " onDiskSizeWithoutHeader=" + b.getOnDiskSizeWithoutHeader()
        + " uncompressedSizeWithoutHeader=" + b.getUncompressedSizeWithoutHeader());
    DataInputStream dis = b.getByteStream();
    for (int i = 0; i < size; i++) {
      int read = dis.readInt();
      if (read != i) {
        fail("Block data corrupt at element " + i);
      }
    }
    return b.getOnDiskSizeWithHeader();
  }

  @Test
  public void testDataBlockEncryption() throws IOException {
    final int blocks = 10;
    final int[] blockSizes = new int[blocks];
    final Random rand = ThreadLocalRandom.current();
    for (int i = 0; i < blocks; i++) {
      blockSizes[i] = (1024 + rand.nextInt(1024 * 63)) / Bytes.SIZEOF_INT;
    }
    for (Compression.Algorithm compression : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
      Path path = new Path(TEST_UTIL.getDataTestDir(), "block_v3_" + compression + "_AES");
      LOG.info("testDataBlockEncryption: encryption=AES compression=" + compression);
      long totalSize = 0;
      HFileContext fileContext = new HFileContextBuilder().withCompression(compression)
        .withEncryptionContext(cryptoContext).build();
      FSDataOutputStream os = fs.create(path);
      try {
        for (int i = 0; i < blocks; i++) {
          totalSize += writeBlock(TEST_UTIL.getConfiguration(), os, fileContext, blockSizes[i]);
        }
      } finally {
        os.close();
      }
      FSDataInputStream is = fs.open(path);
      ReaderContext context =
        new ReaderContextBuilder().withInputStreamWrapper(new FSDataInputStreamWrapper(is))
          .withFilePath(path).withFileSystem(fs).withFileSize(totalSize).build();
      try {
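        // Read the blocks back using the same HFileContext that wrote them;
        // the reader takes its decryption cipher and key from the context's
        // encryption settings.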
        HFileBlock.FSReaderImpl hbr = new HFileBlock.FSReaderImpl(context, fileContext,
          ByteBuffAllocator.HEAP, TEST_UTIL.getConfiguration());
        long pos = 0;
        for (int i = 0; i < blocks; i++) {
          pos += readAndVerifyBlock(pos, fileContext, hbr, blockSizes[i]);
        }
      } finally {
        is.close();
      }
    }
  }

  @Test
  public void testHFileEncryptionMetadata() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext fileContext =
      new HFileContextBuilder().withEncryptionContext(cryptoContext).build();

    // write a simple encrypted hfile
    Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
    FSDataOutputStream out = fs.create(path);
    HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withOutputStream(out)
      .withFileContext(fileContext).create();
    try {
      KeyValue kv = new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null,
        Bytes.toBytes("value"));
      writer.append(kv);
    } finally {
      writer.close();
      out.close();
    }

    // read it back in and validate correct crypto metadata
    HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
    try {
      FixedFileTrailer trailer = reader.getTrailer();
      assertNotNull(trailer.getEncryptionKey());
      Encryption.Context readerContext = reader.getFileContext().getEncryptionContext();
      assertEquals(cryptoContext.getCipher().getName(), readerContext.getCipher().getName());
      assertTrue(Bytes.equals(readerContext.getKeyBytes(), cryptoContext.getKeyBytes()));
    } finally {
      reader.close();
    }
  }

  @Test
  public void testHFileEncryption() throws Exception {
    // Create 1000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(1000);

    // Iterate through data block encoding and compression combinations
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (Compression.Algorithm compression : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
        HFileContext fileContext = new HFileContextBuilder().withBlockSize(4096) // small blocks
          .withEncryptionContext(cryptoContext).withCompression(compression)
          .withDataBlockEncoding(encoding).build();
        // write a new test HFile
        LOG.info("Writing with " + fileContext);
        Path path = new Path(TEST_UTIL.getDataTestDir(),
          HBaseTestingUtility.getRandomUUID().toString() + ".hfile");
        FSDataOutputStream out = fs.create(path);
        HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withOutputStream(out)
          .withFileContext(fileContext).create();
        try {
          for (KeyValue kv : testKvs) {
            writer.append(kv);
          }
        } finally {
          writer.close();
          out.close();
        }

        // read it back in
        LOG.info("Reading with " + fileContext);
        int i = 0;
        HFileScanner scanner = null;
        HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
        try {
          FixedFileTrailer trailer = reader.getTrailer();
          assertNotNull(trailer.getEncryptionKey());
          scanner = reader.getScanner(conf, false, false);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          do {
            Cell kv = scanner.getCell();
            assertTrue("Read back an unexpected or invalid KV",
              testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
            i++;
          } while (scanner.next());
        } finally {
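          // Close the scanner before the reader: the scanner can still hold
          // block references obtained from the reader.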
          scanner.close();
          reader.close();
        }

        assertEquals("Did not read back as many KVs as written", testKvs.size(), i);

        // Test random seeks with pread
        LOG.info("Random seeking with " + fileContext);
        Random rand = ThreadLocalRandom.current();
        reader = HFile.createReader(fs, path, cacheConf, true, conf);
        try {
          scanner = reader.getScanner(conf, false, true);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          for (i = 0; i < 100; i++) {
            KeyValue kv = testKvs.get(rand.nextInt(testKvs.size()));
            assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
          }
        } finally {
          scanner.close();
          reader.close();
        }
      }
    }
  }

}