/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.security.Key;
import java.util.ArrayList;
import java.util.List;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

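/**
 * Test that hbck's HFile corruption checker can read store files written with
 * transparent column family encryption enabled, i.e. that encrypted HFiles are
 * not reported as corrupt.
 */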
@Category({ MiscTests.class, MediumTests.class })
public class TestHBaseFsckEncryption {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHBaseFsckEncryption.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  private Configuration conf;
  private HTableDescriptor htd;
  private Key cfKey;

  @Before
  public void setUp() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");

    // Create the test encryption key
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    Bytes.secureRandom(keyBytes);
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    cfKey = new SecretKeySpec(keyBytes, algorithm);

    // Start the minicluster
    TEST_UTIL.startMiniCluster(3);

    // Create the table
    htd = new HTableDescriptor(TableName.valueOf("default", "TestHBaseFsckEncryption"));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setEncryptionType(algorithm);
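    // Wrap the column family key with the configured master key so the wrapped
    // form can be stored in the column descriptor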
    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf,
      conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
      cfKey));
    htd.addFamily(hcd);
    TEST_UTIL.getAdmin().createTable(htd);
    TEST_UTIL.waitTableAvailable(htd.getTableName(), 5000);
  }

  @After
  public void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

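  /**
   * Writes a few rows to the encrypted table, flushes them to disk, checks that every resulting
   * store file was written with the column family key, and then runs the hbck HFile quarantine
   * check to verify that none of the encrypted files are flagged as corrupt, failed, quarantined,
   * or missing.
   */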
  @Test
  public void testFsckWithEncryption() throws Exception {
    // Populate the table with some data
    Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
    try {
      byte[] values = { 'A', 'B', 'C', 'D' };
      for (int i = 0; i < values.length; i++) {
        for (int j = 0; j < values.length; j++) {
          Put put = new Put(new byte[] { values[i], values[j] });
          put.addColumn(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i], values[j] });
          table.put(put);
        }
      }
    } finally {
      table.close();
    }
    // Flush it
    TEST_UTIL.getAdmin().flush(htd.getTableName());

    // Verify we have encrypted store files on disk
    final List<Path> paths = findStorefilePaths(htd.getTableName());
    assertTrue(paths.size() > 0);
    for (Path path : paths) {
      assertTrue("Store file " + path + " has incorrect key",
        Bytes.equals(cfKey.getEncoded(), extractHFileKey(path)));
    }

    // Ensure hbck doesn't consider them corrupt
    HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, htd.getTableName());
    assertEquals(0, res.getRetCode());
    HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
    assertEquals(0, hfcc.getCorrupted().size());
    assertEquals(0, hfcc.getFailures().size());
    assertEquals(0, hfcc.getQuarantined().size());
    assertEquals(0, hfcc.getMissing().size());
  }

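  /**
   * Collects the paths of all store files for the given table from the region server hosting its
   * first region.
   */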
  private List<Path> findStorefilePaths(TableName tableName) throws Exception {
    List<Path> paths = new ArrayList<>();
    for (Region region : TEST_UTIL.getRSForFirstRegionInTable(tableName)
      .getRegions(htd.getTableName())) {
      for (HStore store : ((HRegion) region).getStores()) {
        for (HStoreFile storefile : store.getStorefiles()) {
          paths.add(storefile.getPath());
        }
      }
    }
    return paths;
  }

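  /**
   * Opens the HFile at the given path and returns the encoded form of the encryption key recorded
   * in its file context, asserting that the file was in fact written with an encryption context
   * and key.
   */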
  private byte[] extractHFileKey(Path path) throws Exception {
    HFile.Reader reader =
      HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), true, conf);
    try {
      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
      assertNotNull("Reader has a null crypto context", cryptoContext);
      Key key = cryptoContext.getKey();
      assertNotNull("Crypto context has no key", key);
      return key.getEncoded();
    } finally {
      reader.close();
    }
  }

}