/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.regionserver.wal;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.lang.reflect.Method;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

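/**
 * Sanity checks for the HBase WAL when the HDFS root directory carries an erasure coding (EC)
 * policy. EC output streams do not support hflush/hsync, so WAL files are expected to be created
 * in a way that still provides those capabilities (see testStreamCreate below).
 */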
@Category(LargeTests.class)
public class TestHBaseWalOnEC {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestHBaseWalOnEC.class);

  private static final HBaseTestingUtility util = new HBaseTestingUtility();

  private static final String HFLUSH = "hflush";

  @BeforeClass
  public static void setup() throws Exception {
    try {
      MiniDFSCluster cluster = util.startMiniDFSCluster(3); // Need 3 DNs for RS-3-2 policy
      DistributedFileSystem fs = cluster.getFileSystem();

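      // The erasure coding APIs only exist in Hadoop 3, so reach them via reflection; when the
      // methods are missing, the NoSuchMethodException handler below skips the whole test.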
      Method enableAllECPolicies = DFSTestUtil.class.getMethod("enableAllECPolicies",
          DistributedFileSystem.class);
      enableAllECPolicies.invoke(null, fs);

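      // Set an EC policy on the root directory so that everything the mini cluster writes,
      // including WALs, ends up on erasure coded storage.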
      DFSClient client = fs.getClient();
      Method setErasureCodingPolicy = DFSClient.class.getMethod("setErasureCodingPolicy",
          String.class, String.class);
      setErasureCodingPolicy.invoke(client, "/", "RS-3-2-1024k"); // try a built-in policy

      try (FSDataOutputStream out = fs.create(new Path("/canary"))) {
        // If this comes back as having hflush then some test setup assumption is wrong.
        // Fail the test so that a developer has to look and triage
        assertFalse("Did not enable EC!", CommonFSUtils.hasCapability(out, HFLUSH));
      }
    } catch (NoSuchMethodException e) {
      // We're not testing anything interesting if EC is not available, so skip the rest of the test
      Assume.assumeNoException("Using an older version of hadoop; EC not available.", e);
    }

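    // With EC confirmed on the root, run the mini cluster with stream capability enforcement
    // turned on so that WAL creation fails fast if hflush/hsync support is missing.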
    util.getConfiguration().setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true);
    util.startMiniCluster();
  }

  @AfterClass
  public static void tearDown() throws Exception {
    util.shutdownMiniCluster();
  }

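  /**
   * A WAL output stream obtained through CommonFSUtils.createForWal() should still report hflush
   * support, even though a plain create() on the EC-backed filesystem does not (see the canary
   * check in setup()).
   */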
  @Test
  public void testStreamCreate() throws IOException {
    try (FSDataOutputStream out = CommonFSUtils.createForWal(util.getDFSCluster().getFileSystem(),
        new Path("/testStreamCreate"), true)) {
      assertTrue(CommonFSUtils.hasCapability(out, HFLUSH));
    }
  }

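  /**
   * End-to-end check: write a row, flush the table, and read the value back on the EC-backed
   * cluster.
   */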
  @Test
  public void testFlush() throws IOException {
    byte[] row = Bytes.toBytes("row");
    byte[] cf = Bytes.toBytes("cf");
    byte[] cq = Bytes.toBytes("cq");
    byte[] value = Bytes.toBytes("value");

    TableName name = TableName.valueOf(getClass().getSimpleName());

    Table t = util.createTable(name, cf);
    t.put(new Put(row).addColumn(cf, cq, value));

    util.getAdmin().flush(name);

    assertArrayEquals(value, t.get(new Get(row)).getValue(cf, cq));
  }
}