/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.wal;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;

import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StreamCapabilities;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, LargeTests.class })
public class TestHBaseWalOnEC {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHBaseWalOnEC.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    try {
      MiniDFSCluster cluster = UTIL.startMiniDFSCluster(3); // Need 3 DNs for RS-3-2 policy
      DistributedFileSystem fs = cluster.getFileSystem();

      Method enableAllECPolicies =
        DFSTestUtil.class.getMethod("enableAllECPolicies", DistributedFileSystem.class);
      enableAllECPolicies.invoke(null, fs);

      DFSClient client = fs.getClient();
      Method setErasureCodingPolicy =
        DFSClient.class.getMethod("setErasureCodingPolicy", String.class, String.class);
      setErasureCodingPolicy.invoke(client, "/", "RS-3-2-1024k"); // try a built-in policy

      try (FSDataOutputStream out = fs.create(new Path("/canary"))) {
        // If this comes back as having hflush then some test setup assumption is wrong.
        // Fail the test so that a developer has to look and triage
        assertFalse("Did not enable EC!", out.hasCapability(StreamCapabilities.HFLUSH));
      }
    } catch (NoSuchMethodException e) {
      // We're not testing anything interesting if EC is not available, so skip the rest of the test
      Assume.assumeNoException("Using an older version of hadoop; EC not available.", e);
    }

    UTIL.getConfiguration().setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true);
  }

  @Parameter
  public String walProvider;

  @Parameters
  public static List<Object[]> params() {
    return Arrays.asList(new Object[] { "asyncfs" }, new Object[] { "filesystem" });
  }

  @Before
  public void setUp() throws Exception {
    UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, walProvider);
    UTIL.startMiniCluster(3);
  }

  @After
  public void tearDown() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  @Test
  public void testReadWrite() throws IOException {
    byte[] row = Bytes.toBytes("row");
    byte[] cf = Bytes.toBytes("cf");
    byte[] cq = Bytes.toBytes("cq");
    byte[] value = Bytes.toBytes("value");

    TableName name = TableName.valueOf(getClass().getSimpleName());

    Table t = UTIL.createTable(name, cf);
    t.put(new Put(row).addColumn(cf, cq, value));

    UTIL.getAdmin().flush(name);

    assertArrayEquals(value, t.get(new Get(row)).getValue(cf, cq));
  }
}