001/* 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.regionserver.wal; 019 020import static org.junit.jupiter.api.Assertions.assertArrayEquals; 021import static org.junit.jupiter.api.Assertions.assertFalse; 022 023import java.io.IOException; 024import java.util.stream.Stream; 025import org.apache.hadoop.fs.FSDataOutputStream; 026import org.apache.hadoop.fs.Path; 027import org.apache.hadoop.fs.StreamCapabilities; 028import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; 029import org.apache.hadoop.hbase.HBaseTestingUtil; 030import org.apache.hadoop.hbase.TableName; 031import org.apache.hadoop.hbase.client.Get; 032import org.apache.hadoop.hbase.client.Put; 033import org.apache.hadoop.hbase.client.Table; 034import org.apache.hadoop.hbase.testclassification.LargeTests; 035import org.apache.hadoop.hbase.testclassification.RegionServerTests; 036import org.apache.hadoop.hbase.util.Bytes; 037import org.apache.hadoop.hbase.util.CommonFSUtils; 038import org.apache.hadoop.hbase.wal.WALFactory; 039import org.apache.hadoop.hdfs.DFSTestUtil; 040import org.apache.hadoop.hdfs.DistributedFileSystem; 041import org.apache.hadoop.hdfs.MiniDFSCluster; 042import org.apache.hadoop.hdfs.client.HdfsAdmin; 043import org.junit.jupiter.api.AfterEach; 044import org.junit.jupiter.api.BeforeAll; 045import org.junit.jupiter.api.BeforeEach; 046import org.junit.jupiter.api.Tag; 047import org.junit.jupiter.api.TestTemplate; 048import org.junit.jupiter.params.provider.Arguments; 049 050@Tag(RegionServerTests.TAG) 051@Tag(LargeTests.TAG) 052@HBaseParameterizedTestTemplate 053public class TestHBaseWalOnEC { 054 055 private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); 056 057 @BeforeAll 058 public static void setUpBeforeClass() throws Exception { 059 MiniDFSCluster cluster = UTIL.startMiniDFSCluster(3); // Need 3 DNs for RS-3-2 policy 060 DistributedFileSystem fs = cluster.getFileSystem(); 061 062 DFSTestUtil.enableAllECPolicies(fs); 063 064 HdfsAdmin hdfsAdmin = new HdfsAdmin(fs.getUri(), UTIL.getConfiguration()); 065 hdfsAdmin.setErasureCodingPolicy(new Path("/"), "RS-3-2-1024k"); 066 067 try (FSDataOutputStream out = fs.create(new Path("/canary"))) { 068 // If this comes back as having hflush then some test setup assumption is wrong. 069 // Fail the test so that a developer has to look and triage 070 assertFalse(out.hasCapability(StreamCapabilities.HFLUSH), "Did not enable EC!"); 071 } 072 073 UTIL.getConfiguration().setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true); 074 075 } 076 077 private final String walProvider; 078 079 public static Stream<Arguments> parameters() { 080 return Stream.of(Arguments.of("asyncfs"), Arguments.of("filesystem")); 081 } 082 083 public TestHBaseWalOnEC(String walProvider) { 084 this.walProvider = walProvider; 085 } 086 087 @BeforeEach 088 public void setUp() throws Exception { 089 UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, walProvider); 090 UTIL.startMiniCluster(3); 091 } 092 093 @AfterEach 094 public void tearDown() throws Exception { 095 UTIL.shutdownMiniCluster(); 096 } 097 098 @TestTemplate 099 public void testReadWrite() throws IOException { 100 byte[] row = Bytes.toBytes("row"); 101 byte[] cf = Bytes.toBytes("cf"); 102 byte[] cq = Bytes.toBytes("cq"); 103 byte[] value = Bytes.toBytes("value"); 104 105 TableName name = TableName.valueOf(getClass().getSimpleName()); 106 107 Table t = UTIL.createTable(name, cf); 108 t.put(new Put(row).addColumn(cf, cq, value)); 109 110 UTIL.getAdmin().flush(name); 111 112 assertArrayEquals(value, t.get(new Get(row)).getValue(cf, cq)); 113 } 114}