/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;

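/**
 * Base class for bulk load tests. Provides helpers that create test regions backed by a mocked
 * WAL, write single-cell HFiles for arbitrary column families, and build Hamcrest matchers for the
 * bulk-load markers appended to the WAL. The class is parameterized so each subclass test runs
 * both with and without the file-based store file tracker.
 */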
@RunWith(Parameterized.class)
public class TestBulkloadBase {
  @ClassRule
  public static TemporaryFolder testFolder = new TemporaryFolder();
  private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  protected final WAL log = mock(WAL.class);
  protected final Configuration conf = HBaseConfiguration.create();
  private final byte[] randomBytes = new byte[100];
  protected final byte[] family1 = Bytes.toBytes("family1");
  protected final byte[] family2 = Bytes.toBytes("family2");
  protected final byte[] family3 = Bytes.toBytes("family3");

  protected Boolean useFileBasedSFT;

  @Rule
  public TestName name = new TestName();

  public TestBulkloadBase(boolean useFileBasedSFT) {
    this.useFileBasedSFT = useFileBasedSFT;
  }

  @Parameterized.Parameters
  public static Collection<Boolean> data() {
    Boolean[] data = { false, true };
    return Arrays.asList(data);
  }

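  /**
   * Re-randomizes the shared row/qualifier/value bytes and configures the store file tracker for
   * this parameterized run: the file-based tracker when {@code useFileBasedSFT} is true, the
   * default tracker otherwise.
   */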
  @Before
  public void before() throws IOException {
    Bytes.random(randomBytes);
    if (useFileBasedSFT) {
      conf.set(StoreFileTrackerFactory.TRACKER_IMPL,
        "org.apache.hadoop.hbase.regionserver.storefiletracker.FileBasedStoreFileTracker");
    } else {
      conf.unset(StoreFileTrackerFactory.TRACKER_IMPL);
    }
  }

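  /**
   * Returns a family/path pair whose path points at a file that does not exist, for exercising
   * bulk-load failure handling.
   */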
  protected Pair<byte[], String> withMissingHFileForFamily(byte[] family) {
    return new Pair<>(family, getNotExistFilePath());
  }

  private String getNotExistFilePath() {
    Path path = new Path(TEST_UTIL.getDataTestDir(), "does_not_exist");
    return path.toUri().getPath();
  }

  protected Pair<byte[], String> withInvalidColumnFamilyButProperHFileLocation(byte[] family)
    throws IOException {
    createHFileForFamilies(family);
    return new Pair<>(new byte[] { 0x00, 0x01, 0x02 }, getNotExistFilePath());
  }

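  /**
   * Creates a fresh HRegion with the given table name and column families under a new temporary
   * folder, backed by the mocked WAL.
   */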
  protected HRegion testRegionWithFamiliesAndSpecifiedTableName(TableName tableName,
    byte[]... families) throws IOException {
    RegionInfo hRegionInfo = RegionInfoBuilder.newBuilder(tableName).build();
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);

    for (byte[] family : families) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null,
      MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
    // TODO We need a way to do this without creating files
    return HRegion.createHRegion(hRegionInfo, new Path(testFolder.newFolder().toURI()), conf,
      builder.build(), log);
  }

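  /**
   * Creates a region whose table name is derived from the current test method name, with the
   * parameterized suffix stripped.
   */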
  protected HRegion testRegionWithFamilies(byte[]... families) throws IOException {
    TableName tableName =
      TableName.valueOf(name.getMethodName().substring(0, name.getMethodName().indexOf("[")));
    return testRegionWithFamiliesAndSpecifiedTableName(tableName, families);
  }

  private List<Pair<byte[], String>> getBlankFamilyPaths() {
    return new ArrayList<>();
  }

  protected List<Pair<byte[], String>> withFamilyPathsFor(byte[]... families) throws IOException {
    List<Pair<byte[], String>> familyPaths = getBlankFamilyPaths();
    for (byte[] family : families) {
      familyPaths.add(new Pair<>(family, createHFileForFamilies(family)));
    }
    return familyPaths;
  }

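  /**
   * Writes a single-cell HFile for the given family into the temporary folder and returns its
   * absolute path.
   */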
  private String createHFileForFamilies(byte[] family) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(conf);
    // TODO We need a way to do this without creating files
    File hFileLocation = testFolder.newFile(generateUniqueName(null));
    FSDataOutputStream out = new FSDataOutputStream(new FileOutputStream(hFileLocation), null);
    try {
      hFileFactory.withOutputStream(out);
      hFileFactory.withFileContext(new HFileContextBuilder().build());
      HFile.Writer writer = hFileFactory.create();
      try {
        writer.append(new KeyValue(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
          .setRow(randomBytes).setFamily(family).setQualifier(randomBytes).setTimestamp(0L)
          .setType(KeyValue.Type.Put.getCode()).setValue(randomBytes).build()));
      } finally {
        writer.close();
      }
    } finally {
      out.close();
    }
    return hFileLocation.getAbsoluteFile().getAbsolutePath();
  }

  private static String generateUniqueName(final String suffix) {
    String name = UUID.randomUUID().toString().replaceAll("-", "");
    if (suffix != null) {
      name += suffix;
    }
    return name;
  }

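  /**
   * Matcher factory for WALEdits whose bulk-load marker cell carries the given qualifier type.
   */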
  protected static Matcher<WALEdit> bulkLogWalEditType(byte[] typeBytes) {
    return new WalMatcher(typeBytes);
  }

  protected static Matcher<WALEdit> bulkLogWalEdit(byte[] typeBytes, byte[] tableName,
    byte[] familyName, List<String> storeFileNames) {
    return new WalMatcher(typeBytes, tableName, familyName, storeFileNames);
  }

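  /**
   * Hamcrest matcher that checks the bulk-load marker cell of a WALEdit: the qualifier type and,
   * when supplied, the table name, family name, store home directory and store file list from the
   * embedded BulkLoadDescriptor.
   */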
  private static class WalMatcher extends TypeSafeMatcher<WALEdit> {
    private final byte[] typeBytes;
    private final byte[] tableName;
    private final byte[] familyName;
    private final List<String> storeFileNames;

    public WalMatcher(byte[] typeBytes) {
      this(typeBytes, null, null, null);
    }

    public WalMatcher(byte[] typeBytes, byte[] tableName, byte[] familyName,
      List<String> storeFileNames) {
      this.typeBytes = typeBytes;
      this.tableName = tableName;
      this.familyName = familyName;
      this.storeFileNames = storeFileNames;
    }

    @Override
    protected boolean matchesSafely(WALEdit item) {
      assertTrue(Arrays.equals(CellUtil.cloneQualifier(item.getCells().get(0)), typeBytes));
      WALProtos.BulkLoadDescriptor desc;
      try {
        desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0));
      } catch (IOException e) {
        return false;
      }
      assertNotNull(desc);

      if (tableName != null) {
        assertTrue(
          Bytes.equals(ProtobufUtil.toTableName(desc.getTableName()).getName(), tableName));
      }

      if (storeFileNames != null) {
        int index = 0;
        WALProtos.StoreDescriptor store = desc.getStores(0);
        assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), familyName));
        assertTrue(Bytes.equals(Bytes.toBytes(store.getStoreHomeDir()), familyName));
        assertEquals(storeFileNames.size(), store.getStoreFileCount());
        // Verify each store file name appears in the expected order.
        for (String storeFile : store.getStoreFileList()) {
          assertTrue(storeFile.equals(storeFileNames.get(index++)));
        }
      }

      return true;
    }

    @Override
    public void describeTo(Description description) {
    }
  }
}