public class TestHFileOutputFormat2 extends Object

Simple test for HFileOutputFormat2. Sets up and runs a mapreduce job that writes hfile output. Creates a few inner classes to implement splits and an inputformat that emits keys and values like those of PerformanceEvaluation.
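For orientation, the shape of the job this test exercises can be sketched as below: a MapReduce job whose output side is wired to HFileOutputFormat2 through configureIncrementalLoad. The table name, output path, and class name here are placeholders, not values taken from the test.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class IncrementalLoadJobSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("example_table"));
         RegionLocator locator = conn.getRegionLocator(table.getName())) {
      Job job = Job.getInstance(conf, "hfile-output-sketch");
      job.setJarByClass(IncrementalLoadJobSketch.class);
      // A mapper emitting ImmutableBytesWritable keys and Cell/Put values would be set here,
      // analogous to the RandomKVGeneratingMapper / RandomPutGeneratingMapper inner classes.
      // configureIncrementalLoad wires the reducer, partitioner and per-family settings needed
      // to produce HFiles that line up with the table's current region boundaries.
      HFileOutputFormat2.configureIncrementalLoad(job, table, locator);
      FileOutputFormat.setOutputPath(job, new Path("/tmp/hfile-out"));
      System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
  }
}
```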
Modifier and Type | Class and Description |
---|---|
(package private) static class | TestHFileOutputFormat2.RandomKVGeneratingMapper: Simple mapper that makes KeyValue output. |
(package private) static class | TestHFileOutputFormat2.RandomPutGeneratingMapper: Simple mapper that makes Put output. |
Modifier and Type | Field and Description |
---|---|
static HBaseClassTestRule | CLASS_RULE |
private static byte[][] | FAMILIES |
static byte[] | FAMILY_NAME |
private static org.slf4j.Logger | LOG |
private static int | ROWSPERSPLIT |
private static org.apache.hadoop.hbase.TableName[] | TABLE_NAMES |
private HBaseTestingUtility | util |
Constructor and Description |
---|
TestHFileOutputFormat2() |
Modifier and Type | Method and Description |
---|---|
private org.apache.hadoop.mapreduce.TaskAttemptContext | createTestTaskAttemptContext(org.apache.hadoop.mapreduce.Job job) |
private void | doIncrementalLoadTest(boolean shouldChangeRegions, boolean shouldKeepLocality, boolean putSortReducer, List<String> tableStr) |
private void | doIncrementalLoadTest(boolean shouldChangeRegions, boolean shouldKeepLocality, boolean putSortReducer, String tableStr) |
private byte[][] | generateRandomSplitKeys(int numKeys) |
private byte[][] | generateRandomStartKeys(int numKeys) |
private Map<String,Integer> | getMockColumnFamiliesForBlockSize(int numCfs) |
private Map<String,org.apache.hadoop.hbase.regionserver.BloomType> | getMockColumnFamiliesForBloomType(int numCfs) |
private Map<String,org.apache.hadoop.hbase.io.compress.Compression.Algorithm> | getMockColumnFamiliesForCompression(int numCfs) |
private Map<String,org.apache.hadoop.hbase.io.encoding.DataBlockEncoding> | getMockColumnFamiliesForDataBlockEncoding(int numCfs) |
private String | getStoragePolicyName(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path) |
private String | getStoragePolicyNameForOldHDFSVersion(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path) |
static void | main(String[] args) |
void | manualTest(String[] args) |
private void | quickPoll(Callable<Boolean> c, int waitMs) |
private void | runIncrementalPELoad(org.apache.hadoop.conf.Configuration conf, List<org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.TableInfo> tableInfo, org.apache.hadoop.fs.Path outDir, boolean putSortReducer) |
private void | setupMockColumnFamiliesForBlockSize(org.apache.hadoop.hbase.client.Table table, Map<String,Integer> familyToDataBlockEncoding) |
private void | setupMockColumnFamiliesForBloomType(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.regionserver.BloomType> familyToDataBlockEncoding) |
private void | setupMockColumnFamiliesForCompression(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.io.compress.Compression.Algorithm> familyToCompression) |
private void | setupMockColumnFamiliesForDataBlockEncoding(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.io.encoding.DataBlockEncoding> familyToDataBlockEncoding) |
private void | setupMockStartKeys(org.apache.hadoop.hbase.client.RegionLocator table) |
private void | setupMockTableName(org.apache.hadoop.hbase.client.RegionLocator table) |
private void | setupRandomGeneratorMapper(org.apache.hadoop.mapreduce.Job job, boolean putSortReducer) |
void | test_LATEST_TIMESTAMP_isReplaced(): Test that HFileOutputFormat2 RecordWriter amends timestamps if passed a keyvalue whose timestamp is HConstants.LATEST_TIMESTAMP. |
void | test_TIMERANGE() |
void | test_WritingTagData(): Test that HFileOutputFormat2 RecordWriter writes tags such as ttl into hfile. |
void | testBlockStoragePolicy() |
void | testColumnFamilySettings(): Test that HFileOutputFormat2 RecordWriter uses compression and bloom filter settings from the column family descriptor. |
void | TestConfigureCompression() |
void | testExcludeAllFromMinorCompaction(): Tests the scenario reported in HBASE-6901. |
void | testExcludeMinorCompaction() |
void | testJobConfiguration() |
void | testMRIncrementalLoad() |
void | testMRIncrementalLoadWithLocality(): Test for HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY = true. This test can only check the correctness of the original logic if LOCALITY_SENSITIVE_CONF_KEY is set to true (see the configuration sketch after this table). |
void | testMRIncrementalLoadWithPutSortReducer() |
void | testMRIncrementalLoadWithSplit() |
void | testMultiMRIncrementalLoadWithPutSortReducer() |
void | testSerializeDeserializeFamilyBlockSizeMap(): Test for HFileOutputFormat2.createFamilyBlockSizeMap(Configuration). |
void | testSerializeDeserializeFamilyBloomTypeMap(): Test for HFileOutputFormat2.createFamilyBloomTypeMap(Configuration). |
void | testSerializeDeserializeFamilyCompressionMap(): Test for HFileOutputFormat2.createFamilyCompressionMap(Configuration). |
void | testSerializeDeserializeFamilyDataBlockEncodingMap(): Test for HFileOutputFormat2.createFamilyDataBlockEncodingMap(Configuration). |
void | testWritingPEData(): Run small MR job. |
private void | writeRandomKeyValues(org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell> writer, org.apache.hadoop.mapreduce.TaskAttemptContext context, Set<byte[]> families, int numRows): Write random values to the writer assuming a table created using FAMILIES as column family descriptors. |
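The locality-sensitive test referenced above toggles an ordinary configuration flag. A minimal sketch of enabling it before the job is configured, assuming the LOCALITY_SENSITIVE_CONF_KEY constant is accessible from the calling code; the class name is illustrative.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;

public final class LocalitySensitiveFlagSketch {
  private LocalitySensitiveFlagSketch() {}

  /** Returns a configuration with locality-sensitive HFile output switched on. */
  static Configuration localitySensitiveConf() {
    Configuration conf = HBaseConfiguration.create();
    // With the flag enabled, HFileOutputFormat2 tries to write each HFile's blocks on a
    // favored node that hosts the corresponding region, which is what the test verifies.
    conf.setBoolean(HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY, true);
    return conf;
  }
}
```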
public static final HBaseClassTestRule CLASS_RULE
private static final int ROWSPERSPLIT
public static final byte[] FAMILY_NAME
private static final byte[][] FAMILIES
private static final org.apache.hadoop.hbase.TableName[] TABLE_NAMES
private HBaseTestingUtility util
private static final org.slf4j.Logger LOG
public TestHFileOutputFormat2()
private void setupRandomGeneratorMapper(org.apache.hadoop.mapreduce.Job job, boolean putSortReducer)
public void test_LATEST_TIMESTAMP_isReplaced() throws Exception
Test that HFileOutputFormat2 RecordWriter amends timestamps if passed a keyvalue whose timestamp is HConstants.LATEST_TIMESTAMP.
Throws: Exception
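A minimal sketch of the scenario this test covers: a cell carrying HConstants.LATEST_TIMESTAMP handed to the writer under test. In the real test the writer comes from HFileOutputFormat2's getRecordWriter; the row, family, and class names below are placeholders.

```java
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.RecordWriter;

public final class LatestTimestampSketch {
  private LatestTimestampSketch() {}

  /** Hands a cell carrying LATEST_TIMESTAMP to the writer under test. */
  static void writeCellWithLatestTimestamp(RecordWriter<ImmutableBytesWritable, Cell> writer)
      throws IOException, InterruptedException {
    byte[] row = Bytes.toBytes("row1");
    // LATEST_TIMESTAMP is the sentinel the RecordWriter is expected to replace with the
    // current time before the cell is persisted to the HFile.
    KeyValue kv = new KeyValue(row, Bytes.toBytes("info"), Bytes.toBytes("q"),
        HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
    writer.write(new ImmutableBytesWritable(row), kv);
    // The test then reads the written HFile back and asserts the persisted timestamp
    // is no longer HConstants.LATEST_TIMESTAMP.
  }
}
```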
private org.apache.hadoop.mapreduce.TaskAttemptContext createTestTaskAttemptContext(org.apache.hadoop.mapreduce.Job job) throws Exception
Throws: Exception
public void test_TIMERANGE() throws Exception
Throws: Exception
public void testWritingPEData() throws Exception
Throws: Exception
public void test_WritingTagData() throws Exception
Test that HFileOutputFormat2 RecordWriter writes tags such as ttl into hfile.
Throws: Exception
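A sketch of the kind of input this test feeds the writer: a KeyValue carrying a TTL tag. The class name and TTL value are illustrative only; the test's assertion is that such tags can be read back from the resulting hfile.

```java
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.util.Bytes;

public final class TtlTagSketch {
  private TtlTagSketch() {}

  /** Builds a cell carrying a TTL tag, the kind of input this test feeds the writer. */
  static KeyValue cellWithTtlTag(long ttlMillis) {
    byte[] b = Bytes.toBytes("b");
    Tag ttlTag = new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttlMillis));
    // The assertion in the test is that tags like this survive the trip through the
    // RecordWriter and can be read back from the resulting hfile.
    return new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, new Tag[] { ttlTag });
  }
}
```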
public void testJobConfiguration() throws Exception
Throws: Exception
private byte[][] generateRandomStartKeys(int numKeys)
private byte[][] generateRandomSplitKeys(int numKeys)
public void testMRIncrementalLoad() throws Exception
Throws: Exception
public void testMRIncrementalLoadWithSplit() throws Exception
Throws: Exception
public void testMRIncrementalLoadWithLocality() throws Exception
Throws: Exception
public void testMRIncrementalLoadWithPutSortReducer() throws Exception
Throws: Exception
private void doIncrementalLoadTest(boolean shouldChangeRegions, boolean shouldKeepLocality, boolean putSortReducer, String tableStr) throws Exception
Throws: Exception
public void testMultiMRIncrementalLoadWithPutSortReducer() throws Exception
Throws: Exception
private void doIncrementalLoadTest(boolean shouldChangeRegions, boolean shouldKeepLocality, boolean putSortReducer, List<String> tableStr) throws Exception
Throws: Exception
private void runIncrementalPELoad(org.apache.hadoop.conf.Configuration conf, List<org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.TableInfo> tableInfo, org.apache.hadoop.fs.Path outDir, boolean putSortReducer) throws IOException, InterruptedException, ClassNotFoundException
public void testSerializeDeserializeFamilyCompressionMap() throws IOException
Test for HFileOutputFormat2.createFamilyCompressionMap(Configuration). Tests that the family compression map is correctly serialized into and deserialized from configuration.
Throws: IOException
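The four serialize/deserialize tests (compression, bloom type, block size, data block encoding) all cover the same round trip: a per-family map written into the job Configuration as a single string attribute and parsed back. The sketch below illustrates that round trip with a hypothetical configuration key and encoding; it is not HFileOutputFormat2's actual attribute name or wire format.

```java
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.compress.Compression;

public final class FamilyCompressionMapSketch {
  // Hypothetical attribute name, for illustration only; HFileOutputFormat2 uses its own key.
  private static final String KEY = "example.hfileoutputformat.families.compression";

  /** Serializes a family-to-compression map into the configuration as "f1=GZ&f2=NONE". */
  static void serialize(Configuration conf, Map<String, Compression.Algorithm> familyToCompression) {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, Compression.Algorithm> e : familyToCompression.entrySet()) {
      if (sb.length() > 0) {
        sb.append('&');
      }
      sb.append(e.getKey()).append('=').append(e.getValue().name());
    }
    conf.set(KEY, sb.toString());
  }

  /** Parses the attribute back into a map, the direction the create*Map helpers are tested on. */
  static Map<String, Compression.Algorithm> deserialize(Configuration conf) {
    Map<String, Compression.Algorithm> result = new HashMap<>();
    String value = conf.get(KEY, "");
    if (value.isEmpty()) {
      return result;
    }
    for (String pair : value.split("&")) {
      String[] parts = pair.split("=", 2);
      result.put(parts[0], Compression.Algorithm.valueOf(parts[1]));
    }
    return result;
  }
}
```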
private void setupMockColumnFamiliesForCompression(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.io.compress.Compression.Algorithm> familyToCompression) throws IOException
Throws: IOException
private Map<String,org.apache.hadoop.hbase.io.compress.Compression.Algorithm> getMockColumnFamiliesForCompression(int numCfs)
public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException
Test for HFileOutputFormat2.createFamilyBloomTypeMap(Configuration). Tests that the family bloom type map is correctly serialized into and deserialized from configuration.
Throws: IOException
private void setupMockColumnFamiliesForBloomType(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.regionserver.BloomType> familyToDataBlockEncoding) throws IOException
Throws: IOException
private Map<String,org.apache.hadoop.hbase.regionserver.BloomType> getMockColumnFamiliesForBloomType(int numCfs)
public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException
Test for HFileOutputFormat2.createFamilyBlockSizeMap(Configuration). Tests that the family block size map is correctly serialized into and deserialized from configuration.
Throws: IOException
private void setupMockColumnFamiliesForBlockSize(org.apache.hadoop.hbase.client.Table table, Map<String,Integer> familyToDataBlockEncoding) throws IOException
Throws: IOException
private Map<String,Integer> getMockColumnFamiliesForBlockSize(int numCfs)
public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOException
Test for HFileOutputFormat2.createFamilyDataBlockEncodingMap(Configuration). Tests that the family data block encoding map is correctly serialized into and deserialized from configuration.
Throws: IOException
private void setupMockColumnFamiliesForDataBlockEncoding(org.apache.hadoop.hbase.client.Table table, Map<String,org.apache.hadoop.hbase.io.encoding.DataBlockEncoding> familyToDataBlockEncoding) throws IOException
Throws: IOException
private Map<String,org.apache.hadoop.hbase.io.encoding.DataBlockEncoding> getMockColumnFamiliesForDataBlockEncoding(int numCfs)
private void setupMockStartKeys(org.apache.hadoop.hbase.client.RegionLocator table) throws IOException
Throws: IOException
private void setupMockTableName(org.apache.hadoop.hbase.client.RegionLocator table) throws IOException
Throws: IOException
public void testColumnFamilySettings() throws Exception
Test that HFileOutputFormat2 RecordWriter uses compression and bloom filter settings from the column family descriptor.
Throws: Exception
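The settings this test checks come from the column family descriptor. A minimal sketch of building a descriptor with explicit compression and bloom filter settings using the public builder API; the table and family names are placeholders.

```java
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public final class ColumnFamilySettingsSketch {
  private ColumnFamilySettingsSketch() {}

  /** Builds a table descriptor whose per-family compression and bloom settings the writer should honor. */
  static TableDescriptor descriptorWithSettings() {
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("info"))
        .setCompressionType(Compression.Algorithm.GZ) // per-family compression codec
        .setBloomFilterType(BloomType.ROWCOL)         // per-family bloom filter type
        .build();
    return TableDescriptorBuilder
        .newBuilder(TableName.valueOf("example_table"))
        .setColumnFamily(cf)
        .build();
  }
}
```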
private void writeRandomKeyValues(org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.hbase.io.ImmutableBytesWritable,org.apache.hadoop.hbase.Cell> writer, org.apache.hadoop.mapreduce.TaskAttemptContext context, Set<byte[]> families, int numRows) throws IOException, InterruptedException
Write random values to the writer assuming a table created using FAMILIES as column family descriptors.
Throws: IOException, InterruptedException
public void testExcludeAllFromMinorCompaction() throws Exception
Throws: Exception
public void testExcludeMinorCompaction() throws Exception
Throws: Exception
private void quickPoll(Callable<Boolean> c, int waitMs) throws Exception
Throws: Exception
public void manualTest(String[] args) throws Exception
Throws: Exception
public void testBlockStoragePolicy() throws Exception
Throws: Exception
private String getStoragePolicyName(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path)
private String getStoragePolicyNameForOldHDFSVersion(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path)
public void TestConfigureCompression() throws Exception
Throws: Exception