Package org.apache.hadoop.hbase.client
Class FromClientSideBase
java.lang.Object
org.apache.hadoop.hbase.client.FromClientSideBase
- Direct Known Subclasses:
TestFromClientSide, TestFromClientSide4, TestFromClientSide5, TestTableScanMetrics
Base for TestFromClientSide* classes. Has common defines and utility used by all.
-
Field Summary
FieldsModifier and TypeFieldDescription(package private) static byte[](package private) static final byte[]private static final org.slf4j.Logger(package private) static byte[](package private) static byte[](package private) static int(package private) static HBaseTestingUtil(package private) static byte[] -
Constructor Summary
Constructors -
Method Summary
Modifier and TypeMethodDescriptionprotected static voidprotected voidassertDoubleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB) Validate that result contains two specified keys, exactly.protected voidassertEmptyResult(org.apache.hadoop.hbase.client.Result result) (package private) static voidassertIncrementKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value) protected voidassertKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value) protected voidassertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs) protected voidassertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected voidassertNullResult(org.apache.hadoop.hbase.client.Result result) protected voidassertNumKeys(org.apache.hadoop.hbase.client.Result result, int n) protected voidassertRowCount(org.apache.hadoop.hbase.client.Table t, int expected) protected voidassertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value) protected voidassertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value) protected voidassertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value) protected org.apache.hadoop.hbase.client.ResultScannerbuildScanner(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) protected org.apache.hadoop.hbase.client.ScancreateScanWithRowFilter(byte[] key) protected org.apache.hadoop.hbase.client.ScancreateScanWithRowFilter(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op) 
protected voiddeleteColumns(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix) protected static booleanequals(byte[] left, byte[] right) protected voidgetAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected intgetNumberOfRows(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) protected org.apache.hadoop.hbase.client.ResultgetSingleScanResult(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan) protected voidgetTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) protected voidgetTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value) protected voidgetVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) Verify a single column using gets.protected voidgetVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) Verify we do not read any values by accident around a single column Same requirements as getVerifySingleColumnprotected voidgetVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) protected voidgetVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) protected voidgetVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected voidgetVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected static 
final voidinitialize(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps) protected static booleanisSameParameterizedCluster(Class<?> registryImpl, int numHedgedReqs) JUnit does not provide an easy way to run a hook after each parameterized run.protected byte[][]makeN(byte[] base, int n) (package private) byte[][]makeNAscii(byte[] base, int n) protected byte[][]makeNBig(byte[] base, int n) protected long[]makeStamps(int n) protected voidprotected voidscanAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected voidscanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) protected voidscanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) protected voidscanVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) Verify a single column using scanners.protected voidscanVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) protected voidscanVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) protected voidscanVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) protected voidscanVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected voidscanVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int 
end) protected voidsingleRowGetTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) protected voidsingleRowScanTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) protected List<org.apache.hadoop.hbase.HRegionLocation>splitTable(org.apache.hadoop.hbase.client.Table t) Split table into multiple regions.private List<org.apache.hadoop.hbase.HRegionLocation>waitOnSplit(org.apache.hadoop.hbase.client.Table t)
-
Field Details
-
LOG
-
TEST_UTIL
-
ROW
-
FAMILY
-
INVALID_FAMILY
-
QUALIFIER
-
VALUE
-
SLAVES
-
-
Constructor Details
-
FromClientSideBase
-
-
Method Details
-
isSameParameterizedCluster
JUnit does not provide an easy way to run a hook after each parameterized run. Without that there is no easy way to restart the test cluster after each parameterized run. Annotation BeforeParam does not work either because it runs before parameterization and hence does not have access to the test parameters (which is weird). This *hack* checks if the current instance of test cluster configuration has the passed parameterized configs. In such a case, we can just reuse the cluster for test and do not need to initialize from scratch. While this is a hack, it saves a ton of time for the full test and de-flakes it. -
initialize
protected static final void initialize(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps) throws Exception - Throws:
Exception
-
afterClass
- Throws:
Exception
-
deleteColumns
protected void deleteColumns(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix) throws IOException - Throws:
IOException
-
getNumberOfRows
protected int getNumberOfRows(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws Exception - Throws:
Exception
-
buildScanner
protected org.apache.hadoop.hbase.client.ResultScanner buildScanner(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws IOException - Throws:
IOException
-
putRows
protected void putRows(org.apache.hadoop.hbase.client.Table ht, int numRows, String value, String key) throws IOException - Throws:
IOException
-
assertRowCount
protected void assertRowCount(org.apache.hadoop.hbase.client.Table t, int expected) throws IOException - Throws:
IOException
-
createScanWithRowFilter
-
createScanWithRowFilter
protected org.apache.hadoop.hbase.client.Scan createScanWithRowFilter(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op) -
splitTable
protected List<org.apache.hadoop.hbase.HRegionLocation> splitTable(org.apache.hadoop.hbase.client.Table t) throws IOException Split table into multiple regions.- Parameters:
t- Table to split.- Returns:
- List of region locations for the split table.
- Throws:
IOException
-
waitOnSplit
private List<org.apache.hadoop.hbase.HRegionLocation> waitOnSplit(org.apache.hadoop.hbase.client.Table t) throws IOException - Throws:
IOException
-
getSingleScanResult
protected org.apache.hadoop.hbase.client.Result getSingleScanResult(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan) throws IOException - Throws:
IOException
-
makeNAscii
-
makeN
-
makeNBig
-
makeStamps
-
equals
-
assertKey
protected void assertKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value) -
assertIncrementKey
static void assertIncrementKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value) -
assertNumKeys
- Throws:
Exception
-
assertNResult
protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs) -
assertNResult
protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) -
assertDoubleResult
protected void assertDoubleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB) Validate that result contains two specified keys, exactly. It is assumed key A sorts before key B. -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value) -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value) -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value) -
assertEmptyResult
- Throws:
Exception
-
assertNullResult
- Throws:
Exception
-
getVersionRangeAndVerifyGreaterThan
protected void getVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getVersionRangeAndVerify
protected void getVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getAllVersionsAndVerify
protected void getAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanVersionRangeAndVerifyGreaterThan
protected void scanVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanVersionRangeAndVerify
protected void scanVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanAllVersionsAndVerify
protected void scanAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getVersionAndVerify
protected void getVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception - Throws:
Exception
-
getVersionAndVerifyMissing
protected void getVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception - Throws:
Exception
-
scanVersionAndVerify
protected void scanVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception - Throws:
Exception
-
scanVersionAndVerifyMissing
protected void scanVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception - Throws:
Exception
-
getTestNull
protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception - Throws:
Exception
-
getTestNull
protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value) throws Exception - Throws:
Exception
-
scanTestNull
protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception - Throws:
Exception
-
scanTestNull
protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) throws Exception - Throws:
Exception
-
singleRowGetTest
protected void singleRowGetTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception - Throws:
Exception
-
singleRowScanTest
protected void singleRowScanTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception - Throws:
Exception
-
getVerifySingleColumn
protected void getVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception Verify a single column using gets. Expects family and qualifier arrays to be valid for at least the range: idx-2 to idx+2.- Throws:
Exception
-
scanVerifySingleColumn
protected void scanVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception Verify a single column using scanners. Expects family and qualifier arrays to be valid for at least the range: idx-2 to idx+2. Expects row array to be valid for at least idx to idx+2.- Throws:
Exception
-
getVerifySingleEmpty
protected void getVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception Verify we do not read any values by accident around a single column. Same requirements as getVerifySingleColumn.- Throws:
Exception
-
scanVerifySingleEmpty
protected void scanVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception - Throws:
Exception
-