Class FromClientSideBase

java.lang.Object
org.apache.hadoop.hbase.client.FromClientSideBase
Direct Known Subclasses:
TestFromClientSide, TestFromClientSide4, TestFromClientSide5

Base for the TestFromClientSide* classes. Holds common definitions and utility methods used by all of them.
  • Field Summary

    Fields
    Modifier and Type
    Field
    Description
    (package private) static byte[]
     
    (package private) static final byte[]
     
    private static final org.slf4j.Logger
     
    (package private) static byte[]
     
    (package private) static byte[]
     
    (package private) static int
     
    (package private) static HBaseTestingUtil
     
    (package private) static byte[]
     
  • Constructor Summary

    Constructors
    Constructor
    Description
    FromClientSideBase()
  • Method Summary

    Modifier and Type
    Method
    Description
    protected static void
    afterClass()
    protected void
    assertDoubleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB)
    Validate that the result contains exactly the two specified keys.
    protected void
    assertEmptyResult(org.apache.hadoop.hbase.client.Result result)
     
    (package private) static void
    assertIncrementKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value)
     
    protected void
    assertKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value)
     
    protected void
    assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs)
     
    protected void
    assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected void
    assertNullResult(org.apache.hadoop.hbase.client.Result result)
     
    protected void
    assertNumKeys(org.apache.hadoop.hbase.client.Result result, int n)
     
    protected void
    assertRowCount(org.apache.hadoop.hbase.client.Table t, int expected)
     
    protected void
    assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value)
     
    protected void
    assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value)
     
    protected void
    assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value)
     
    protected org.apache.hadoop.hbase.client.ResultScanner
    buildScanner(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht)
     
    protected org.apache.hadoop.hbase.client.Scan
    createScanWithRowFilter(byte[] key)
    protected org.apache.hadoop.hbase.client.Scan
    createScanWithRowFilter(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op)
     
    protected void
    deleteColumns(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix)
     
    protected static boolean
    equals(byte[] left, byte[] right)
     
    protected void
    getAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected int
    getNumberOfRows(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht)
     
    protected org.apache.hadoop.hbase.client.Result
    getSingleScanResult(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan)
     
    protected void
    getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value)
     
    protected void
    getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value)
     
    protected void
    getVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX)
    Verify a single column using gets.
    protected void
    getVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX)
    Verify we do not read any values by accident around a single column. Same requirements as getVerifySingleColumn.
    protected void
    getVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value)
     
    protected void
    getVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp)
     
    protected void
    getVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected void
    getVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected static final void
    initialize(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps)
     
    protected static boolean
    isSameParameterizedCluster(Class<?> registryImpl, int numHedgedReqs)
    JUnit does not provide an easy way to run a hook after each parameterized run.
    protected byte[][]
    makeN(byte[] base, int n)
     
    (package private) byte[][]
    makeNAscii(byte[] base, int n)
     
    protected byte[][]
    makeNBig(byte[] base, int n)
     
    protected long[]
    makeStamps(int n)
     
    protected void
    putRows(org.apache.hadoop.hbase.client.Table ht, int numRows, String value, String key)
     
    protected void
    scanAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected void
    scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value)
     
    protected void
    scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan)
     
    protected void
    scanVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX)
    Verify a single column using scanners.
    protected void
    scanVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX)
     
    protected void
    scanVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value)
     
    protected void
    scanVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp)
     
    protected void
    scanVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected void
    scanVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
     
    protected void
    singleRowGetTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES)
     
    protected void
    singleRowScanTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES)
     
    protected List<org.apache.hadoop.hbase.HRegionLocation>
    splitTable(org.apache.hadoop.hbase.client.Table t)
    Split table into multiple regions.
    private List<org.apache.hadoop.hbase.HRegionLocation>
    waitOnSplit(org.apache.hadoop.hbase.client.Table t)
     

    Methods inherited from class java.lang.Object

    clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
  • Field Details

  • Constructor Details

  • Method Details

    • isSameParameterizedCluster

      protected static boolean isSameParameterizedCluster(Class<?> registryImpl, int numHedgedReqs)
      JUnit does not provide an easy way to run a hook after each parameterized run, so there is no easy way to restart the test cluster between parameterized runs. The BeforeParam annotation does not work either, because it runs before parameterization and hence has no access to the test parameters. This *hack* checks whether the currently running test cluster was configured with the passed parameterized configs; if so, the cluster can be reused for the test and does not need to be initialized from scratch, as sketched below. While this is a hack, it saves a great deal of time for the full test run and de-flakes it.
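      As an illustration only, the check is typically paired with initialize(...) and afterClass() in a subclass setup hook. The sketch below is an assumption about that wiring (the class name, the hook name, and how the parameters arrive are hypothetical), not the actual TestFromClientSide code:

      package org.apache.hadoop.hbase.client;

      // Sketch only: reuse the running mini-cluster when the parameterized
      // configuration has not changed. Class name, hook name and the static
      // parameter fields are hypothetical.
      public class TestFromClientSideSketch extends FromClientSideBase {

        // Hypothetical parameters supplied by the JUnit runner for this run.
        private static Class<? extends ConnectionRegistry> registryImpl;
        private static int numHedgedReqs;

        protected static void setUpBeforeParameterizedRun() throws Exception {
          if (isSameParameterizedCluster(registryImpl, numHedgedReqs)) {
            // Same registry implementation and hedged-request count as the
            // cluster already running: reuse it rather than restarting.
            return;
          }
          // Parameters changed: tear down the previous cluster (assumed safe to
          // call even on the first run) and start a fresh one.
          afterClass();
          initialize(registryImpl, numHedgedReqs);
        }
      }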
    • initialize

      protected static final void initialize(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps) throws Exception
      Throws:
      Exception
    • afterClass

      protected static void afterClass() throws Exception
      Throws:
      Exception
    • deleteColumns

      protected void deleteColumns(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix) throws IOException
      Throws:
      IOException
    • getNumberOfRows

      protected int getNumberOfRows(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws Exception
      Throws:
      Exception
    • buildScanner

      protected org.apache.hadoop.hbase.client.ResultScanner buildScanner(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws IOException
      Throws:
      IOException
    • putRows

      protected void putRows(org.apache.hadoop.hbase.client.Table ht, int numRows, String value, String key) throws IOException
      Throws:
      IOException
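      The keyPrefix/value helpers above (putRows, getNumberOfRows, buildScanner, deleteColumns) are typically used together. A minimal sketch, using only the signatures documented here; exactly how putRows derives row keys from the key argument, and whether the table starts empty, are assumptions:

      // Sketch: write rows under a prefix, count the matches, then clean up.
      // The prefix and value strings are arbitrary examples.
      void examplePrefixWorkflow(org.apache.hadoop.hbase.client.Table table) throws Exception {
        putRows(table, 3, "example-value", "example-prefix");
        int matching = getNumberOfRows("example-prefix", "example-value", table);
        // Assuming an initially empty table, only the three rows just written match.
        org.junit.Assert.assertEquals(3, matching);
        deleteColumns(table, "example-value", "example-prefix");
      }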
    • assertRowCount

      protected void assertRowCount(org.apache.hadoop.hbase.client.Table t, int expected) throws IOException
      Throws:
      IOException
    • createScanWithRowFilter

      protected org.apache.hadoop.hbase.client.Scan createScanWithRowFilter(byte[] key)
    • createScanWithRowFilter

      protected org.apache.hadoop.hbase.client.Scan createScanWithRowFilter(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op)
    • splitTable

      protected List<org.apache.hadoop.hbase.HRegionLocation> splitTable(org.apache.hadoop.hbase.client.Table t) throws IOException
      Split table into multiple regions.
      Parameters:
      t - Table to split.
      Returns:
      List of region locations for the table after the split; each entry pairs a region with the server hosting it.
      Throws:
      IOException
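      For illustration, a caller might split a freshly written table and inspect where its regions ended up; the loop below relies only on the documented return type:

      // Sketch: split the table, then print each region alongside its server.
      void exampleSplit(org.apache.hadoop.hbase.client.Table table) throws java.io.IOException {
        java.util.List<org.apache.hadoop.hbase.HRegionLocation> locations = splitTable(table);
        for (org.apache.hadoop.hbase.HRegionLocation location : locations) {
          // Each HRegionLocation pairs a region with the server currently hosting it.
          System.out.println(location.getRegion().getRegionNameAsString()
            + " -> " + location.getServerName());
        }
      }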
    • waitOnSplit

      private List<org.apache.hadoop.hbase.HRegionLocation> waitOnSplit(org.apache.hadoop.hbase.client.Table t) throws IOException
      Throws:
      IOException
    • getSingleScanResult

      protected org.apache.hadoop.hbase.client.Result getSingleScanResult(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan) throws IOException
      Throws:
      IOException
    • makeNAscii

      byte[][] makeNAscii(byte[] base, int n)
    • makeN

      protected byte[][] makeN(byte[] base, int n)
    • makeNBig

      protected byte[][] makeNBig(byte[] base, int n)
    • makeStamps

      protected long[] makeStamps(int n)
    • equals

      protected static boolean equals(byte[] left, byte[] right)
    • assertKey

      protected void assertKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value)
    • assertIncrementKey

      static void assertIncrementKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value)
    • assertNumKeys

      protected void assertNumKeys(org.apache.hadoop.hbase.client.Result result, int n) throws Exception
      Throws:
      Exception
    • assertNResult

      protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs)
    • assertNResult

      protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end)
    • assertDoubleResult

      protected void assertDoubleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB)
      Validate that the result contains exactly the two specified keys. It is assumed key A sorts before key B; see the usage sketch below.
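      A hedged usage sketch (the row, family and qualifiers are caller-supplied here): fetch both columns with a single Get and assert that the result holds exactly those two cells, with qualifierA sorting before qualifierB:

      void exampleDoubleResult(org.apache.hadoop.hbase.client.Table table, byte[] row,
          byte[] family, byte[] qualifierA, byte[] valueA, byte[] qualifierB, byte[] valueB)
          throws Exception {
        org.apache.hadoop.hbase.client.Get get = new org.apache.hadoop.hbase.client.Get(row);
        get.addColumn(family, qualifierA);
        get.addColumn(family, qualifierB);
        org.apache.hadoop.hbase.client.Result result = table.get(get);
        // The result must contain exactly these two cells, in sort order A then B.
        assertDoubleResult(result, row, family, qualifierA, valueA, family, qualifierB, valueB);
      }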
    • assertSingleResult

      protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value)
    • assertSingleResult

      protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value)
    • assertSingleResult

      protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value)
    • assertEmptyResult

      protected void assertEmptyResult(org.apache.hadoop.hbase.client.Result result) throws Exception
      Throws:
      Exception
    • assertNullResult

      protected void assertNullResult(org.apache.hadoop.hbase.client.Result result) throws Exception
      Throws:
      Exception
    • getVersionRangeAndVerifyGreaterThan

      protected void getVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • getVersionRangeAndVerify

      protected void getVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • getAllVersionsAndVerify

      protected void getAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • scanVersionRangeAndVerifyGreaterThan

      protected void scanVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • scanVersionRangeAndVerify

      protected void scanVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • scanAllVersionsAndVerify

      protected void scanAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException
      Throws:
      IOException
    • getVersionAndVerify

      protected void getVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception
      Throws:
      Exception
    • getVersionAndVerifyMissing

      protected void getVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception
      Throws:
      Exception
    • scanVersionAndVerify

      protected void scanVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception
      Throws:
      Exception
    • scanVersionAndVerifyMissing

      protected void scanVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception
      Throws:
      Exception
    • getTestNull

      protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception
      Throws:
      Exception
    • getTestNull

      protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value) throws Exception
      Throws:
      Exception
    • scanTestNull

      protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception
      Throws:
      Exception
    • scanTestNull

      protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) throws Exception
      Throws:
      Exception
    • singleRowGetTest

      protected void singleRowGetTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception
      Throws:
      Exception
    • singleRowScanTest

      protected void singleRowScanTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception
      Throws:
      Exception
    • getVerifySingleColumn

      protected void getVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception
      Verify a single column using gets. Expects family and qualifier arrays to be valid for at least the range idx-2 to idx+2; see the sketch below for how the indices line up.
      Throws:
      Exception
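      A sketch of how the array and index parameters line up, assuming the makeN helper above yields distinct byte[] variants of the base name (its exact layout is not documented here) and that the table schema already contains the families used:

      void exampleVerifySingleColumn(org.apache.hadoop.hbase.client.Table table) throws Exception {
        byte[][] rows = makeN(org.apache.hadoop.hbase.util.Bytes.toBytes("row"), 10);
        byte[][] families = makeN(org.apache.hadoop.hbase.util.Bytes.toBytes("family"), 10);
        byte[][] qualifiers = makeN(org.apache.hadoop.hbase.util.Bytes.toBytes("qualifier"), 10);
        byte[][] values = makeN(org.apache.hadoop.hbase.util.Bytes.toBytes("value"), 10);
        // Write a single cell at index 5 of each array.
        org.apache.hadoop.hbase.client.Put put = new org.apache.hadoop.hbase.client.Put(rows[5]);
        put.addColumn(families[5], qualifiers[5], values[5]);
        table.put(put);
        // Index 5 keeps indices 3..7 valid, satisfying the idx-2 to idx+2 requirement,
        // and the surrounding gets should see only that one cell.
        getVerifySingleColumn(table, rows, 5, families, 5, qualifiers, 5, values, 5);
      }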
    • scanVerifySingleColumn

      protected void scanVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception
      Verify a single column using scanners. Expects family and qualifier arrays to be valid for at least the range idx-2 to idx+2. Expects the row array to be valid for at least idx to idx+2.
      Throws:
      Exception
    • getVerifySingleEmpty

      protected void getVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception
      Verify we do not read any values by accident around a single column. Same requirements as getVerifySingleColumn.
      Throws:
      Exception
    • scanVerifySingleEmpty

      protected void scanVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception
      Throws:
      Exception