Package org.apache.hadoop.hbase.client
Class FromClientSideBase
java.lang.Object
org.apache.hadoop.hbase.client.FromClientSideBase
- Direct Known Subclasses:
TestFromClientSide,
TestFromClientSide4,
TestFromClientSide5
Base for TestFromClientSide* classes. Has common defines and utility used by all.
-
Field Summary
Modifier and TypeFieldDescription(package private) static byte[]
(package private) static final byte[]
private static final org.slf4j.Logger
(package private) static byte[]
(package private) static byte[]
(package private) static int
(package private) static HBaseTestingUtility
(package private) static byte[]
-
Constructor Summary
-
Method Summary
Modifier and TypeMethodDescriptionprotected static void
private boolean
allRegionsHaveHostnames
(List<org.apache.hadoop.hbase.HRegionLocation> regions) protected void
assertDoubleResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB) Validate that result contains two specified keys, exactly.protected void
assertEmptyResult
(org.apache.hadoop.hbase.client.Result result) protected static void
assertIncrementKey
(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value) protected void
assertKey
(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value) protected void
assertNResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs) protected void
assertNResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected void
assertNullResult
(org.apache.hadoop.hbase.client.Result result) protected void
assertNumKeys
(org.apache.hadoop.hbase.client.Result result, int n) protected void
assertRowCount
(org.apache.hadoop.hbase.client.Table t, int expected) protected void
assertSingleResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value) protected void
assertSingleResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value) protected void
assertSingleResult
(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value) protected org.apache.hadoop.hbase.client.ResultScanner
buildScanner
(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) protected org.apache.hadoop.hbase.client.Scan
createScanWithRowFilter
(byte[] key) protected org.apache.hadoop.hbase.client.Scan
createScanWithRowFilter
(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op) protected void
deleteColumns
(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix) protected static boolean
equals
(byte[] left, byte[] right) protected void
getAllVersionsAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected int
getNumberOfRows
(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) protected org.apache.hadoop.hbase.client.Result
getSingleScanResult
(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan) protected void
getTestNull
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) protected void
getTestNull
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value) protected void
getVerifySingleColumn
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) Verify a single column using gets.protected void
getVerifySingleEmpty
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) Verify we do not read any values by accident around a single column Same requirements as getVerifySingleColumnprotected void
getVersionAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) protected void
getVersionAndVerifyMissing
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) protected void
getVersionRangeAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected void
getVersionRangeAndVerifyGreaterThan
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected static final void
initialize
(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps) protected static boolean
isSameParameterizedCluster
(Class<?> registryImpl, int numHedgedReqs) JUnit does not provide an easy way to run a hook after each parameterized run.protected byte[][]
makeN
(byte[] base, int n) protected byte[][]
makeNAscii
(byte[] base, int n) protected byte[][]
makeNBig
(byte[] base, int n) protected long[]
makeStamps
(int n) protected void
protected void
scanAllVersionsAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected void
scanTestNull
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) protected void
scanTestNull
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) protected void
scanVerifySingleColumn
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) Verify a single column using scanners.protected void
scanVerifySingleEmpty
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) protected void
scanVersionAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) protected void
scanVersionAndVerifyMissing
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) protected void
scanVersionRangeAndVerify
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected void
scanVersionRangeAndVerifyGreaterThan
(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) protected void
singleRowGetTest
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) protected void
singleRowScanTest
(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) protected List<org.apache.hadoop.hbase.HRegionLocation>
splitTable
(org.apache.hadoop.hbase.client.Table t) Split table into multiple regions.private List<org.apache.hadoop.hbase.HRegionLocation>
waitOnSplit
(org.apache.hadoop.hbase.client.Table t)
-
Field Details
-
LOG
-
TEST_UTIL
-
ROW
-
FAMILY
-
INVALID_FAMILY
-
QUALIFIER
-
VALUE
-
SLAVES
-
-
Constructor Details
-
FromClientSideBase
-
-
Method Details
-
isSameParameterizedCluster
JUnit does not provide an easy way to run a hook after each parameterized run. Without that there is no easy way to restart the test cluster after each parameterized run. Annotation BeforeParam does not work either because it runs before parameterization and hence does not have access to the test parameters (which is weird). This *hack* checks if the current instance of test cluster configuration has the passed parameterized configs. In such a case, we can just reuse the cluster for test and do not need to initialize from scratch. While this is a hack, it saves a ton of time for the full test and de-flakes it. -
initialize
protected static final void initialize(Class<? extends org.apache.hadoop.hbase.client.ConnectionRegistry> registryImpl, int numHedgedReqs, Class<?>... cps) throws Exception - Throws:
Exception
-
afterClass
- Throws:
Exception
-
deleteColumns
protected void deleteColumns(org.apache.hadoop.hbase.client.Table ht, String value, String keyPrefix) throws IOException - Throws:
IOException
-
getNumberOfRows
protected int getNumberOfRows(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws Exception - Throws:
Exception
-
buildScanner
protected org.apache.hadoop.hbase.client.ResultScanner buildScanner(String keyPrefix, String value, org.apache.hadoop.hbase.client.Table ht) throws IOException - Throws:
IOException
-
putRows
protected void putRows(org.apache.hadoop.hbase.client.Table ht, int numRows, String value, String key) throws IOException - Throws:
IOException
-
assertRowCount
protected void assertRowCount(org.apache.hadoop.hbase.client.Table t, int expected) throws IOException - Throws:
IOException
-
createScanWithRowFilter
-
createScanWithRowFilter
protected org.apache.hadoop.hbase.client.Scan createScanWithRowFilter(byte[] key, byte[] startRow, org.apache.hadoop.hbase.CompareOperator op) -
splitTable
protected List<org.apache.hadoop.hbase.HRegionLocation> splitTable(org.apache.hadoop.hbase.client.Table t) throws IOException Split table into multiple regions.- Parameters:
t
- Table to split.- Returns:
- List of the locations (HRegionLocation) of the regions the table was split into.
- Throws:
IOException
-
waitOnSplit
private List<org.apache.hadoop.hbase.HRegionLocation> waitOnSplit(org.apache.hadoop.hbase.client.Table t) throws IOException - Throws:
IOException
-
allRegionsHaveHostnames
-
getSingleScanResult
protected org.apache.hadoop.hbase.client.Result getSingleScanResult(org.apache.hadoop.hbase.client.Table ht, org.apache.hadoop.hbase.client.Scan scan) throws IOException - Throws:
IOException
-
makeNAscii
-
makeN
-
makeNBig
-
makeStamps
-
equals
-
assertKey
protected void assertKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, byte[] value) -
assertIncrementKey
protected static void assertIncrementKey(org.apache.hadoop.hbase.Cell key, byte[] row, byte[] family, byte[] qualifier, long value) -
assertNumKeys
- Throws:
Exception
-
assertNResult
protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[][] families, byte[][] qualifiers, byte[][] values, int[][] idxs) -
assertNResult
protected void assertNResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) -
assertDoubleResult
protected void assertDoubleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] familyA, byte[] qualifierA, byte[] valueA, byte[] familyB, byte[] qualifierB, byte[] valueB) Validate that result contains two specified keys, exactly. It is assumed key A sorts before key B. -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, byte[] value) -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long value) -
assertSingleResult
protected void assertSingleResult(org.apache.hadoop.hbase.client.Result result, byte[] row, byte[] family, byte[] qualifier, long ts, byte[] value) -
assertEmptyResult
- Throws:
Exception
-
assertNullResult
- Throws:
Exception
-
getVersionRangeAndVerifyGreaterThan
protected void getVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getVersionRangeAndVerify
protected void getVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getAllVersionsAndVerify
protected void getAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanVersionRangeAndVerifyGreaterThan
protected void scanVersionRangeAndVerifyGreaterThan(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanVersionRangeAndVerify
protected void scanVersionRangeAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
scanAllVersionsAndVerify
protected void scanAllVersionsAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long[] stamps, byte[][] values, int start, int end) throws IOException - Throws:
IOException
-
getVersionAndVerify
protected void getVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception - Throws:
Exception
-
getVersionAndVerifyMissing
protected void getVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception - Throws:
Exception
-
scanVersionAndVerify
protected void scanVersionAndVerify(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp, byte[] value) throws Exception - Throws:
Exception
-
scanVersionAndVerifyMissing
protected void scanVersionAndVerifyMissing(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] qualifier, long stamp) throws Exception - Throws:
Exception
-
getTestNull
protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception - Throws:
Exception
-
getTestNull
protected void getTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, long value) throws Exception - Throws:
Exception
-
scanTestNull
protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value) throws Exception - Throws:
Exception
-
scanTestNull
protected void scanTestNull(org.apache.hadoop.hbase.client.Table ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) throws Exception - Throws:
Exception
-
singleRowGetTest
protected void singleRowGetTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception - Throws:
Exception
-
singleRowScanTest
protected void singleRowScanTest(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, byte[][] FAMILIES, byte[][] QUALIFIERS, byte[][] VALUES) throws Exception - Throws:
Exception
-
getVerifySingleColumn
protected void getVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception Verify a single column using gets. Expects family and qualifier arrays to be valid for at least the range: idx-2 to idx+2- Throws:
Exception
-
scanVerifySingleColumn
protected void scanVerifySingleColumn(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX, byte[][] VALUES, int VALUEIDX) throws Exception Verify a single column using scanners. Expects family and qualifier arrays to be valid for at least the range: idx-2 to idx+2 Expects row array to be valid for at least idx to idx+2- Throws:
Exception
-
getVerifySingleEmpty
protected void getVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception Verify we do not read any values by accident around a single column Same requirements as getVerifySingleColumn- Throws:
Exception
-
scanVerifySingleEmpty
protected void scanVerifySingleEmpty(org.apache.hadoop.hbase.client.Table ht, byte[][] ROWS, int ROWIDX, byte[][] FAMILIES, int FAMILYIDX, byte[][] QUALIFIERS, int QUALIFIERIDX) throws Exception - Throws:
Exception
-