/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test that TableRecordReaderImpl reads back all data when the scan is configured to return
 * cursor results (tiny max result size, per-cell heartbeat checks, short scanner timeout).
 */
@Category(MediumTests.class)
public class TestTableRecordReader {
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestTableRecordReader.class);

  private static final TableName TABLE_NAME = TableName.valueOf("TestTableRecordReader");

  private static final int NUM_ROWS = 5;
  private static final byte[] ROW = Bytes.toBytes("testRow");
  private static final byte[][] ROWS = HTestConst.makeNAscii(ROW, NUM_ROWS);

  private static final int NUM_FAMILIES = 2;
  private static final byte[] FAMILY = Bytes.toBytes("testFamily");
  private static final byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, NUM_FAMILIES);

  private static final int NUM_QUALIFIERS = 2;
  private static final byte[] QUALIFIER = Bytes.toBytes("testQualifier");
  private static final byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, NUM_QUALIFIERS);

  private static final int VALUE_SIZE = 10;
  private static final byte[] VALUE = Bytes.createMaxByteArray(VALUE_SIZE);

  private static final int TIMEOUT = 4000;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();

    conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, TIMEOUT);
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, TIMEOUT);

    // Check the timeout condition after every cell
    conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 1);
    TEST_UTIL.startMiniCluster(1);

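    // Populate the table with NUM_ROWS rows, each holding NUM_FAMILIES * NUM_QUALIFIERS cells,
    // so the scan in the test below has multiple cells to page through one at a time.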
    createTestTable(TABLE_NAME, ROWS, FAMILIES, QUALIFIERS, VALUE);
  }

  private static void createTestTable(TableName name, byte[][] rows, byte[][] families,
    byte[][] qualifiers, byte[] cellValue) throws IOException {
    // Close the Table handle once the puts are flushed instead of leaking it.
    try (Table table = TEST_UTIL.createTable(name, families)) {
      table.put(createPuts(rows, families, qualifiers, cellValue));
    }
  }

  private static List<Put> createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers,
    byte[] value) throws IOException {
    List<Put> puts = new ArrayList<>();
    for (int row = 0; row < rows.length; row++) {
      Put put = new Put(rows[row]);
      for (int fam = 0; fam < families.length; fam++) {
        for (int qual = 0; qual < qualifiers.length; qual++) {
          // The qualifier index doubles as the cell timestamp.
          KeyValue kv = new KeyValue(rows[row], families[fam], qualifiers[qual], qual, value);
          put.add(kv);
        }
      }
      puts.add(put);
    }
    return puts;
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void test() throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
      Table table = conn.getTable(TABLE_NAME)) {
      TableRecordReaderImpl trr = new TableRecordReaderImpl();
      // A one-byte max result size with cursor results enabled makes the server return after
      // every cell, exercising the reader's handling of cursor/heartbeat responses.
      Scan scan =
        new Scan().setMaxResultSize(1).setCaching(Integer.MAX_VALUE).setNeedCursorResult(true);
      trr.setScan(scan);
      trr.setHTable(table);
      trr.initialize(null, null);
      int num = 0;
      while (trr.nextKeyValue()) {
        num++;
      }
      // The reader should surface every cell written in setup exactly once.
      assertEquals(NUM_ROWS * NUM_FAMILIES * NUM_QUALIFIERS, num);
    }
  }
}