/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Verify that counter columns keep accumulating across a memstore flush: increments applied after
 * the flush must build on the flushed values rather than resetting.
 */
@Category({ RegionServerTests.class, SmallTests.class })
public class TestResettingCounters {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestResettingCounters.class);

  @Rule
  public TestName name = new TestName();

  @Test
  public void testResettingCounters() throws Exception {
    HBaseTestingUtil htu = new HBaseTestingUtil();
    Configuration conf = htu.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    byte[] table = Bytes.toBytes(name.getMethodName());
    byte[][] families =
      new byte[][] { Bytes.toBytes("family1"), Bytes.toBytes("family2"), Bytes.toBytes("family3") };
    int numQualifiers = 10;
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) {
      qualifiers[i] = Bytes.toBytes("qf" + i);
    }
    int numRows = 10;
    byte[][] rows = new byte[numRows][];
    for (int i = 0; i < numRows; i++) {
      rows[i] = Bytes.toBytes("r" + i);
    }

    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(table));
    for (byte[] family : families) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    TableDescriptor tableDescriptor = builder.build();
    RegionInfo hri =
      RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    String testDir = htu.getDataTestDir() + "/TestResettingCounters/";
    Path path = new Path(testDir);
    if (fs.exists(path)) {
      if (!fs.delete(path, true)) {
        throw new IOException("Failed delete of " + path);
      }
    }
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, path, conf, tableDescriptor);
    try {
      // build three Increment mutations against the same row: one touching the odd qualifiers,
      // one the even qualifiers, and one touching all qualifiers of the first family
      Increment odd = new Increment(rows[0]);
      odd.setDurability(Durability.SKIP_WAL);
      Increment even = new Increment(rows[0]);
      even.setDurability(Durability.SKIP_WAL);
      Increment all = new Increment(rows[0]);
      all.setDurability(Durability.SKIP_WAL);
      for (int i = 0; i < numQualifiers; i++) {
        if (i % 2 == 0) {
          even.addColumn(families[0], qualifiers[i], 1);
        } else {
          odd.addColumn(families[0], qualifiers[i], 1);
        }
        all.addColumn(families[0], qualifiers[i], 1);
      }

      // increment odd qualifiers 5 times and flush
      for (int i = 0; i < 5; i++) {
        region.increment(odd, HConstants.NO_NONCE, HConstants.NO_NONCE);
      }
      region.flush(true);

      // increment even qualifiers 5 times
      for (int i = 0; i < 5; i++) {
        region.increment(even, HConstants.NO_NONCE, HConstants.NO_NONCE);
      }

      // increment all qualifiers, should have value=6 for all
      Result result = region.increment(all, HConstants.NO_NONCE, HConstants.NO_NONCE);
      assertEquals(numQualifiers, result.size());
      Cell[] kvs = result.rawCells();
      for (int i = 0; i < kvs.length; i++) {
        System.out.println(kvs[i].toString());
        assertTrue(CellUtil.matchingQualifier(kvs[i], qualifiers[i]));
        assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i])));
      }
    } finally {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }

}