001/* 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.regionserver; 019 020import static org.junit.Assert.assertEquals; 021import static org.junit.Assert.assertTrue; 022 023import java.io.IOException; 024import java.util.ArrayList; 025import java.util.Collections; 026import java.util.List; 027import org.apache.hadoop.conf.Configuration; 028import org.apache.hadoop.fs.FileSystem; 029import org.apache.hadoop.fs.Path; 030import org.apache.hadoop.hbase.Cell; 031import org.apache.hadoop.hbase.CellComparatorImpl; 032import org.apache.hadoop.hbase.HBaseClassTestRule; 033import org.apache.hadoop.hbase.HBaseTestingUtility; 034import org.apache.hadoop.hbase.HConstants; 035import org.apache.hadoop.hbase.MemoryCompactionPolicy; 036import org.apache.hadoop.hbase.PrivateCellUtil; 037import org.apache.hadoop.hbase.TableName; 038import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; 039import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; 040import org.apache.hadoop.hbase.client.RegionInfo; 041import org.apache.hadoop.hbase.client.RegionInfoBuilder; 042import org.apache.hadoop.hbase.client.Scan; 043import org.apache.hadoop.hbase.client.TableDescriptor; 044import org.apache.hadoop.hbase.client.TableDescriptorBuilder; 045import org.apache.hadoop.hbase.io.hfile.BlockCache; 046import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory; 047import org.apache.hadoop.hbase.testclassification.MediumTests; 048import org.apache.hadoop.hbase.util.Bytes; 049import org.apache.hadoop.hbase.util.CommonFSUtils; 050import org.apache.hadoop.hbase.util.FSUtils; 051import org.apache.hadoop.hbase.wal.WAL; 052import org.apache.hadoop.hbase.wal.WALEdit; 053import org.apache.hadoop.hbase.wal.WALFactory; 054import org.apache.hadoop.hbase.wal.WALKey; 055import org.apache.hadoop.hbase.wal.WALSplitUtil; 056import org.apache.hadoop.hbase.wal.WALStreamReader; 057import org.junit.BeforeClass; 058import org.junit.ClassRule; 059import org.junit.Rule; 060import org.junit.Test; 061import org.junit.experimental.categories.Category; 062import org.junit.rules.TestName; 063import org.slf4j.Logger; 064import org.slf4j.LoggerFactory; 065 066/** 067 * Tests around replay of recovered.edits content. 068 */ 069@Category({ MediumTests.class }) 070public class TestRecoveredEdits { 071 072 @ClassRule 073 public static final HBaseClassTestRule CLASS_RULE = 074 HBaseClassTestRule.forClass(TestRecoveredEdits.class); 075 076 private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); 077 private static final Logger LOG = LoggerFactory.getLogger(TestRecoveredEdits.class); 078 079 private static BlockCache blockCache; 080 081 @Rule 082 public TestName testName = new TestName(); 083 084 /** 085 * Path to a recovered.edits file in hbase-server test resources folder. This is a little fragile 086 * getting this path to a file of 10M of edits. 087 */ 088 @SuppressWarnings("checkstyle:VisibilityModifier") 089 public static final Path RECOVEREDEDITS_PATH = new Path( 090 System.getProperty("test.build.classes", "target/test-classes"), "0000000000000016310"); 091 092 /** 093 * Name of table referenced by edits in the recovered.edits file. 094 */ 095 public static final String RECOVEREDEDITS_TABLENAME = "IntegrationTestBigLinkedList"; 096 097 /** 098 * Column family referenced by edits in the recovered.edits file. 099 */ 100 public static final byte[] RECOVEREDEDITS_COLUMNFAMILY = Bytes.toBytes("meta"); 101 public static final byte[][] RECOVEREDITS_COLUMNFAMILY_ARRAY = 102 new byte[][] { RECOVEREDEDITS_COLUMNFAMILY }; 103 public static final ColumnFamilyDescriptor RECOVEREDEDITS_CFD = 104 ColumnFamilyDescriptorBuilder.newBuilder(RECOVEREDEDITS_COLUMNFAMILY).build(); 105 106 /** 107 * Name of table mentioned edits from recovered.edits 108 */ 109 @BeforeClass 110 public static void setUpBeforeClass() throws Exception { 111 blockCache = BlockCacheFactory.createBlockCache(TEST_UTIL.getConfiguration()); 112 } 113 114 /** 115 * HBASE-12782 ITBLL fails for me if generator does anything but 5M per maptask. Create a region. 116 * Close it. Then copy into place a file to replay, one that is bigger than configured flush size 117 * so we bring on lots of flushes. Then reopen and confirm all edits made it in. 118 */ 119 @Test 120 public void testReplayWorksThoughLotsOfFlushing() throws IOException { 121 for (MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) { 122 testReplayWorksWithMemoryCompactionPolicy(policy); 123 } 124 } 125 126 private void testReplayWorksWithMemoryCompactionPolicy(MemoryCompactionPolicy policy) 127 throws IOException { 128 Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); 129 // Set it so we flush every 1M or so. Thats a lot. 130 conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024); 131 conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(policy).toLowerCase()); 132 TableDescriptor tableDescriptor = 133 TableDescriptorBuilder.newBuilder(TableName.valueOf(testName.getMethodName())) 134 .setColumnFamily(RECOVEREDEDITS_CFD).build(); 135 RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(); 136 final String encodedRegionName = hri.getEncodedName(); 137 Path hbaseRootDir = TEST_UTIL.getDataTestDir(); 138 FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration()); 139 Path tableDir = CommonFSUtils.getTableDir(hbaseRootDir, tableDescriptor.getTableName()); 140 HRegionFileSystem hrfs = new HRegionFileSystem(TEST_UTIL.getConfiguration(), fs, tableDir, hri); 141 if (fs.exists(hrfs.getRegionDir())) { 142 LOG.info("Region directory already exists. Deleting."); 143 fs.delete(hrfs.getRegionDir(), true); 144 } 145 HRegion region = 146 HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, conf, tableDescriptor, blockCache); 147 assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName()); 148 List<String> storeFiles = region.getStoreFileList(RECOVEREDITS_COLUMNFAMILY_ARRAY); 149 // There should be no store files. 150 assertTrue(storeFiles.isEmpty()); 151 region.close(); 152 Path regionDir = FSUtils.getRegionDirFromRootDir(hbaseRootDir, hri); 153 Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regionDir); 154 // Copy this file under the region's recovered.edits dir so it is replayed on reopen. 155 Path destination = new Path(recoveredEditsDir, RECOVEREDEDITS_PATH.getName()); 156 fs.copyToLocalFile(RECOVEREDEDITS_PATH, destination); 157 assertTrue(fs.exists(destination)); 158 // Now the file 0000000000000016310 is under recovered.edits, reopen the region to replay. 159 region = HRegion.openHRegion(region, null); 160 assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName()); 161 storeFiles = region.getStoreFileList(RECOVEREDITS_COLUMNFAMILY_ARRAY); 162 // Our 0000000000000016310 is 10MB. Most of the edits are for one region. Lets assume that if 163 // we flush at 1MB, that there are at least 3 flushed files that are there because of the 164 // replay of edits. 165 if (policy == MemoryCompactionPolicy.EAGER || policy == MemoryCompactionPolicy.ADAPTIVE) { 166 assertTrue("Files count=" + storeFiles.size(), storeFiles.size() >= 1); 167 } else { 168 assertTrue("Files count=" + storeFiles.size(), storeFiles.size() > 10); 169 } 170 // Now verify all edits made it into the region. 171 int count = verifyAllEditsMadeItIn(fs, conf, RECOVEREDEDITS_PATH, region); 172 assertTrue(count > 0); 173 LOG.info("Checked " + count + " edits made it in"); 174 } 175 176 /** Returns Return how many edits seen. */ 177 // Used by TestWALPlayer over in hbase-mapreduce too. 178 public static int verifyAllEditsMadeItIn(final FileSystem fs, final Configuration conf, 179 final Path edits, final HRegion region) throws IOException { 180 int count = 0; 181 // Read all cells from recover edits 182 List<Cell> walCells = new ArrayList<>(); 183 try (WALStreamReader reader = WALFactory.createStreamReader(fs, edits, conf)) { 184 WAL.Entry entry; 185 while ((entry = reader.next()) != null) { 186 WALKey key = entry.getKey(); 187 WALEdit val = entry.getEdit(); 188 count++; 189 // Check this edit is for this region. 190 if ( 191 !Bytes.equals(key.getEncodedRegionName(), region.getRegionInfo().getEncodedNameAsBytes()) 192 ) { 193 continue; 194 } 195 Cell previous = null; 196 for (Cell cell : val.getCells()) { 197 if (WALEdit.isMetaEditFamily(cell)) { 198 continue; 199 } 200 if (previous != null && CellComparatorImpl.COMPARATOR.compareRows(previous, cell) == 0) { 201 continue; 202 } 203 previous = cell; 204 walCells.add(cell); 205 } 206 } 207 } 208 209 // Read all cells from region 210 List<Cell> regionCells = new ArrayList<>(); 211 try (RegionScanner scanner = region.getScanner(new Scan())) { 212 List<Cell> tmpCells; 213 do { 214 tmpCells = new ArrayList<>(); 215 scanner.nextRaw(tmpCells); 216 regionCells.addAll(tmpCells); 217 } while (!tmpCells.isEmpty()); 218 } 219 220 Collections.sort(walCells, CellComparatorImpl.COMPARATOR); 221 int found = 0; 222 for (int i = 0, j = 0; i < walCells.size() && j < regionCells.size();) { 223 int compareResult = PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, 224 walCells.get(i), regionCells.get(j)); 225 if (compareResult == 0) { 226 i++; 227 j++; 228 found++; 229 } else if (compareResult > 0) { 230 j++; 231 } else { 232 i++; 233 } 234 } 235 assertEquals("Only found " + found + " cells in region, but there are " + walCells.size() 236 + " cells in recover edits", found, walCells.size()); 237 return count; 238 } 239}