/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.balancer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HDFSBlocksDistribution.HostAndWeight;
import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test {@link RegionHDFSBlockLocationFinder}.
 */
@Category({ MasterTests.class, SmallTests.class })
public class TestRegionHDFSBlockLocationFinder {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRegionHDFSBlockLocationFinder.class);

  private static final Random RNG = new Random(); // This test depends on Random#setSeed
  private static TableDescriptor TD;
  private static List<RegionInfo> REGIONS;

  private RegionHDFSBlockLocationFinder finder;

  // Build a deterministic, per-region distribution by seeding the shared RNG with the
  // region's hash code, so repeated calls for the same region produce equal results.
  private static HDFSBlocksDistribution generate(RegionInfo region) {
    HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
    int seed = region.hashCode();
    RNG.setSeed(seed);
    int size = 1 + RNG.nextInt(10);
    for (int i = 0; i < size; i++) {
      distribution.addHostsAndBlockWeight(new String[] { "host-" + i }, 1 + RNG.nextInt(100));
    }
    return distribution;
  }

  @BeforeClass
  public static void setUpBeforeClass() {
    TD = TableDescriptorBuilder.newBuilder(TableName.valueOf("RegionLocationFinder")).build();
    int numRegions = 100;
    REGIONS = new ArrayList<>(numRegions);
    for (int i = 1; i <= numRegions; i++) {
      byte[] startKey = i == 1 ? HConstants.EMPTY_START_ROW : Bytes.toBytes(i);
      byte[] endKey = i == numRegions ? HConstants.EMPTY_BYTE_ARRAY : Bytes.toBytes(i + 1);
      RegionInfo region = RegionInfoBuilder.newBuilder(TD.getTableName()).setStartKey(startKey)
        .setEndKey(endKey).build();
      REGIONS.add(region);
    }
  }

  @Before
  public void setUp() {
    finder = new RegionHDFSBlockLocationFinder();
    finder.setClusterInfoProvider(new DummyClusterInfoProvider(null) {

      @Override
      public TableDescriptor getTableDescriptor(TableName tableName) throws IOException {
        return TD;
      }

      @Override
      public List<RegionInfo> getAssignedRegions() {
        return REGIONS;
      }

      @Override
      public HDFSBlocksDistribution computeHDFSBlocksDistribution(Configuration conf,
        TableDescriptor tableDescriptor, RegionInfo regionInfo) throws IOException {
        return generate(regionInfo);
      }
    });
  }

  @Test
  public void testMapHostNameToServerName() throws Exception {
    assertTrue(finder.mapHostNameToServerName(null).isEmpty());

    List<String> hosts = new ArrayList<>();
    for (int i = 0; i < 10; i += 2) {
      hosts.add("host-" + i);
    }
    assertTrue(finder.mapHostNameToServerName(hosts).isEmpty());

    Map<ServerName, ServerMetrics> serverMetrics = new HashMap<>();
    for (int i = 0; i < 10; i += 2) {
      ServerName sn = ServerName.valueOf("host-" + i, 12345, 12345);
      serverMetrics.put(sn, null);
    }
    ClusterMetrics metrics = mock(ClusterMetrics.class);
    when(metrics.getLiveServerMetrics()).thenReturn(serverMetrics);

    finder.setClusterMetrics(metrics);
    List<ServerName> sns = finder.mapHostNameToServerName(hosts);
    assertEquals(5, sns.size());
    for (int i = 0; i < 5; i++) {
      ServerName sn = sns.get(i);
      assertEquals("host-" + (2 * i), sn.getHostname());
      assertEquals(12345, sn.getPort());
      assertEquals(12345, sn.getStartcode());
    }
  }

  @Test
  public void testRefreshAndWait() throws Exception {
    finder.getCache().invalidateAll();
    for (RegionInfo region : REGIONS) {
      assertNull(finder.getCache().getIfPresent(region));
    }
    finder.refreshAndWait(REGIONS);
    for (RegionInfo region : REGIONS) {
      assertNotNull(finder.getCache().getIfPresent(region));
    }
  }

  private void assertHostAndWeightEquals(HDFSBlocksDistribution expected,
    HDFSBlocksDistribution actual) {
    Map<String, HostAndWeight> expectedMap = expected.getHostAndWeights();
    Map<String, HostAndWeight> actualMap = actual.getHostAndWeights();
    assertEquals(expectedMap.size(), actualMap.size());
    expectedMap.forEach((k, expectedHostAndWeight) -> {
      HostAndWeight actualHostAndWeight = actualMap.get(k);
      assertEquals(expectedHostAndWeight.getHost(), actualHostAndWeight.getHost());
      assertEquals(expectedHostAndWeight.getWeight(), actualHostAndWeight.getWeight());
      assertEquals(expectedHostAndWeight.getWeightForSsd(), actualHostAndWeight.getWeightForSsd());
    });
  }

  @Test
  public void testGetBlockDistribution() {
    Map<RegionInfo, HDFSBlocksDistribution> cache = new HashMap<>();
    for (RegionInfo region : REGIONS) {
      HDFSBlocksDistribution hbd = finder.getBlockDistribution(region);
      assertHostAndWeightEquals(generate(region), hbd);
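      // remember the instance returned by the first lookup so we can assert below that
      // subsequent lookups hand back the very same cached object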
      cache.put(region, hbd);
    }
    // the instance should be cached
    for (RegionInfo region : REGIONS) {
      HDFSBlocksDistribution hbd = finder.getBlockDistribution(region);
      assertSame(cache.get(region), hbd);
    }
  }

  @Test
  public void testGetTopBlockLocations() {
    Map<ServerName, ServerMetrics> serverMetrics = new HashMap<>();
    for (int i = 0; i < 10; i++) {
      ServerName sn = ServerName.valueOf("host-" + i, 12345, 12345);
      serverMetrics.put(sn, null);
    }
    ClusterMetrics metrics = mock(ClusterMetrics.class);
    when(metrics.getLiveServerMetrics()).thenReturn(serverMetrics);
    finder.setClusterMetrics(metrics);
    for (RegionInfo region : REGIONS) {
      List<ServerName> servers = finder.getTopBlockLocations(region);
      long previousWeight = Long.MAX_VALUE;
      HDFSBlocksDistribution hbd = generate(region);
      for (ServerName server : servers) {
        long weight = hbd.getWeight(server.getHostname());
        assertTrue(weight <= previousWeight);
        previousWeight = weight;
      }
    }
  }

  @Test
  public void testRefreshRegionsWithChangedLocality() throws InterruptedException {
    ServerName testServer = ServerName.valueOf("host-0", 12345, 12345);
    RegionInfo testRegion = REGIONS.get(0);

    Map<RegionInfo, HDFSBlocksDistribution> cache = new HashMap<>();
    for (RegionInfo region : REGIONS) {
      HDFSBlocksDistribution hbd = finder.getBlockDistribution(region);
      assertHostAndWeightEquals(generate(region), hbd);
      cache.put(region, hbd);
    }

    finder
      .setClusterMetrics(getMetricsWithLocality(testServer, testRegion.getRegionName(), 0.123f));

    // everything should be cached, because metrics were null before
    for (RegionInfo region : REGIONS) {
      HDFSBlocksDistribution hbd = finder.getBlockDistribution(region);
      assertSame(cache.get(region), hbd);
    }

    finder
      .setClusterMetrics(getMetricsWithLocality(testServer, testRegion.getRegionName(), 0.345f));

    // cache refresh happens in a background thread, so we need to wait for the value to
    // update before running assertions.
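    // Poll rather than sleeping a fixed amount: the refresh timing is nondeterministic, so give
    // the background thread up to 30 seconds to publish a new distribution for the test region.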
    long now = System.currentTimeMillis();
    HDFSBlocksDistribution cached = cache.get(testRegion);
    HDFSBlocksDistribution newValue;
    do {
      Thread.sleep(1_000);
      newValue = finder.getBlockDistribution(testRegion);
    } while (cached == newValue && System.currentTimeMillis() - now < 30_000);

    // locality changed just for our test region, so it should no longer be the same
    for (RegionInfo region : REGIONS) {
      HDFSBlocksDistribution hbd = finder.getBlockDistribution(region);
      if (region.equals(testRegion)) {
        assertNotSame(cache.get(region), hbd);
      } else {
        assertSame(cache.get(region), hbd);
      }
    }
  }

  private ClusterMetrics getMetricsWithLocality(ServerName serverName, byte[] region,
    float locality) {
    RegionMetrics regionMetrics = mock(RegionMetrics.class);
    when(regionMetrics.getDataLocality()).thenReturn(locality);

    Map<byte[], RegionMetrics> regionMetricsMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    regionMetricsMap.put(region, regionMetrics);

    ServerMetrics serverMetrics = mock(ServerMetrics.class);
    when(serverMetrics.getRegionMetrics()).thenReturn(regionMetricsMap);

    Map<ServerName, ServerMetrics> serverMetricsMap = new HashMap<>();
    serverMetricsMap.put(serverName, serverMetrics);

    ClusterMetrics metrics = mock(ClusterMetrics.class);
    when(metrics.getLiveServerMetrics()).thenReturn(serverMetricsMap);

    return metrics;
  }
}