/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.io.hfile.CorruptHFileException;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Tests that a scan over a corrupt HFile fails with a {@link DoNotRetryIOException} instead of
 * being retried. Corruption is simulated by a coprocessor that throws
 * {@link CorruptHFileException} from its {@code preScannerNext} hook.
 */
@Category(MediumTests.class)
public class TestScannerWithCorruptHFile {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestScannerWithCorruptHFile.class);

  @Rule
  public TestName name = new TestName();
  private static final byte[] FAMILY_NAME = Bytes.toBytes("f");
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

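  /** Starts a single-node mini cluster shared by every test in this class. */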
  @BeforeClass
  public static void setup() throws Exception {
    TEST_UTIL.startMiniCluster(1);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

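  /**
   * Region coprocessor that simulates a corrupt HFile by throwing {@link CorruptHFileException}
   * whenever a scanner asks the region for its next batch of results.
   */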
  public static class CorruptHFileCoprocessor implements RegionCoprocessor, RegionObserver {
    @Override
    public Optional<RegionObserver> getRegionObserver() {
      return Optional.of(this);
    }

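    // Fail every scanner next() call as if the underlying HFile were corrupt.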
    @Override
    public boolean preScannerNext(ObserverContext<? extends RegionCoprocessorEnvironment> e,
      InternalScanner s, List<Result> results, int limit, boolean hasMore) throws IOException {
      throw new CorruptHFileException("For test");
    }
  }

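  /**
   * Scans a table whose coprocessor throws {@link CorruptHFileException}. Because
   * {@link CorruptHFileException} is a {@link DoNotRetryIOException}, the scan should fail fast
   * on the client instead of being retried.
   */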
  @Test(expected = DoNotRetryIOException.class)
  public void testScanOnCorruptHFile() throws IOException {
    TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setCoprocessor(CorruptHFileCoprocessor.class.getName())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME)).build();
    try (Table table = TEST_UTIL.createTable(tableDescriptor, null)) {
      loadTable(table, 1);
      scan(table);
    }
  }

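  /** Writes {@code numRows} single-cell rows to the table, skipping the WAL for speed. */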
  private void loadTable(Table table, int numRows) throws IOException {
    for (int i = 0; i < numRows; ++i) {
      byte[] row = Bytes.toBytes(i);
      Put put = new Put(row);
      put.setDurability(Durability.SKIP_WAL);
      put.addColumn(FAMILY_NAME, null, row);
      table.put(put);
    }
  }

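  /**
   * Opens a scanner and fetches a single row; the server-side {@code preScannerNext} hook throws,
   * and the exception is expected to surface to the client as a {@link DoNotRetryIOException}.
   */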
  private void scan(Table table) throws IOException {
    Scan scan = new Scan();
    scan.setCaching(1);
    scan.setCacheBlocks(false);
    try (ResultScanner scanner = table.getScanner(scan)) {
      scanner.next();
    }
  }
}