/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Convert HBase tabular data from multiple scanners into a format that is consumable by Map/Reduce.
 * <p>
 * Usage example
 * </p>
 *
 * <pre>
 * List&lt;Scan&gt; scans = new ArrayList&lt;Scan&gt;();
 *
 * Scan scan1 = new Scan();
 * scan1.setStartRow(firstRow1);
 * scan1.setStopRow(lastRow1);
 * scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table1);
 * scans.add(scan1);
 *
 * Scan scan2 = new Scan();
 * scan2.setStartRow(firstRow2);
 * scan2.setStopRow(lastRow2);
 * scan2.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table2);
 * scans.add(scan2);
 *
 * TableMapReduceUtil.initTableMapperJob(scans, TableMapper.class, Text.class, IntWritable.class,
 *   job);
 * </pre>
 */
@InterfaceAudience.Public
public class MultiTableInputFormat extends MultiTableInputFormatBase implements Configurable {

  /** Job parameter that specifies the list of serialized scans. */
  public static final String SCANS = "hbase.mapreduce.scans";

  /** The configuration. */
  private Configuration conf = null;

  /**
   * Returns the current configuration.
   * @return The current configuration.
   * @see org.apache.hadoop.conf.Configurable#getConf()
   */
  @Override
  public Configuration getConf() {
    return conf;
  }

  /**
   * Sets the configuration. This is used to set the details for the tables to be scanned.
   * @param configuration The configuration to set.
   * @see org.apache.hadoop.conf.Configurable#setConf(org.apache.hadoop.conf.Configuration)
   */
  @Override
  public void setConf(Configuration configuration) {
    this.conf = configuration;
    String[] rawScans = conf.getStrings(SCANS);
    if (rawScans == null || rawScans.length == 0) {
      throw new IllegalArgumentException(
        "There must be at least one scan configuration set in : " + SCANS);
    }
    List<Scan> scans = new ArrayList<>();

    // Deserialize each scan; the entries are expected to have been produced by
    // TableMapReduceUtil.convertScanToString().
    for (String rawScan : rawScans) {
      try {
        scans.add(TableMapReduceUtil.convertStringToScan(rawScan));
      } catch (IOException e) {
        throw new RuntimeException("Failed to convert String : " + rawScan + " to a Scan", e);
      }
    }
    this.setScans(scans);
  }
}
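
/*
 * Illustrative driver sketch (not part of the original class): shows how the
 * SCANS property consumed by setConf(Configuration) above is typically
 * populated. TableMapReduceUtil.initTableMapperJob(List, ...) serializes each
 * Scan and stores the resulting list in the job configuration under
 * "hbase.mapreduce.scans". The class name, table names, and job name below are
 * hypothetical placeholders.
 */
class MultiTableInputFormatExampleDriver {

  public static void main(String[] args) throws Exception {
    Configuration conf = org.apache.hadoop.hbase.HBaseConfiguration.create();
    org.apache.hadoop.mapreduce.Job job =
      org.apache.hadoop.mapreduce.Job.getInstance(conf, "multi-table-scan-example");

    // One Scan per table; the target table travels with the Scan as an attribute.
    List<Scan> scans = new ArrayList<>();

    Scan scan1 = new Scan();
    scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME,
      org.apache.hadoop.hbase.util.Bytes.toBytes("table1"));
    scans.add(scan1);

    Scan scan2 = new Scan();
    scan2.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME,
      org.apache.hadoop.hbase.util.Bytes.toBytes("table2"));
    scans.add(scan2);

    // Configures MultiTableInputFormat as the job's input format and writes the
    // serialized scans into the configuration under MultiTableInputFormat.SCANS.
    TableMapReduceUtil.initTableMapperJob(scans, IdentityTableMapper.class,
      org.apache.hadoop.hbase.io.ImmutableBytesWritable.class,
      org.apache.hadoop.hbase.client.Result.class, job);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}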