001/* 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hbase.mapreduce; 019 020import java.io.IOException; 021import java.util.Arrays; 022import java.util.List; 023import org.apache.hadoop.conf.Configuration; 024import org.apache.hadoop.fs.Path; 025import org.apache.hadoop.hbase.CompareOperator; 026import org.apache.hadoop.hbase.HConstants; 027import org.apache.hadoop.hbase.TableName; 028import org.apache.hadoop.hbase.client.Scan; 029import org.apache.hadoop.hbase.filter.Filter; 030import org.apache.hadoop.hbase.filter.IncompatibleFilterException; 031import org.apache.hadoop.hbase.filter.PrefixFilter; 032import org.apache.hadoop.hbase.filter.RegexStringComparator; 033import org.apache.hadoop.hbase.filter.RowFilter; 034import org.apache.hadoop.hbase.security.visibility.Authorizations; 035import org.apache.hadoop.hbase.util.Bytes; 036import org.apache.hadoop.hbase.util.Triple; 037import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; 038import org.apache.yetus.audience.InterfaceAudience; 039import org.slf4j.Logger; 040import org.slf4j.LoggerFactory; 041 042/** 043 * Some helper methods are used by {@link org.apache.hadoop.hbase.mapreduce.Export} and 044 * org.apache.hadoop.hbase.coprocessor.Export (in hbase-endpooint). 045 */ 046@InterfaceAudience.Private 047public final class ExportUtils { 048 private static final Logger LOG = LoggerFactory.getLogger(ExportUtils.class); 049 public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows"; 050 public static final String EXPORT_BATCHING = "hbase.export.scanner.batch"; 051 public static final String EXPORT_CACHING = "hbase.export.scanner.caching"; 052 public static final String EXPORT_VISIBILITY_LABELS = "hbase.export.visibility.labels"; 053 054 /** 055 * Common usage for other export tools. 056 * @param errorMsg Error message. Can be null. 057 */ 058 public static void usage(final String errorMsg) { 059 if (errorMsg != null && errorMsg.length() > 0) { 060 System.err.println("ERROR: " + errorMsg); 061 } 062 System.err.println("Usage: Export [-D <property=value>]* <tablename> <outputdir> [<versions> " 063 + "[<starttime> [<endtime>]] [^[regex pattern] or [Prefix] to filter]]\n"); 064 System.err.println(" Note: -D properties will be applied to the conf used. "); 065 System.err.println(" For example: "); 066 System.err.println(" -D " + FileOutputFormat.COMPRESS + "=true"); 067 System.err.println( 068 " -D " + FileOutputFormat.COMPRESS_CODEC + "=org.apache.hadoop.io.compress.GzipCodec"); 069 System.err.println(" -D " + FileOutputFormat.COMPRESS_TYPE + "=BLOCK"); 070 System.err.println(" Additionally, the following SCAN properties can be specified"); 071 System.err.println(" to control/limit what is exported.."); 072 System.err 073 .println(" -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=<family1>,<family2>, ..."); 074 System.err.println(" -D " + RAW_SCAN + "=true"); 075 System.err.println(" -D " + TableInputFormat.SCAN_ROW_START + "=<ROWSTART>"); 076 System.err.println(" -D " + TableInputFormat.SCAN_ROW_STOP + "=<ROWSTOP>"); 077 System.err.println(" -D " + HConstants.HBASE_CLIENT_SCANNER_CACHING + "=100"); 078 System.err.println(" -D " + EXPORT_VISIBILITY_LABELS + "=<labels>"); 079 System.err.println("For tables with very wide rows consider setting the batch size as below:\n" 080 + " -D " + EXPORT_BATCHING + "=10\n" + " -D " + EXPORT_CACHING + "=100"); 081 } 082 083 private static Filter getExportFilter(String[] args) { 084 Filter exportFilter; 085 String filterCriteria = (args.length > 5) ? args[5] : null; 086 if (filterCriteria == null) return null; 087 if (filterCriteria.startsWith("^")) { 088 String regexPattern = filterCriteria.substring(1, filterCriteria.length()); 089 exportFilter = new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(regexPattern)); 090 } else { 091 exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria)); 092 } 093 return exportFilter; 094 } 095 096 public static boolean isValidArguements(String[] args) { 097 return args != null && args.length >= 2; 098 } 099 100 public static Triple<TableName, Scan, Path> getArgumentsFromCommandLine(Configuration conf, 101 String[] args) throws IOException { 102 if (!isValidArguements(args)) { 103 return null; 104 } 105 return new Triple<>(TableName.valueOf(args[0]), getScanFromCommandLine(conf, args), 106 new Path(args[1])); 107 } 108 109 static Scan getScanFromCommandLine(Configuration conf, String[] args) throws IOException { 110 Scan s = new Scan(); 111 // Optional arguments. 112 // Set Scan Versions 113 int versions = args.length > 2 ? Integer.parseInt(args[2]) : 1; 114 s.readVersions(versions); 115 // Set Scan Range 116 long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L; 117 long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE; 118 s.setTimeRange(startTime, endTime); 119 // Set cache blocks 120 s.setCacheBlocks(false); 121 // set Start and Stop row 122 if (conf.get(TableInputFormat.SCAN_ROW_START) != null) { 123 s.withStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START))); 124 } 125 if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) { 126 s.withStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP))); 127 } 128 // Set Scan Column Family 129 boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN)); 130 if (raw) { 131 s.setRaw(raw); 132 } 133 for (String columnFamily : conf.getTrimmedStrings(TableInputFormat.SCAN_COLUMN_FAMILY)) { 134 s.addFamily(Bytes.toBytes(columnFamily)); 135 } 136 // Set RowFilter or Prefix Filter if applicable. 137 Filter exportFilter = getExportFilter(args); 138 if (exportFilter != null) { 139 LOG.info("Setting Scan Filter for Export."); 140 s.setFilter(exportFilter); 141 } 142 List<String> labels = null; 143 if (conf.get(EXPORT_VISIBILITY_LABELS) != null) { 144 labels = Arrays.asList(conf.getStrings(EXPORT_VISIBILITY_LABELS)); 145 if (!labels.isEmpty()) { 146 s.setAuthorizations(new Authorizations(labels)); 147 } 148 } 149 150 int batching = conf.getInt(EXPORT_BATCHING, -1); 151 if (batching != -1) { 152 try { 153 s.setBatch(batching); 154 } catch (IncompatibleFilterException e) { 155 LOG.error("Batching could not be set", e); 156 } 157 } 158 159 int caching = conf.getInt(EXPORT_CACHING, 100); 160 if (caching != -1) { 161 try { 162 s.setCaching(caching); 163 } catch (IncompatibleFilterException e) { 164 LOG.error("Caching could not be set", e); 165 } 166 } 167 LOG.info("versions=" + versions + ", starttime=" + startTime + ", endtime=" + endTime 168 + ", keepDeletedCells=" + raw + ", visibility labels=" + labels); 169 return s; 170 } 171 172 private ExportUtils() { 173 } 174}