/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;
/**
 * A tool to replay WAL files as a MapReduce job.
 * The WAL can be replayed for a set of tables or all tables,
 * and a time range can be provided (in milliseconds).
 * The WAL is filtered to the passed set of tables, and the output
 * can optionally be mapped to another set of tables.
 * <p>
 * WAL replay can also generate HFiles for later bulk importing;
 * in that case the WAL is replayed for a single table only.
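 * <p>
 * An illustrative invocation, following the usage text printed by this tool (the WAL
 * directory and table names below are examples only):
 * <pre>
 * $ hbase org.apache.hadoop.hbase.mapreduce.WALPlayer /hbase/oldWALs webTable webTableCopy
 * </pre>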
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class WALPlayer extends Configured implements Tool {
  final static Log LOG = LogFactory.getLog(WALPlayer.class);
  final static String NAME = "WALPlayer";
  final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
  final static String TABLES_KEY = "wal.input.tables";
  final static String TABLE_MAP_KEY = "wal.input.tablesmap";

  // This relies on Hadoop Configuration to handle warning about deprecated configs and
  // to set the correct non-deprecated configs when an old one shows up.
  static {
    Configuration.addDeprecation("hlog.bulk.output", BULK_OUTPUT_CONF_KEY);
    Configuration.addDeprecation("hlog.input.tables", TABLES_KEY);
    Configuration.addDeprecation("hlog.input.tablesmap", TABLE_MAP_KEY);
    Configuration.addDeprecation(HLogInputFormat.START_TIME_KEY, WALInputFormat.START_TIME_KEY);
    Configuration.addDeprecation(HLogInputFormat.END_TIME_KEY, WALInputFormat.END_TIME_KEY);
  }
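
  // Illustrative effect of the mappings above: Hadoop Configuration treats the old and new
  // keys as aliases (reads and writes of the deprecated key are redirected, with a warning),
  // so the following two calls are equivalent from this tool's point of view
  // (the path is an example value only):
  //   conf.set("hlog.bulk.output", "/bulk/out");   // deprecated key, still honored
  //   conf.set(BULK_OUTPUT_CONF_KEY, "/bulk/out"); // current key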

  private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
  /**
   * A mapper that just writes out KeyValues.
   * This one can be used together with {@link KeyValueSortReducer}.
   */
  static class WALKeyValueMapper
  extends Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue> {
    private byte[] table;

    @Override
    public void map(WALKey key, WALEdit value, Context context) throws IOException {
      try {
        // skip all other tables
        if (Bytes.equals(table, key.getTablename().getName())) {
          for (Cell cell : value.getCells()) {
            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
            if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
            context.write(new ImmutableBytesWritable(kv.getRow()), kv);
          }
        }
      } catch (InterruptedException e) {
        // Restore the interrupt status rather than swallowing the exception.
        LOG.error("Interrupted while emitting KeyValues", e);
        Thread.currentThread().interrupt();
      }
    }

    @Override
    public void setup(Context context) throws IOException {
      // only a single table is supported when HFiles are generated with HFileOutputFormat
      String[] tables = context.getConfiguration().getStrings(TABLES_KEY);
      if (tables == null || tables.length != 1) {
        // this can only happen when WALKeyValueMapper is used directly by a class other
        // than WALPlayer
        throw new IOException("Exactly one table must be specified for the bulk HFile case.");
      }
      table = Bytes.toBytes(tables[0]);
    }
  }

  /**
   * A mapper that writes out {@link Mutation} to be directly applied to
   * a running HBase instance.
   */
  protected static class WALMapper
  extends Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation> {
    private Map<TableName, TableName> tables = new TreeMap<TableName, TableName>();

    @Override
    public void map(WALKey key, WALEdit value, Context context) throws IOException {
      try {
        if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
          TableName targetTable = tables.isEmpty() ?
              key.getTablename() :
              tables.get(key.getTablename());
          ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
          Put put = null;
          Delete del = null;
          Cell lastCell = null;
          for (Cell cell : value.getCells()) {
            // filtering WAL meta entries
            if (WALEdit.isMetaEditFamily(cell.getFamily())) continue;

            // Allow a subclass to filter out this cell.
            if (filter(context, cell)) {
              // A WALEdit may contain multiple operations (HBASE-3584) and/or
              // multiple rows (HBASE-5229).
              // Aggregate as much as possible into a single Put/Delete
              // operation before writing to the context.
              if (lastCell == null || lastCell.getTypeByte() != cell.getTypeByte()
                  || !CellUtil.matchingRow(lastCell, cell)) {
                // row or type changed, write out aggregate KVs.
                if (put != null) context.write(tableOut, put);
                if (del != null) context.write(tableOut, del);
                if (CellUtil.isDelete(cell)) {
                  del = new Delete(cell.getRow());
                } else {
                  put = new Put(cell.getRow());
                }
              }
              if (CellUtil.isDelete(cell)) {
                del.addDeleteMarker(cell);
              } else {
                put.add(cell);
              }
            }
            lastCell = cell;
          }
          // write residual KVs
          if (put != null) context.write(tableOut, put);
          if (del != null) context.write(tableOut, del);
        }
      } catch (InterruptedException e) {
        // Restore the interrupt status rather than swallowing the exception.
        LOG.error("Interrupted while writing mutations", e);
        Thread.currentThread().interrupt();
      }
    }

    /**
     * @param context the mapper context, available to subclasses that need it
     * @param cell the cell under consideration
     * @return true if this cell should be emitted
     */
    protected boolean filter(Context context, final Cell cell) {
      return true;
    }
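
    // Hypothetical subclass sketch (illustrative only; the class and family names are
    // examples): override filter(...) to drop cells that should not be replayed, e.g.
    //
    //   public static class FamilyFilteringWALMapper extends WALMapper {
    //     private static final byte[] SKIPPED = Bytes.toBytes("skipped_family");
    //     @Override
    //     protected boolean filter(Context context, Cell cell) {
    //       return !CellUtil.matchingFamily(cell, SKIPPED);
    //     }
    //   }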

    @Override
    public void setup(Context context) throws IOException {
      String[] tableMap = context.getConfiguration().getStrings(TABLE_MAP_KEY);
      String[] tablesToUse = context.getConfiguration().getStrings(TABLES_KEY);
      if (tablesToUse == null && tableMap == null) {
        // Then user wants all tables.
      } else if (tablesToUse == null || tableMap == null || tablesToUse.length != tableMap.length) {
        // this can only happen when WALMapper is used directly by a class other than WALPlayer
        throw new IOException("No tables or incorrect table mapping specified.");
      }
      int i = 0;
      if (tablesToUse != null) {
        for (String table : tablesToUse) {
          tables.put(TableName.valueOf(table),
            TableName.valueOf(tableMap[i++]));
        }
      }
    }
  }

  /**
   * @param conf The {@link Configuration} to use.
   */
  public WALPlayer(Configuration conf) {
    super(conf);
  }

  void setupTime(Configuration conf, String option) throws IOException {
    String val = conf.get(option);
    if (null == val) return;
    long ms;
    try {
      // first try to parse in the user-friendly date form
      ms = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SS").parse(val).getTime();
    } catch (ParseException pe) {
      try {
        // then see if just a number of milliseconds was specified
        ms = Long.parseLong(val);
      } catch (NumberFormatException nfe) {
        throw new IOException(option
            + " must be specified either in the form 2001-02-20T16:35:06.99 "
            + "or as a number of milliseconds");
      }
    }
    conf.setLong(option, ms);
  }
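
  // Illustrative only: with the parsing above, both of the following forms select the
  // same bound for a time option (see usage() for the exact -D key names):
  //   -D<time option>=2001-02-20T16:35:06.99   parsed with the SimpleDateFormat pattern
  //   -D<time option>=<epoch milliseconds>     taken as a raw millisecond timestamp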

  /**
   * Sets up the actual job.
   *
   * @param args  The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   */
  public Job createSubmittableJob(String[] args) throws IOException {
    Configuration conf = getConf();
    setupTime(conf, WALInputFormat.START_TIME_KEY);
    setupTime(conf, WALInputFormat.END_TIME_KEY);
    Path inputDir = new Path(args[0]);
    String[] tables = args[1].split(",");
    String[] tableMap;
    if (args.length > 2) {
      tableMap = args[2].split(",");
      if (tableMap.length != tables.length) {
        throw new IOException("The same number of tables and mappings must be provided.");
      }
    } else {
      // if no mapping is specified, map each table to itself
      tableMap = tables;
    }
    conf.setStrings(TABLES_KEY, tables);
    conf.setStrings(TABLE_MAP_KEY, tableMap);
    Job job = Job.getInstance(conf, conf.get(JOB_NAME_CONF_KEY, NAME + "_" + inputDir));
    job.setJarByClass(WALPlayer.class);
    FileInputFormat.setInputPaths(job, inputDir);
    job.setInputFormatClass(WALInputFormat.class);
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
    if (hfileOutPath != null) {
      // the bulk HFile case
      if (tables.length != 1) {
        throw new IOException("Exactly one table must be specified for the bulk export option");
      }
      TableName tableName = TableName.valueOf(tables[0]);
      job.setMapperClass(WALKeyValueMapper.class);
      job.setReducerClass(KeyValueSortReducer.class);
      Path outputDir = new Path(hfileOutPath);
      FileOutputFormat.setOutputPath(job, outputDir);
      job.setMapOutputValueClass(KeyValue.class);
      try (Connection conn = ConnectionFactory.createConnection(conf);
          Table table = conn.getTable(tableName);
          RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
      }
      TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
          com.google.common.base.Preconditions.class);
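      // Illustrative follow-up (not executed here): the HFiles written under hfileOutPath
      // are typically loaded into the table afterwards with the completebulkload tool, e.g.
      //   hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles <hfileOutPath> <tableName>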
    } else {
      // output to live cluster
      job.setMapperClass(WALMapper.class);
      job.setOutputFormatClass(MultiTableOutputFormat.class);
      TableMapReduceUtil.addDependencyJars(job);
      TableMapReduceUtil.initCredentials(job);
      // No reducers.
      job.setNumReduceTasks(0);
    }
    return job;
  }

  /*
   * @param errorMsg Error message. Can be null.
   */
  private void usage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    System.err.println("Usage: " + NAME + " [options] <wal inputdir> <tables> [<tableMappings>]");
    System.err.println("Read all WAL entries for <tables>.");
    System.err.println("If no tables (\"\") are specified, all tables are imported.");
    System.err.println("(Careful, even -ROOT- and hbase:meta entries will be imported in that case.)");
    System.err.println("Otherwise <tables> is a comma separated list of tables.\n");
    System.err.println("The WAL entries can be mapped to a new set of tables via <tableMappings>.");
    System.err.println("<tableMappings> is a comma separated list of target tables.");
    System.err.println("If specified, each table in <tables> must have a mapping.\n");
    System.err.println("By default " + NAME + " will load data directly into HBase.");
    System.err.println("To generate HFiles for a bulk data load instead, pass the option:");
    System.err.println("  -D" + BULK_OUTPUT_CONF_KEY + "=/path/for/output");
    System.err.println("  (Only one table can be specified, and no mapping is allowed!)");
    System.err.println("Other options: (specify the time range of WAL edits to consider)");
    System.err.println("  -D" + WALInputFormat.START_TIME_KEY + "=[date|ms]");
    System.err.println("  -D" + WALInputFormat.END_TIME_KEY + "=[date|ms]");
    System.err.println("  -D" + JOB_NAME_CONF_KEY
        + "=jobName - use the specified mapreduce job name for the wal player");
    System.err.println("For performance also consider the following options:\n"
        + "  -Dmapreduce.map.speculative=false\n"
        + "  -Dmapreduce.reduce.speculative=false");
  }

  /**
   * Main entry point.
   *
   * @param args  The command line parameters.
   * @throws Exception When running the job fails.
   */
  public static void main(String[] args) throws Exception {
    int ret = ToolRunner.run(new WALPlayer(HBaseConfiguration.create()), args);
    System.exit(ret);
  }

  @Override
  public int run(String[] args) throws Exception {
    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
    if (otherArgs.length < 2) {
      usage("Wrong number of arguments: " + otherArgs.length);
      System.exit(-1);
    }
    Job job = createSubmittableJob(otherArgs);
    return job.waitForCompletion(true) ? 0 : 1;
  }
}