/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * A tool to replay WAL files as an M/R job.
 * The WAL can be replayed for a set of tables or all tables,
 * and a time range can be provided (in milliseconds).
 * The WAL is filtered to the passed set of tables and the output
 * can optionally be mapped to another set of tables.
 *
 * WAL replay can also generate HFiles for later bulk importing;
 * in that case the WAL is replayed for a single table only.
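 *
 * For example, a typical invocation (the input path and table names below
 * are illustrative only, not fixed by the tool) might look like:
 * <pre>
 * $ bin/hbase org.apache.hadoop.hbase.mapreduce.WALPlayer \
 *     /hbase/.logs mytable1,mytable2 clone1,clone2
 * </pre>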
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class WALPlayer extends Configured implements Tool {
  final static String NAME = "WALPlayer";
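  // Configuration keys; TABLES_KEY and TABLE_MAP_KEY carry the table list
  // and the (optional) table mapping from the driver to the mapper tasks.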
  final static String BULK_OUTPUT_CONF_KEY = "hlog.bulk.output";
  final static String HLOG_INPUT_KEY = "hlog.input.dir";
  final static String TABLES_KEY = "hlog.input.tables";
  final static String TABLE_MAP_KEY = "hlog.input.tablesmap";

  /**
   * A mapper that just writes out KeyValues.
   * This one can be used together with {@link KeyValueSortReducer}.
   */
  static class HLogKeyValueMapper
  extends Mapper<HLogKey, WALEdit, ImmutableBytesWritable, KeyValue> {
    private byte[] table;

    @Override
    public void map(HLogKey key, WALEdit value, Context context)
    throws IOException {
      try {
        // skip all other tables
        if (Bytes.equals(table, key.getTablename().getName())) {
          for (KeyValue kv : value.getKeyValues()) {
            if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
            context.write(new ImmutableBytesWritable(kv.getRow()), kv);
          }
        }
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }

    @Override
    public void setup(Context context) throws IOException {
      // only a single table is supported when HFiles are generated with HFileOutputFormat
      String[] tables = context.getConfiguration().getStrings(TABLES_KEY);
      if (tables == null || tables.length != 1) {
        // this can only happen when HLogKeyValueMapper is used directly
        // by a class other than WALPlayer
        throw new IOException("Exactly one table must be specified for the bulk HFile case.");
      }
      table = Bytes.toBytes(tables[0]);
    }
  }

  /**
   * A mapper that writes out {@link Mutation}s to be directly applied to
   * a running HBase instance.
   */
  static class HLogMapper
  extends Mapper<HLogKey, WALEdit, ImmutableBytesWritable, Mutation> {
    private Map<TableName, TableName> tables = new TreeMap<TableName, TableName>();

    @Override
    public void map(HLogKey key, WALEdit value, Context context)
    throws IOException {
      try {
        if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
          TableName targetTable = tables.isEmpty() ?
              key.getTablename() :
              tables.get(key.getTablename());
          ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
          Put put = null;
          Delete del = null;
          KeyValue lastKV = null;
          for (KeyValue kv : value.getKeyValues()) {
            // filter out HLog meta entries
            if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;

            // A WALEdit may contain multiple operations (HBASE-3584) and/or
            // multiple rows (HBASE-5229).
            // Aggregate as much as possible into a single Put/Delete
            // operation before writing to the context.
            if (lastKV == null || lastKV.getType() != kv.getType()
                || !CellUtil.matchingRow(lastKV, kv)) {
              // row or type changed, write out the aggregated KVs.
              if (put != null) context.write(tableOut, put);
              if (del != null) context.write(tableOut, del);

              if (kv.isDelete()) {
                del = new Delete(kv.getRow());
              } else {
                put = new Put(kv.getRow());
              }
            }
            if (kv.isDelete()) {
              del.addDeleteMarker(kv);
            } else {
              put.add(kv);
            }
            lastKV = kv;
          }
          // write the residual KVs
          if (put != null) context.write(tableOut, put);
          if (del != null) context.write(tableOut, del);
        }
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }

    @Override
    public void setup(Context context) throws IOException {
      String[] tableMap = context.getConfiguration().getStrings(TABLE_MAP_KEY);
      String[] tablesToUse = context.getConfiguration().getStrings(TABLES_KEY);
      if (tablesToUse == null || tableMap == null || tablesToUse.length != tableMap.length) {
        // this can only happen when HLogMapper is used directly by a class other than WALPlayer
        throw new IOException("No tables or incorrect table mapping specified.");
      }
      int i = 0;
      for (String table : tablesToUse) {
        tables.put(TableName.valueOf(table), TableName.valueOf(tableMap[i++]));
      }
    }
  }

  /**
   * @param conf The {@link Configuration} to use.
   */
  public WALPlayer(Configuration conf) {
    super(conf);
  }

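  /**
   * Normalizes the time boundary stored under the given option: the value
   * may be given either as a date of the form yyyy-MM-dd'T'HH:mm:ss.SS or
   * as milliseconds; the parsed result is written back as a long.
   */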
  void setupTime(Configuration conf, String option) throws IOException {
    String val = conf.get(option);
    if (val == null) return;
    long ms;
    try {
      // first try to parse it in the user-friendly date form
      ms = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SS").parse(val).getTime();
    } catch (ParseException pe) {
      try {
        // then see if just a number of milliseconds was specified
        ms = Long.parseLong(val);
      } catch (NumberFormatException nfe) {
        throw new IOException(option
            + " must be specified either in the form 2001-02-20T16:35:06.99 "
            + "or as a number of milliseconds");
      }
    }
    conf.setLong(option, ms);
  }

  /**
   * Sets up the actual job.
   *
   * @param args  The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   */
  public Job createSubmittableJob(String[] args)
  throws IOException {
    Configuration conf = getConf();
    setupTime(conf, HLogInputFormat.START_TIME_KEY);
    setupTime(conf, HLogInputFormat.END_TIME_KEY);
    Path inputDir = new Path(args[0]);
    String[] tables = args[1].split(",");
    String[] tableMap;
    if (args.length > 2) {
      tableMap = args[2].split(",");
      if (tableMap.length != tables.length) {
        throw new IOException("The same number of tables and mappings must be provided.");
      }
    } else {
      // if no mapping is specified, map each table to itself
      tableMap = tables;
    }
    conf.setStrings(TABLES_KEY, tables);
    conf.setStrings(TABLE_MAP_KEY, tableMap);
    Job job = new Job(conf, NAME + "_" + inputDir);
    job.setJarByClass(WALPlayer.class);
    FileInputFormat.setInputPaths(job, inputDir);
    job.setInputFormatClass(HLogInputFormat.class);
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
    if (hfileOutPath != null) {
      // the bulk HFile case
      if (tables.length != 1) {
        throw new IOException("Exactly one table must be specified for the bulk export option");
      }
      HTable table = new HTable(conf, tables[0]);
      job.setMapperClass(HLogKeyValueMapper.class);
      job.setReducerClass(KeyValueSortReducer.class);
      Path outputDir = new Path(hfileOutPath);
      FileOutputFormat.setOutputPath(job, outputDir);
      job.setMapOutputValueClass(KeyValue.class);
      HFileOutputFormat.configureIncrementalLoad(job, table);
      TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
          com.google.common.base.Preconditions.class);
    } else {
      // output to a live cluster
      job.setMapperClass(HLogMapper.class);
      job.setOutputFormatClass(MultiTableOutputFormat.class);
      TableMapReduceUtil.addDependencyJars(job);
      TableMapReduceUtil.initCredentials(job);
      // No reducers.
      job.setNumReduceTasks(0);
    }
    return job;
  }

  /*
   * @param errorMsg Error message.  Can be null.
   */
  private void usage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    System.err.println("Usage: " + NAME + " [options] <wal inputdir> <tables> [<tableMappings>]");
    System.err.println("Read all WAL entries for <tables>.");
    System.err.println("If no tables (\"\") are specified, all tables are imported.");
    System.err.println("(Careful, even -ROOT- and hbase:meta entries will be imported in that case.)");
    System.err.println("Otherwise <tables> is a comma separated list of tables.\n");
    System.err.println("The WAL entries can be mapped to a new set of tables via <tableMappings>.");
    System.err.println("<tableMappings> is a comma separated list of target tables.");
    System.err.println("If specified, each table in <tables> must have a mapping.\n");
    System.err.println("By default " + NAME + " will load data directly into HBase.");
    System.err.println("To generate HFiles for a bulk data load instead, pass the option:");
    System.err.println("  -D" + BULK_OUTPUT_CONF_KEY + "=/path/for/output");
    System.err.println("  (Only one table can be specified, and no mapping is allowed!)");
    System.err.println("Other options: (specify a time range of WAL edits to consider)");
    System.err.println("  -D" + HLogInputFormat.START_TIME_KEY + "=[date|ms]");
    System.err.println("  -D" + HLogInputFormat.END_TIME_KEY + "=[date|ms]");
    System.err.println("For performance also consider the following options:\n"
        + "  -Dmapreduce.map.speculative=false\n"
        + "  -Dmapreduce.reduce.speculative=false");
  }

  /**
   * Main entry point.
   *
   * @param args  The command line parameters.
   * @throws Exception When running the job fails.
   */
  public static void main(String[] args) throws Exception {
    int ret = ToolRunner.run(new WALPlayer(HBaseConfiguration.create()), args);
    System.exit(ret);
  }

  @Override
  public int run(String[] args) throws Exception {
    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
    if (otherArgs.length < 2) {
      usage("Wrong number of arguments: " + otherArgs.length);
      System.exit(-1);
    }
    Job job = createSubmittableJob(otherArgs);
    return job.waitForCompletion(true) ? 0 : 1;
  }
}