/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static java.lang.String.format;

import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

/**
 * Tool to import data from a TSV file.
 *
 * This tool is rather simplistic - it doesn't do any quoting or
 * escaping, but is useful for many data loads.
 *
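 * <p>A minimal invocation might look like the following (table name and paths
 * are illustrative, not fixed by this tool); it maps the first TSV column to
 * the row key and the next two to column family {@code d}:
 * <pre>
 * hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
 *   -Dimporttsv.columns=HBASE_ROW_KEY,d:c1,d:c2 \
 *   MyTable /path/to/input
 * </pre>
 *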
 * @see ImportTsv#usage(String)
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ImportTsv extends Configured implements Tool {

  protected static final Log LOG = LogFactory.getLog(ImportTsv.class);

  final static String NAME = "importtsv";

  public final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
  public final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
  public final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
  public final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
  // TODO: the rest of these configs are used exclusively by TsvImporterMapper.
  // Move them out of the tool and let the mapper handle its own validation.
  public final static String DRY_RUN_CONF_KEY = "importtsv.dry.run";
  // If true, bad lines are logged to stderr. Default: false.
  public final static String LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
  public final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
  public final static String COLUMNS_CONF_KEY = "importtsv.columns";
  public final static String SEPARATOR_CONF_KEY = "importtsv.separator";
  public final static String ATTRIBUTE_SEPERATOR_CONF_KEY = "attributes.seperator";
  // This config is used to propagate credentials from parent MR jobs which launch
  // ImportTSV jobs. SEE IntegrationTestImportTsv.
  public final static String CREDENTIALS_LOCATION = "credentials_location";
  final static String DEFAULT_SEPARATOR = "\t";
  final static String DEFAULT_ATTRIBUTES_SEPERATOR = "=>";
  final static String DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
  final static Class DEFAULT_MAPPER = TsvImporterMapper.class;
  public final static String CREATE_TABLE_CONF_KEY = "create.table";
  public final static String NO_STRICT_COL_FAMILY = "no.strict";
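
  // A hedged illustration (not part of the original source): these keys are
  // normally supplied on the command line, e.g.
  //   -Dimporttsv.separator=, -Dimporttsv.skip.bad.lines=false
  // or set programmatically before creating the job:
  //   conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,d:c1");
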
  /**
   * If table didn't exist and was created in dry-run mode, this flag is
   * flipped to delete it when MR ends.
   */
  private static boolean dryRunTableCreated;

  public static class TsvParser {
    /**
     * Column families and qualifiers mapped to the TSV columns
     */
    private final byte[][] families;
    private final byte[][] qualifiers;

    private final byte separatorByte;

    private int rowKeyColumnIndex;

    private int maxColumnCount;

    // Default value must be negative
    public static final int DEFAULT_TIMESTAMP_COLUMN_INDEX = -1;

    private int timestampKeyColumnIndex = DEFAULT_TIMESTAMP_COLUMN_INDEX;

    public static final String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";

    public static final String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";

    public static final String ATTRIBUTES_COLUMN_SPEC = "HBASE_ATTRIBUTES_KEY";

    public static final String CELL_VISIBILITY_COLUMN_SPEC = "HBASE_CELL_VISIBILITY";

    public static final String CELL_TTL_COLUMN_SPEC = "HBASE_CELL_TTL";

    private int attrKeyColumnIndex = DEFAULT_ATTRIBUTES_COLUMN_INDEX;

    public static final int DEFAULT_ATTRIBUTES_COLUMN_INDEX = -1;

    public static final int DEFAULT_CELL_VISIBILITY_COLUMN_INDEX = -1;

    public static final int DEFAULT_CELL_TTL_COLUMN_INDEX = -1;

    private int cellVisibilityColumnIndex = DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;

    private int cellTTLColumnIndex = DEFAULT_CELL_TTL_COLUMN_INDEX;

    /**
     * @param columnsSpecification the list of columns to parse out, comma separated.
     * The row key should be the special token TsvParser.ROWKEY_COLUMN_SPEC
     * @param separatorStr the field separator; must encode to a single byte
     */
    public TsvParser(String columnsSpecification, String separatorStr) {
      // Configure separator
      byte[] separator = Bytes.toBytes(separatorStr);
      Preconditions.checkArgument(separator.length == 1,
        "TsvParser only supports single-byte separators");
      separatorByte = separator[0];

      // Configure columns
      ArrayList<String> columnStrings = Lists.newArrayList(
        Splitter.on(',').trimResults().split(columnsSpecification));

      maxColumnCount = columnStrings.size();
      families = new byte[maxColumnCount][];
      qualifiers = new byte[maxColumnCount][];

      for (int i = 0; i < columnStrings.size(); i++) {
        String str = columnStrings.get(i);
        if (ROWKEY_COLUMN_SPEC.equals(str)) {
          rowKeyColumnIndex = i;
          continue;
        }
        if (TIMESTAMPKEY_COLUMN_SPEC.equals(str)) {
          timestampKeyColumnIndex = i;
          continue;
        }
        if (ATTRIBUTES_COLUMN_SPEC.equals(str)) {
          attrKeyColumnIndex = i;
          continue;
        }
        if (CELL_VISIBILITY_COLUMN_SPEC.equals(str)) {
          cellVisibilityColumnIndex = i;
          continue;
        }
        if (CELL_TTL_COLUMN_SPEC.equals(str)) {
          cellTTLColumnIndex = i;
          continue;
        }
        String[] parts = str.split(":", 2);
        if (parts.length == 1) {
          families[i] = str.getBytes();
          qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
        } else {
          families[i] = parts[0].getBytes();
          qualifiers[i] = parts[1].getBytes();
        }
      }
    }
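
    // Illustrative example (assumed spec, not from the original source): given
    //   new TsvParser("HBASE_ROW_KEY,d:c1,HBASE_TS_KEY,d:c2", "\t")
    // column 0 becomes the row key, columns 1 and 3 map to family 'd' with
    // qualifiers 'c1' and 'c2', and column 2 supplies the per-record timestamp.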

    public boolean hasTimestamp() {
      return timestampKeyColumnIndex != DEFAULT_TIMESTAMP_COLUMN_INDEX;
    }

    public int getTimestampKeyColumnIndex() {
      return timestampKeyColumnIndex;
    }

    public boolean hasAttributes() {
      return attrKeyColumnIndex != DEFAULT_ATTRIBUTES_COLUMN_INDEX;
    }

    public boolean hasCellVisibility() {
      return cellVisibilityColumnIndex != DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
    }

    public boolean hasCellTTL() {
      return cellTTLColumnIndex != DEFAULT_CELL_TTL_COLUMN_INDEX;
    }

    public int getAttributesKeyColumnIndex() {
      return attrKeyColumnIndex;
    }

    public int getCellVisibilityColumnIndex() {
      return cellVisibilityColumnIndex;
    }

    public int getCellTTLColumnIndex() {
      return cellTTLColumnIndex;
    }

    public int getRowKeyColumnIndex() {
      return rowKeyColumnIndex;
    }

    public byte[] getFamily(int idx) {
      return families[idx];
    }
    public byte[] getQualifier(int idx) {
      return qualifiers[idx];
    }

    public ParsedLine parse(byte[] lineBytes, int length)
    throws BadTsvLineException {
      // Enumerate separator offsets
      ArrayList<Integer> tabOffsets = new ArrayList<Integer>(maxColumnCount);
      for (int i = 0; i < length; i++) {
        if (lineBytes[i] == separatorByte) {
          tabOffsets.add(i);
        }
      }
      if (tabOffsets.isEmpty()) {
        throw new BadTsvLineException("No delimiter");
      }

      tabOffsets.add(length);

      if (tabOffsets.size() > maxColumnCount) {
        throw new BadTsvLineException("Excessive columns");
      } else if (tabOffsets.size() <= getRowKeyColumnIndex()) {
        throw new BadTsvLineException("No row key");
      } else if (hasTimestamp()
          && tabOffsets.size() <= getTimestampKeyColumnIndex()) {
        throw new BadTsvLineException("No timestamp");
      } else if (hasAttributes() && tabOffsets.size() <= getAttributesKeyColumnIndex()) {
        throw new BadTsvLineException("No attributes specified");
      } else if (hasCellVisibility() && tabOffsets.size() <= getCellVisibilityColumnIndex()) {
        throw new BadTsvLineException("No cell visibility specified");
      } else if (hasCellTTL() && tabOffsets.size() <= getCellTTLColumnIndex()) {
        throw new BadTsvLineException("No cell TTL specified");
      }
      return new ParsedLine(tabOffsets, lineBytes);
    }
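
    // Worked example (assumed input, for illustration): for the line "key\tab\tcd"
    // with a tab separator, parse() records separator offsets [3, 6] and appends
    // the length, giving tabOffsets = [3, 6, 9]. ParsedLine then derives
    //   column 0: offset 0, length 3 ("key")
    //   column 1: offset 4, length 2 ("ab")
    //   column 2: offset 7, length 2 ("cd")
    // via getColumnOffset(idx) = tabOffsets.get(idx - 1) + 1 (0 for idx 0) and
    // getColumnLength(idx) = tabOffsets.get(idx) - getColumnOffset(idx).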

    class ParsedLine {
      private final ArrayList<Integer> tabOffsets;
      private byte[] lineBytes;

      ParsedLine(ArrayList<Integer> tabOffsets, byte[] lineBytes) {
        this.tabOffsets = tabOffsets;
        this.lineBytes = lineBytes;
      }

      public int getRowKeyOffset() {
        return getColumnOffset(rowKeyColumnIndex);
      }
      public int getRowKeyLength() {
        return getColumnLength(rowKeyColumnIndex);
      }

      public long getTimestamp(long ts) throws BadTsvLineException {
        // Return ts if HBASE_TS_KEY is not configured in column spec
        if (!hasTimestamp()) {
          return ts;
        }

        String timeStampStr = Bytes.toString(lineBytes,
            getColumnOffset(timestampKeyColumnIndex),
            getColumnLength(timestampKeyColumnIndex));
        try {
          return Long.parseLong(timeStampStr);
        } catch (NumberFormatException nfe) {
          // treat this record as bad record
          throw new BadTsvLineException("Invalid timestamp " + timeStampStr);
        }
      }

      private String getAttributes() {
        if (!hasAttributes()) {
          return null;
        } else {
          return Bytes.toString(lineBytes, getColumnOffset(attrKeyColumnIndex),
              getColumnLength(attrKeyColumnIndex));
        }
      }

      public String[] getIndividualAttributes() {
        String attributes = getAttributes();
        if (attributes != null) {
          return attributes.split(DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR);
        } else {
          return null;
        }
      }

      public int getAttributeKeyOffset() {
        if (hasAttributes()) {
          return getColumnOffset(attrKeyColumnIndex);
        } else {
          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
        }
      }

      public int getAttributeKeyLength() {
        if (hasAttributes()) {
          return getColumnLength(attrKeyColumnIndex);
        } else {
          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
        }
      }

      public int getCellVisibilityColumnOffset() {
        if (hasCellVisibility()) {
          return getColumnOffset(cellVisibilityColumnIndex);
        } else {
          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
        }
      }

      public int getCellVisibilityColumnLength() {
        if (hasCellVisibility()) {
          return getColumnLength(cellVisibilityColumnIndex);
        } else {
          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
        }
      }

      public String getCellVisibility() {
        if (!hasCellVisibility()) {
          return null;
        } else {
          return Bytes.toString(lineBytes, getColumnOffset(cellVisibilityColumnIndex),
              getColumnLength(cellVisibilityColumnIndex));
        }
      }

      public int getCellTTLColumnOffset() {
        if (hasCellTTL()) {
          return getColumnOffset(cellTTLColumnIndex);
        } else {
          return DEFAULT_CELL_TTL_COLUMN_INDEX;
        }
      }

      public int getCellTTLColumnLength() {
        if (hasCellTTL()) {
          return getColumnLength(cellTTLColumnIndex);
        } else {
          return DEFAULT_CELL_TTL_COLUMN_INDEX;
        }
      }

      public long getCellTTL() {
        if (!hasCellTTL()) {
          return 0;
        } else {
          return Bytes.toLong(lineBytes, getColumnOffset(cellTTLColumnIndex),
              getColumnLength(cellTTLColumnIndex));
        }
      }

      public int getColumnOffset(int idx) {
        if (idx > 0) {
          return tabOffsets.get(idx - 1) + 1;
        } else {
          return 0;
        }
      }
      public int getColumnLength(int idx) {
        return tabOffsets.get(idx) - getColumnOffset(idx);
      }
      public int getColumnCount() {
        return tabOffsets.size();
      }
      public byte[] getLineBytes() {
        return lineBytes;
      }
    }

    public static class BadTsvLineException extends Exception {
      public BadTsvLineException(String err) {
        super(err);
      }
      private static final long serialVersionUID = 1L;
    }

    /**
     * Return starting position and length of row key from the specified line bytes.
     * @param lineBytes the byte array holding the line
     * @param length the number of bytes of the line to consider
     * @return Pair of row key offset and length.
     * @throws BadTsvLineException if the row key is empty or missing
     */
    public Pair<Integer, Integer> parseRowKey(byte[] lineBytes, int length)
        throws BadTsvLineException {
      int rkColumnIndex = 0;
      int startPos = 0, endPos = 0;
      for (int i = 0; i <= length; i++) {
        if (i == length || lineBytes[i] == separatorByte) {
          endPos = i - 1;
          if (rkColumnIndex++ == getRowKeyColumnIndex()) {
            if ((endPos + 1) == startPos) {
              throw new BadTsvLineException("Empty value for ROW KEY.");
            }
            break;
          } else {
            startPos = endPos + 2;
          }
        }
        if (i == length) {
          throw new BadTsvLineException(
              "Row key does not exist as the number of columns in the line"
                  + " is less than the row key position.");
        }
      }
      return new Pair<Integer, Integer>(startPos, endPos - startPos + 1);
    }
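
    // Illustrative usage (assumed input): with columns "HBASE_ROW_KEY,d:c1" and a
    // tab separator, parseRowKey(Bytes.toBytes("r1\tv1"), 5) returns the pair
    // (0, 2): the row key "r1" starts at offset 0 and is 2 bytes long, found
    // without scanning past the row key column.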
  }

  /**
   * Sets up the actual job.
   *
   * @param conf  The current configuration.
   * @param args  The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   * @throws ClassNotFoundException When the configured mapper class cannot be loaded.
   */
  protected static Job createSubmittableJob(Configuration conf, String[] args)
      throws IOException, ClassNotFoundException {
    Job job = null;
    boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      try (Admin admin = connection.getAdmin()) {
        // Support separator characters that are not XML-safe
        // by re-encoding the passed separator as a Base64 string.
        String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
        if (actualSeparator != null) {
          conf.set(SEPARATOR_CONF_KEY,
              Base64.encodeBytes(actualSeparator.getBytes()));
        }

        // See if a non-default Mapper was set
        String mapperClassName = conf.get(MAPPER_CONF_KEY);
        Class mapperClass =
            mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;

        TableName tableName = TableName.valueOf(args[0]);
        Path inputDir = new Path(args[1]);
        String jobName = conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName.getNameAsString());
        job = Job.getInstance(conf, jobName);
        job.setJarByClass(mapperClass);
        FileInputFormat.setInputPaths(job, inputDir);
        job.setInputFormatClass(TextInputFormat.class);
        job.setMapperClass(mapperClass);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
        String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
        if (StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
          String fileLoc = conf.get(CREDENTIALS_LOCATION);
          Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
          job.getCredentials().addAll(cred);
        }

        if (hfileOutPath != null) {
          if (!admin.tableExists(tableName)) {
            LOG.warn(format("Table '%s' does not exist.", tableName));
            if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
              // TODO: this is backwards. Instead of depending on the existence of a table,
              // create a sane splits file for HFileOutputFormat based on data sampling.
              createTable(admin, tableName, columns);
              if (isDryRun) {
                LOG.warn("Dry run: Table will be deleted at end of dry run.");
                dryRunTableCreated = true;
              }
            } else {
              String errorMsg =
                  format("Table '%s' does not exist and '%s' is set to no.", tableName,
                      CREATE_TABLE_CONF_KEY);
              LOG.error(errorMsg);
              throw new TableNotFoundException(errorMsg);
            }
          }
          try (Table table = connection.getTable(tableName);
              RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
            boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
            // if no.strict is false then check column family
            if (!noStrict) {
              ArrayList<String> unmatchedFamilies = new ArrayList<String>();
              Set<String> cfSet = getColumnFamilies(columns);
              HTableDescriptor tDesc = table.getTableDescriptor();
              for (String cf : cfSet) {
                if (tDesc.getFamily(Bytes.toBytes(cf)) == null) {
                  unmatchedFamilies.add(cf);
                }
              }
              if (unmatchedFamilies.size() > 0) {
                ArrayList<String> familyNames = new ArrayList<String>();
                for (HColumnDescriptor family : table.getTableDescriptor().getFamilies()) {
                  familyNames.add(family.getNameAsString());
                }
                String msg =
                    "Column families " + unmatchedFamilies + " specified in " + COLUMNS_CONF_KEY
                    + " do not match any column families " + familyNames
                    + " of table " + tableName + ".\n"
                    + "To disable the column family check, use -D" + NO_STRICT_COL_FAMILY
                    + "=true.\n";
                usage(msg);
                System.exit(-1);
              }
            }
            if (mapperClass.equals(TsvImporterTextMapper.class)) {
              job.setMapOutputValueClass(Text.class);
              job.setReducerClass(TextSortReducer.class);
            } else {
              job.setMapOutputValueClass(Put.class);
              job.setCombinerClass(PutCombiner.class);
              job.setReducerClass(PutSortReducer.class);
            }
            if (!isDryRun) {
              Path outputDir = new Path(hfileOutPath);
              FileOutputFormat.setOutputPath(job, outputDir);
              HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(),
                  regionLocator);
            }
          }
        } else {
          if (!admin.tableExists(tableName)) {
            String errorMsg = format("Table '%s' does not exist.", tableName);
            LOG.error(errorMsg);
            throw new TableNotFoundException(errorMsg);
          }
          if (mapperClass.equals(TsvImporterTextMapper.class)) {
            usage(TsvImporterTextMapper.class.toString()
                + " should not be used for the non-bulkload case. Use "
                + TsvImporterMapper.class.toString()
                + " or a custom mapper whose value type is Put.");
            System.exit(-1);
          }
          if (!isDryRun) {
            // No reducers. Just write straight to table. Call initTableReducerJob
            // to set up the TableOutputFormat.
            TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
          }
          job.setNumReduceTasks(0);
        }
        if (isDryRun) {
          job.setOutputFormatClass(NullOutputFormat.class);
          job.getConfiguration().setStrings("io.serializations",
              job.getConfiguration().get("io.serializations"),
              MutationSerialization.class.getName(), ResultSerialization.class.getName(),
              KeyValueSerialization.class.getName());
        }
        TableMapReduceUtil.addDependencyJars(job);
        TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
            com.google.common.base.Function.class /* Guava used by TsvParser */);
      }
    }
    return job;
  }
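
  // A hedged sketch of the bulk-load path this method configures (table name
  // and paths are illustrative): first generate HFiles instead of writing to
  // the table directly, then hand them to the bulk-load tool:
  //   hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
  //     -Dimporttsv.columns=HBASE_ROW_KEY,d:c1 \
  //     -Dimporttsv.bulk.output=/tmp/hfiles MyTable /path/to/input
  //   hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles /tmp/hfiles MyTable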

  private static void createTable(Admin admin, TableName tableName, String[] columns)
      throws IOException {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    Set<String> cfSet = getColumnFamilies(columns);
    for (String cf : cfSet) {
      HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(cf));
      htd.addFamily(hcd);
    }
    LOG.warn(format("Creating table '%s' with column families '%s' and default descriptors.",
      tableName, cfSet));
    admin.createTable(htd);
  }

  private static void deleteTable(Configuration conf, String[] args) {
    TableName tableName = TableName.valueOf(args[0]);
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      try {
        admin.disableTable(tableName);
      } catch (TableNotEnabledException e) {
        LOG.debug("Dry run: Table: " + tableName + " already disabled, so just deleting it.");
      }
      admin.deleteTable(tableName);
    } catch (IOException e) {
      LOG.error(format("***Dry run: Failed to delete table '%s'.***\n%s", tableName, e.toString()));
      return;
    }
    LOG.info(format("Dry run: Deleted table '%s'.", tableName));
  }

  private static Set<String> getColumnFamilies(String[] columns) {
    Set<String> cfSet = new HashSet<String>();
    for (String aColumn : columns) {
      if (TsvParser.ROWKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.TIMESTAMPKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.CELL_VISIBILITY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.CELL_TTL_COLUMN_SPEC.equals(aColumn)
          || TsvParser.ATTRIBUTES_COLUMN_SPEC.equals(aColumn)) {
        continue;
      }
      // we are only concerned with the first one (in case this is a cf:cq)
      cfSet.add(aColumn.split(":", 2)[0]);
    }
    return cfSet;
  }
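
  // Worked example (assumed input): getColumnFamilies(new String[] {
  //   "HBASE_ROW_KEY", "d:c1", "d:c2", "m" })
  // skips the special HBASE_ROW_KEY token and returns the set {"d", "m"},
  // keeping only the family portion of each cf:cq pair.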

  /*
   * @param errorMsg Error message.  Can be null.
   */
  private static void usage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    String usage =
      "Usage: " + NAME + " -D" + COLUMNS_CONF_KEY + "=a,b,c <tablename> <inputdir>\n" +
      "\n" +
      "Imports the given input directory of TSV data into the specified table.\n" +
      "\n" +
      "The column names of the TSV data must be specified using the -D" + COLUMNS_CONF_KEY + "\n" +
      "option. This option takes the form of comma-separated column names, where each\n" +
      "column name is either a simple column family, or a columnfamily:qualifier. The special\n" +
      "column name " + TsvParser.ROWKEY_COLUMN_SPEC + " is used to designate that this column should be used\n" +
      "as the row key for each imported record. You must specify exactly one column\n" +
      "to be the row key, and you must specify a column name for every column that exists in the\n" +
      "input data. Another special column " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC +
      " designates that this column should be\n" +
      "used as the timestamp for each record. Unlike " + TsvParser.ROWKEY_COLUMN_SPEC + ", " +
      TsvParser.TIMESTAMPKEY_COLUMN_SPEC + " is optional.\n" +
      "You must specify at most one column as the timestamp key for each imported record.\n" +
      "Records with invalid timestamps (blank, non-numeric) will be treated as bad records.\n" +
      "Note: if you use this option, then the '" + TIMESTAMP_CONF_KEY + "' option will be ignored.\n" +
      "\n" +
      TsvParser.ATTRIBUTES_COLUMN_SPEC + " can be used to specify operation attributes per record.\n" +
      "Attributes should be specified as key=>value, where '" + DEFAULT_ATTRIBUTES_SEPERATOR + "' is used\n" +
      "as the separator. Note that more than one operation attribute can be specified.\n" +
      "\n" +
      "By default importtsv will load data directly into HBase. To instead generate\n" +
      "HFiles of data to prepare for a bulk data load, pass the option:\n" +
      "  -D" + BULK_OUTPUT_CONF_KEY + "=/path/for/output\n" +
      "  Note: if you do not use this option, then the target table must already exist in HBase\n" +
      "\n" +
      "Other options that may be specified with -D include:\n" +
      "  -D" + DRY_RUN_CONF_KEY + "=true - Dry run mode. Data is not actually populated into" +
      " table. If table does not exist, it is created but deleted in the end.\n" +
      "  -D" + SKIP_LINES_CONF_KEY + "=false - fail if encountering an invalid line\n" +
      "  -D" + LOG_BAD_LINES_CONF_KEY + "=true - logs invalid lines to stderr\n" +
      "  '-D" + SEPARATOR_CONF_KEY + "=|' - eg separate on pipes instead of tabs\n" +
      "  -D" + TIMESTAMP_CONF_KEY + "=currentTimeAsLong - use the specified timestamp for the import\n" +
      "  -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of " +
      DEFAULT_MAPPER.getName() + "\n" +
      "  -D" + JOB_NAME_CONF_KEY + "=jobName - use the specified mapreduce job name for the import\n" +
      "  -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this tool\n" +
      "  Note: if you set this to 'no', then the target table must already exist in HBase\n" +
      "  -D" + NO_STRICT_COL_FAMILY + "=true - ignore column family check in hbase table. " +
      "Default is false\n\n" +
      "For performance consider the following options:\n" +
      "  -Dmapreduce.map.speculative=false\n" +
      "  -Dmapreduce.reduce.speculative=false";

    System.err.println(usage);
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length < 2) {
      usage("Wrong number of arguments: " + args.length);
      return -1;
    }

    // When MAPPER_CONF_KEY is null, the user wants to use the provided TsvImporterMapper, so
    // perform validation on these additional args. When it's not null, the user has provided
    // their own mapper, so these validations are not relevant.
    // TODO: this validation is for TsvImporterMapper, not this tool. Move elsewhere.
    if (null == getConf().get(MAPPER_CONF_KEY)) {
      // Make sure columns are specified
      String[] columns = getConf().getStrings(COLUMNS_CONF_KEY);
      if (columns == null) {
        usage("No columns specified. Please specify with -D" +
            COLUMNS_CONF_KEY + "=...");
        return -1;
      }

      // Make sure they specify exactly one column as the row key
      int rowkeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.ROWKEY_COLUMN_SPEC)) rowkeysFound++;
      }
      if (rowkeysFound != 1) {
        usage("Must specify exactly one column as " + TsvParser.ROWKEY_COLUMN_SPEC);
        return -1;
      }

      // Make sure we have at most one column as the timestamp key
      int tskeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.TIMESTAMPKEY_COLUMN_SPEC)) {
          tskeysFound++;
        }
      }
      if (tskeysFound > 1) {
        usage("Must specify at most one column as "
            + TsvParser.TIMESTAMPKEY_COLUMN_SPEC);
        return -1;
      }

      int attrKeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.ATTRIBUTES_COLUMN_SPEC)) {
          attrKeysFound++;
        }
      }
      if (attrKeysFound > 1) {
        usage("Must specify at most one column as "
            + TsvParser.ATTRIBUTES_COLUMN_SPEC);
        return -1;
      }

      // Make sure one or more columns are specified excluding rowkey and
      // timestamp key
      if (columns.length - (rowkeysFound + tskeysFound + attrKeysFound) < 1) {
        usage("One or more columns in addition to the row key and timestamp (optional) are required");
        return -1;
      }
    }

    // If timestamp option is not specified, use current system time.
    long timestamp = getConf().getLong(TIMESTAMP_CONF_KEY, System.currentTimeMillis());

    // Set it back to replace an invalid timestamp (non-numeric) with the current
    // system time
    getConf().setLong(TIMESTAMP_CONF_KEY, timestamp);

    dryRunTableCreated = false;
    Job job = createSubmittableJob(getConf(), args);
    boolean success = job.waitForCompletion(true);
    if (dryRunTableCreated) {
      deleteTable(getConf(), args);
    }
    return success ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    int status = ToolRunner.run(new Configuration(), new ImportTsv(), args);
    System.exit(status);
  }
}