/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static java.lang.String.format;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;

/**
 * Tool to import data from a TSV file. This tool is rather simplistic - it doesn't do any quoting
 * or escaping, but is useful for many data loads.
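 * <p>
 * Example invocation (illustrative only; the table name, column mapping, and input path below are
 * placeholders, matching the form described by {@link #usage(String)}):
 *
 * <pre>
 * hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
 *   -Dimporttsv.columns=HBASE_ROW_KEY,d:c1,d:c2 \
 *   exampleTable /path/to/input.tsv
 * </pre>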
 * @see ImportTsv#usage(String)
 */
@InterfaceAudience.Public
public class ImportTsv extends Configured implements Tool {

  protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);

  final static String NAME = "importtsv";

  public final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
  public final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
  public final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
  public final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
  // TODO: the rest of these configs are used exclusively by TsvImporterMapper.
  // Move them out of the tool and let the mapper handle its own validation.
  public final static String DRY_RUN_CONF_KEY = "importtsv.dry.run";
  // If true, bad lines are logged to stderr. Default: false.
  public final static String LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
  public final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
  public final static String SKIP_EMPTY_COLUMNS = "importtsv.skip.empty.columns";
  public final static String COLUMNS_CONF_KEY = "importtsv.columns";
  public final static String SEPARATOR_CONF_KEY = "importtsv.separator";
  public final static String ATTRIBUTE_SEPERATOR_CONF_KEY = "attributes.seperator";
  // This config is used to propagate credentials from parent MR jobs which launch
  // ImportTSV jobs. See IntegrationTestImportTsv.
  public final static String CREDENTIALS_LOCATION = "credentials_location";
  final static String DEFAULT_SEPARATOR = "\t";
  final static String DEFAULT_ATTRIBUTES_SEPERATOR = "=>";
  final static String DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
  final static Class DEFAULT_MAPPER = TsvImporterMapper.class;
  public final static String CREATE_TABLE_CONF_KEY = "create.table";
  public final static String NO_STRICT_COL_FAMILY = "no.strict";
  /**
   * If the table didn't exist and was created in dry-run mode, this flag is flipped so that the
   * table is deleted when the MR job ends.
   */
  private static boolean DRY_RUN_TABLE_CREATED;

  public static class TsvParser {
    /**
     * Column families and qualifiers mapped to the TSV columns
     */
    private final byte[][] families;
    private final byte[][] qualifiers;

    private final byte separatorByte;

    private int rowKeyColumnIndex;

    private int maxColumnCount;

    // Default value must be negative
    public static final int DEFAULT_TIMESTAMP_COLUMN_INDEX = -1;

    private int timestampKeyColumnIndex = DEFAULT_TIMESTAMP_COLUMN_INDEX;

    public static final String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";

    public static final String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";

    public static final String ATTRIBUTES_COLUMN_SPEC = "HBASE_ATTRIBUTES_KEY";

    public static final String CELL_VISIBILITY_COLUMN_SPEC = "HBASE_CELL_VISIBILITY";

    public static final String CELL_TTL_COLUMN_SPEC = "HBASE_CELL_TTL";

    private int attrKeyColumnIndex = DEFAULT_ATTRIBUTES_COLUMN_INDEX;

    public static final int DEFAULT_ATTRIBUTES_COLUMN_INDEX = -1;

    public static final int DEFAULT_CELL_VISIBILITY_COLUMN_INDEX = -1;

    public static final int DEFAULT_CELL_TTL_COLUMN_INDEX = -1;

    private int cellVisibilityColumnIndex = DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;

    private int cellTTLColumnIndex = DEFAULT_CELL_TTL_COLUMN_INDEX;

    /**
     * @param columnsSpecification the list of columns to parse out, comma separated. The row key
     *                             should be the special token TsvParser.ROWKEY_COLUMN_SPEC
     */
    public TsvParser(String columnsSpecification, String separatorStr) {
      // Configure separator
      byte[] separator = Bytes.toBytes(separatorStr);
      Preconditions.checkArgument(separator.length == 1,
        "TsvParser only supports single-byte separators");
      separatorByte = separator[0];

      // Configure columns
      ArrayList<String> columnStrings =
        Lists.newArrayList(Splitter.on(',').trimResults().split(columnsSpecification));

      maxColumnCount = columnStrings.size();
      families = new byte[maxColumnCount][];
      qualifiers = new byte[maxColumnCount][];

      for (int i = 0; i < columnStrings.size(); i++) {
        String str = columnStrings.get(i);
        if (ROWKEY_COLUMN_SPEC.equals(str)) {
          rowKeyColumnIndex = i;
          continue;
        }
        if (TIMESTAMPKEY_COLUMN_SPEC.equals(str)) {
          timestampKeyColumnIndex = i;
          continue;
        }
        if (ATTRIBUTES_COLUMN_SPEC.equals(str)) {
          attrKeyColumnIndex = i;
          continue;
        }
        if (CELL_VISIBILITY_COLUMN_SPEC.equals(str)) {
          cellVisibilityColumnIndex = i;
          continue;
        }
        if (CELL_TTL_COLUMN_SPEC.equals(str)) {
          cellTTLColumnIndex = i;
          continue;
        }
        String[] parts = str.split(":", 2);
        if (parts.length == 1) {
          families[i] = Bytes.toBytes(str);
          qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
        } else {
          families[i] = Bytes.toBytes(parts[0]);
          qualifiers[i] = Bytes.toBytes(parts[1]);
        }
      }
    }

    public boolean hasTimestamp() {
      return timestampKeyColumnIndex != DEFAULT_TIMESTAMP_COLUMN_INDEX;
    }

    public int getTimestampKeyColumnIndex() {
      return timestampKeyColumnIndex;
    }

    public boolean hasAttributes() {
      return attrKeyColumnIndex != DEFAULT_ATTRIBUTES_COLUMN_INDEX;
    }

    public boolean hasCellVisibility() {
      return cellVisibilityColumnIndex != DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
    }

    public boolean hasCellTTL() {
      return cellTTLColumnIndex != DEFAULT_CELL_TTL_COLUMN_INDEX;
    }

    public int getAttributesKeyColumnIndex() {
      return attrKeyColumnIndex;
    }

    public int getCellVisibilityColumnIndex() {
      return cellVisibilityColumnIndex;
    }

    public int getCellTTLColumnIndex() {
      return cellTTLColumnIndex;
    }

    public int getRowKeyColumnIndex() {
      return rowKeyColumnIndex;
    }

    public byte[] getFamily(int idx) {
      return families[idx];
    }

    public byte[] getQualifier(int idx) {
      return qualifiers[idx];
    }

    public ParsedLine parse(byte[] lineBytes, int length) throws BadTsvLineException {
      // Enumerate separator offsets
      ArrayList<Integer> tabOffsets = new ArrayList<>(maxColumnCount);
      for (int i = 0; i < length; i++) {
        if (lineBytes[i] == separatorByte) {
          tabOffsets.add(i);
        }
      }
      if (tabOffsets.isEmpty()) {
        throw new BadTsvLineException("No delimiter");
      }

      tabOffsets.add(length);

      if (tabOffsets.size() > maxColumnCount) {
        throw new BadTsvLineException("Excessive columns");
      } else if (tabOffsets.size() <= getRowKeyColumnIndex()) {
        throw new BadTsvLineException("No row key");
      } else if (hasTimestamp() && tabOffsets.size() <= getTimestampKeyColumnIndex()) {
        throw new BadTsvLineException("No timestamp");
      } else if (hasAttributes() && tabOffsets.size() <= getAttributesKeyColumnIndex()) {
        throw new BadTsvLineException("No attributes specified");
      } else if (hasCellVisibility() && tabOffsets.size() <= getCellVisibilityColumnIndex()) {
        throw new BadTsvLineException("No cell visibility specified");
      } else if (hasCellTTL() && tabOffsets.size() <= getCellTTLColumnIndex()) {
        throw new BadTsvLineException("No cell TTL specified");
      }
      return new ParsedLine(tabOffsets, lineBytes);
    }

    class ParsedLine {
      private final ArrayList<Integer> tabOffsets;
      private byte[] lineBytes;

      ParsedLine(ArrayList<Integer> tabOffsets, byte[] lineBytes) {
        this.tabOffsets = tabOffsets;
        this.lineBytes = lineBytes;
      }

      public int getRowKeyOffset() {
        return getColumnOffset(rowKeyColumnIndex);
      }

      public int getRowKeyLength() {
        return getColumnLength(rowKeyColumnIndex);
      }

      public long getTimestamp(long ts) throws BadTsvLineException {
        // Return ts if HBASE_TS_KEY is not configured in column spec
        if (!hasTimestamp()) {
          return ts;
        }

        String timeStampStr = Bytes.toString(lineBytes, getColumnOffset(timestampKeyColumnIndex),
          getColumnLength(timestampKeyColumnIndex));
        try {
          return Long.parseLong(timeStampStr);
        } catch (NumberFormatException nfe) {
          // treat this record as bad record
          throw new BadTsvLineException("Invalid timestamp " + timeStampStr);
        }
      }

      private String getAttributes() {
        if (!hasAttributes()) {
          return null;
        } else {
          return Bytes.toString(lineBytes, getColumnOffset(attrKeyColumnIndex),
            getColumnLength(attrKeyColumnIndex));
        }
      }

      public String[] getIndividualAttributes() {
        String attributes = getAttributes();
        if (attributes != null) {
          return attributes.split(DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR);
        } else {
          return null;
        }
      }

      public int getAttributeKeyOffset() {
        if (hasAttributes()) {
          return getColumnOffset(attrKeyColumnIndex);
        } else {
          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
        }
      }

      public int getAttributeKeyLength() {
        if (hasAttributes()) {
          return getColumnLength(attrKeyColumnIndex);
        } else {
          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
        }
      }

      public int getCellVisibilityColumnOffset() {
        if (hasCellVisibility()) {
          return getColumnOffset(cellVisibilityColumnIndex);
        } else {
          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
        }
      }

      public int getCellVisibilityColumnLength() {
        if (hasCellVisibility()) {
          return getColumnLength(cellVisibilityColumnIndex);
        } else {
          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
        }
      }

      public String getCellVisibility() {
        if (!hasCellVisibility()) {
          return null;
        } else {
          return Bytes.toString(lineBytes, getColumnOffset(cellVisibilityColumnIndex),
            getColumnLength(cellVisibilityColumnIndex));
        }
      }

      public int getCellTTLColumnOffset() {
        if (hasCellTTL()) {
          return getColumnOffset(cellTTLColumnIndex);
        } else {
          return DEFAULT_CELL_TTL_COLUMN_INDEX;
        }
      }

      public int getCellTTLColumnLength() {
        if (hasCellTTL()) {
          return getColumnLength(cellTTLColumnIndex);
        } else {
          return DEFAULT_CELL_TTL_COLUMN_INDEX;
        }
      }

      public long getCellTTL() {
        if (!hasCellTTL()) {
          return 0;
        } else {
          return Bytes.toLong(lineBytes, getColumnOffset(cellTTLColumnIndex),
            getColumnLength(cellTTLColumnIndex));
        }
      }

      public int getColumnOffset(int idx) {
        if (idx > 0) return tabOffsets.get(idx - 1) + 1;
        else return 0;
      }

      public int getColumnLength(int idx) {
        return tabOffsets.get(idx) - getColumnOffset(idx);
      }

      public int getColumnCount() {
        return tabOffsets.size();
      }

      public byte[] getLineBytes() {
        return lineBytes;
      }
    }

    public static class BadTsvLineException extends Exception {
      public BadTsvLineException(String err) {
        super(err);
      }

      private static final long serialVersionUID = 1L;
    }

    /**
     * Return starting position and length of row key from the specified line bytes.
     * @return Pair of row key offset and length.
     */
    public Pair<Integer, Integer> parseRowKey(byte[] lineBytes, int length)
      throws BadTsvLineException {
      int rkColumnIndex = 0;
      int startPos = 0, endPos = 0;
      for (int i = 0; i <= length; i++) {
        if (i == length || lineBytes[i] == separatorByte) {
          endPos = i - 1;
          if (rkColumnIndex++ == getRowKeyColumnIndex()) {
            if ((endPos + 1) == startPos) {
              throw new BadTsvLineException("Empty value for ROW KEY.");
            }
            break;
          } else {
            startPos = endPos + 2;
          }
        }
        if (i == length) {
          throw new BadTsvLineException("Row key does not exist as number of columns in the line"
            + " are less than row key position.");
        }
      }
      return new Pair<>(startPos, endPos - startPos + 1);
    }
  }

  /**
   * Sets up the actual job.
   * @param conf The current configuration.
   * @param args The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   */
  protected static Job createSubmittableJob(Configuration conf, String[] args)
    throws IOException, ClassNotFoundException {
    Job job = null;
    boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      try (Admin admin = connection.getAdmin()) {
        // Support separator characters that cannot be carried safely in the XML configuration
        // by re-encoding the passed separator as a Base64 string.
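        // (The configured mapper, e.g. the default TsvImporterMapper, is expected to Base64-decode
        // this value back to the raw separator before parsing input lines.)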
        String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
        if (actualSeparator != null) {
          conf.set(SEPARATOR_CONF_KEY,
            Bytes.toString(Base64.getEncoder().encode(Bytes.toBytes(actualSeparator))));
        }

        // See if a non-default Mapper was set
        String mapperClassName = conf.get(MAPPER_CONF_KEY);
        Class mapperClass =
          mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;

        TableName tableName = TableName.valueOf(args[0]);
        Path inputDir = new Path(args[1]);
        String jobName = conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName.getNameAsString());
        job = Job.getInstance(conf, jobName);
        job.setJarByClass(mapperClass);
        FileInputFormat.setInputPaths(job, inputDir);
        job.setInputFormatClass(TextInputFormat.class);
        job.setMapperClass(mapperClass);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
        String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
        if (StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
          String fileLoc = conf.get(CREDENTIALS_LOCATION);
          Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
          job.getCredentials().addAll(cred);
        }

        if (hfileOutPath != null) {
          if (!admin.tableExists(tableName)) {
            LOG.warn(format("Table '%s' does not exist.", tableName));
            if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
              // TODO: this is backwards. Instead of depending on the existence of a table,
              // create a sane splits file for HFileOutputFormat based on data sampling.
              createTable(admin, tableName, columns);
              if (isDryRun) {
                LOG.warn("Dry run: Table will be deleted at end of dry run.");
                synchronized (ImportTsv.class) {
                  DRY_RUN_TABLE_CREATED = true;
                }
              }
            } else {
              String errorMsg = format("Table '%s' does not exist and '%s' is set to no.",
                tableName, CREATE_TABLE_CONF_KEY);
              LOG.error(errorMsg);
              throw new TableNotFoundException(errorMsg);
            }
          }
          try (Table table = connection.getTable(tableName);
            RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
            boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
            // if no.strict is false then check column family
            if (!noStrict) {
              ArrayList<String> unmatchedFamilies = new ArrayList<>();
              Set<String> cfSet = getColumnFamilies(columns);
              TableDescriptor tDesc = table.getDescriptor();
              for (String cf : cfSet) {
                if (!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
                  unmatchedFamilies.add(cf);
                }
              }
              if (unmatchedFamilies.size() > 0) {
                ArrayList<String> familyNames = new ArrayList<>();
                for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) {
                  familyNames.add(family.getNameAsString());
                }
                String msg = "Column Families " + unmatchedFamilies + " specified in "
                  + COLUMNS_CONF_KEY + " does not match with any of the table " + tableName
                  + " column families " + familyNames + ".\n"
                  + "To disable column family check, use -D" + NO_STRICT_COL_FAMILY + "=true.\n";
                usage(msg);
                System.exit(-1);
              }
            }
            if (mapperClass.equals(TsvImporterTextMapper.class)) {
              job.setMapOutputValueClass(Text.class);
              job.setReducerClass(TextSortReducer.class);
            } else {
              job.setMapOutputValueClass(Put.class);
              job.setCombinerClass(PutCombiner.class);
              job.setReducerClass(PutSortReducer.class);
            }
            if (!isDryRun) {
              Path outputDir = new Path(hfileOutPath);
              FileOutputFormat.setOutputPath(job, outputDir);
              HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(),
                regionLocator);
            }
          }
        } else {
          if (!admin.tableExists(tableName)) {
            String errorMsg = format("Table '%s' does not exist.", tableName);
            LOG.error(errorMsg);
            throw new TableNotFoundException(errorMsg);
          }
          try (Table table = connection.getTable(tableName)) {
            ArrayList<String> unmatchedFamilies = new ArrayList<>();
            Set<String> cfSet = getColumnFamilies(columns);
            TableDescriptor tDesc = table.getDescriptor();
            for (String cf : cfSet) {
              if (!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
                unmatchedFamilies.add(cf);
              }
            }
            if (unmatchedFamilies.size() > 0) {
              String noSuchColumnFamiliesMsg =
                format("Column families: %s do not exist.", unmatchedFamilies);
              LOG.error(noSuchColumnFamiliesMsg);
              throw new NoSuchColumnFamilyException(noSuchColumnFamiliesMsg);
            }
          }
          if (mapperClass.equals(TsvImporterTextMapper.class)) {
            usage(TsvImporterTextMapper.class.toString()
              + " should not be used for non bulkloading case. use "
              + TsvImporterMapper.class.toString() + " or custom mapper whose value type is Put.");
            System.exit(-1);
          }
          if (!isDryRun) {
            // No reducers. Just write straight to table. Call initTableReducerJob
            // to set up the TableOutputFormat.
            TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
          }
          job.setNumReduceTasks(0);
        }
        if (isDryRun) {
          job.setOutputFormatClass(NullOutputFormat.class);
          job.getConfiguration().setStrings("io.serializations",
            job.getConfiguration().get("io.serializations"), MutationSerialization.class.getName(),
            ResultSerialization.class.getName(), CellSerialization.class.getName());
        }
        TableMapReduceUtil.addDependencyJars(job);
        TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
          org.apache.hbase.thirdparty.com.google.common.base.Function.class /* Guava used by
                                                                               TsvParser */);
      }
    }
    return job;
  }

  private static void createTable(Admin admin, TableName tableName, String[] columns)
    throws IOException {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
    Set<String> cfSet = getColumnFamilies(columns);
    for (String cf : cfSet) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(cf));
    }
    LOG.warn(
      format("Creating table '%s' with '%s' columns and default descriptors.", tableName, cfSet));
    admin.createTable(builder.build());
  }

  private static void deleteTable(Configuration conf, String[] args) {
    TableName tableName = TableName.valueOf(args[0]);
    try (Connection connection = ConnectionFactory.createConnection(conf);
      Admin admin = connection.getAdmin()) {
      try {
        admin.disableTable(tableName);
      } catch (TableNotEnabledException e) {
        LOG.debug("Dry mode: Table: " + tableName + " already disabled, so just deleting it.");
      }
      admin.deleteTable(tableName);
    } catch (IOException e) {
      LOG.error(format("***Dry run: Failed to delete table '%s'.***%n%s", tableName, e.toString()));
      return;
    }
    LOG.info(format("Dry run: Deleted table '%s'.", tableName));
  }

  private static Set<String> getColumnFamilies(String[] columns) {
    Set<String> cfSet = new HashSet<>();
    for (String aColumn : columns) {
      if (
        TsvParser.ROWKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.TIMESTAMPKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.CELL_VISIBILITY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.CELL_TTL_COLUMN_SPEC.equals(aColumn)
          || TsvParser.ATTRIBUTES_COLUMN_SPEC.equals(aColumn)
      ) continue;
      // we are only concerned with the first one (in case this is a cf:cq)
      cfSet.add(aColumn.split(":", 2)[0]);
    }
    return cfSet;
  }

  /*
   * @param errorMsg Error message. Can be null.
   */
  private static void usage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    String usage = "Usage: " + NAME + " -D" + COLUMNS_CONF_KEY + "=a,b,c <tablename> <inputdir>\n"
      + "\n" + "Imports the given input directory of TSV data into the specified table.\n" + "\n"
      + "The column names of the TSV data must be specified using the -D" + COLUMNS_CONF_KEY + "\n"
      + "option. This option takes the form of comma-separated column names, where each\n"
      + "column name is either a simple column family, or a columnfamily:qualifier. The special\n"
      + "column name " + TsvParser.ROWKEY_COLUMN_SPEC
      + " is used to designate that this column should be used\n"
      + "as the row key for each imported record. You must specify exactly one column\n"
      + "to be the row key, and you must specify a column name for every column that exists in the\n"
      + "input data. Another special column " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC
      + " designates that this column should be\n" + "used as timestamp for each record. Unlike "
      + TsvParser.ROWKEY_COLUMN_SPEC + ", " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC + " is optional."
      + "\n" + "You must specify at most one column as timestamp key for each imported record.\n"
      + "Records with invalid timestamps (blank, non-numeric) will be treated as bad records.\n"
      + "Note: if you use this option, then '" + TIMESTAMP_CONF_KEY + "' option will be ignored.\n"
      + "\n" + "Other special columns that can be specified are " + TsvParser.CELL_TTL_COLUMN_SPEC
      + " and " + TsvParser.CELL_VISIBILITY_COLUMN_SPEC + ".\n" + TsvParser.CELL_TTL_COLUMN_SPEC
      + " designates that this column will be used " + "as a Cell's Time To Live (TTL) attribute.\n"
      + TsvParser.CELL_VISIBILITY_COLUMN_SPEC + " designates that this column contains the "
      + "visibility label expression.\n" + "\n" + TsvParser.ATTRIBUTES_COLUMN_SPEC
      + " can be used to specify Operation Attributes per record.\n"
      + " Should be specified as key=>value where " + DEFAULT_ATTRIBUTES_SEPERATOR + " is used \n"
      + " as the separator. Note that more than one OperationAttributes can be specified.\n"
      + "By default importtsv will load data directly into HBase. To instead generate\n"
      + "HFiles of data to prepare for a bulk data load, pass the option:\n" + " -D"
      + BULK_OUTPUT_CONF_KEY + "=/path/for/output\n"
      + " Note: if you do not use this option, then the target table must already exist in HBase\n"
      + "\n" + "Other options that may be specified with -D include:\n" + " -D" + DRY_RUN_CONF_KEY
      + "=true - Dry run mode. Data is not actually populated into"
      + " table. If table does not exist, it is created but deleted in the end.\n" + " -D"
      + SKIP_LINES_CONF_KEY + "=false - fail if encountering an invalid line\n" + " -D"
      + LOG_BAD_LINES_CONF_KEY + "=true - logs invalid lines to stderr\n" + " -D"
      + SKIP_EMPTY_COLUMNS + "=false - If true then skip empty columns in bulk import\n" + " '-D"
      + SEPARATOR_CONF_KEY + "=|' - eg separate on pipes instead of tabs\n" + " -D"
      + TIMESTAMP_CONF_KEY + "=currentTimeAsLong - use the specified timestamp for the import\n"
      + " -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of "
      + DEFAULT_MAPPER.getName() + "\n" + " -D" + JOB_NAME_CONF_KEY
      + "=jobName - use the specified mapreduce job name for the import\n" + " -D"
      + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this tool\n"
      + " Note: if you set this to 'no', then the target table must already exist in HBase\n"
      + " -D" + NO_STRICT_COL_FAMILY + "=true - ignore column family check in hbase table. "
      + "Default is false\n\n" + "For performance consider the following options:\n"
      + " -Dmapreduce.map.speculative=false\n" + " -Dmapreduce.reduce.speculative=false";

    System.err.println(usage);
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length < 2) {
      usage("Wrong number of arguments: " + args.length);
      return -1;
    }

    // When MAPPER_CONF_KEY is null, the user wants to use the provided TsvImporterMapper, so
    // perform validation on these additional args. When it's not null, the user has provided their
    // own mapper, so this validation is not relevant.
    // TODO: validation for TsvImporterMapper, not this tool. Move elsewhere.
    if (null == getConf().get(MAPPER_CONF_KEY)) {
      // Make sure columns are specified
      String[] columns = getConf().getStrings(COLUMNS_CONF_KEY);
      if (columns == null) {
        usage("No columns specified. Please specify with -D" + COLUMNS_CONF_KEY + "=...");
        return -1;
      }

      // Make sure they specify exactly one column as the row key
      int rowkeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.ROWKEY_COLUMN_SPEC)) rowkeysFound++;
      }
      if (rowkeysFound != 1) {
        usage("Must specify exactly one column as " + TsvParser.ROWKEY_COLUMN_SPEC);
        return -1;
      }

      // Make sure we have at most one column as the timestamp key
      int tskeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.TIMESTAMPKEY_COLUMN_SPEC)) tskeysFound++;
      }
      if (tskeysFound > 1) {
        usage("Must specify at most one column as " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC);
        return -1;
      }

      int attrKeysFound = 0;
      for (String col : columns) {
        if (col.equals(TsvParser.ATTRIBUTES_COLUMN_SPEC)) attrKeysFound++;
      }
      if (attrKeysFound > 1) {
        usage("Must specify at most one column as " + TsvParser.ATTRIBUTES_COLUMN_SPEC);
        return -1;
      }

      // Make sure one or more columns are specified excluding rowkey and
      // timestamp key
      if (columns.length - (rowkeysFound + tskeysFound + attrKeysFound) < 1) {
        usage(
          "One or more columns in addition to the row key and timestamp(optional) are required");
        return -1;
      }
    }

    // If timestamp option is not specified, use current system time.
    long timestamp = getConf().getLong(TIMESTAMP_CONF_KEY, EnvironmentEdgeManager.currentTime());

    // Set it back to replace invalid timestamp (non-numeric) with current
    // system time
    getConf().setLong(TIMESTAMP_CONF_KEY, timestamp);

    synchronized (ImportTsv.class) {
      DRY_RUN_TABLE_CREATED = false;
    }
    Job job = createSubmittableJob(getConf(), args);
    boolean success = job.waitForCompletion(true);
    boolean delete = false;
    synchronized (ImportTsv.class) {
      delete = DRY_RUN_TABLE_CREATED;
    }
    if (delete) {
      deleteTable(getConf(), args);
    }
    return success ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    int status = ToolRunner.run(HBaseConfiguration.create(), new ImportTsv(), args);
    System.exit(status);
  }
}