@InterfaceAudience.Private public final class BackupUtils extends Object
Modifier and Type | Field and Description |
---|---|
private static org.slf4j.Logger |
LOG |
static String |
LOGNAME_SEPARATOR |
static int |
MILLISEC_IN_HOUR |
Modifier | Constructor and Description |
---|---|
private |
BackupUtils() |
Modifier and Type | Method and Description |
---|---|
static boolean |
checkPathExist(String backupStr,
org.apache.hadoop.conf.Configuration conf)
Check whether the backup path exists
|
static void |
checkTargetDir(String backupRootPath,
org.apache.hadoop.conf.Configuration conf)
Check target path first, confirm it doesn't exist before backup
|
static void |
cleanupBackupData(BackupInfo context,
org.apache.hadoop.conf.Configuration conf) |
private static void |
cleanupHLogDir(BackupInfo backupInfo,
org.apache.hadoop.conf.Configuration conf)
Clean up directories which are generated when DistCp copying hlogs
|
private static void |
cleanupTargetDir(BackupInfo backupInfo,
org.apache.hadoop.conf.Configuration conf) |
static void |
copyTableRegionInfo(Connection conn,
BackupInfo backupInfo,
org.apache.hadoop.conf.Configuration conf)
Copy out the table RegionInfo into the incremental backup image; need to consider moving this logic into
HBackupFileSystem
|
static BulkLoadHFiles |
createLoader(org.apache.hadoop.conf.Configuration config) |
static RestoreRequest |
createRestoreRequest(String backupRootDir,
String backupId,
boolean check,
TableName[] fromTables,
TableName[] toTables,
boolean isOverwrite)
Create restore request.
|
static boolean |
failed(int result) |
static String |
findMostRecentBackupId(String[] backupIds) |
static org.apache.hadoop.fs.Path |
getBulkOutputDir(org.apache.hadoop.fs.Path restoreRootDir,
String tableName,
org.apache.hadoop.conf.Configuration conf) |
static org.apache.hadoop.fs.Path |
getBulkOutputDir(org.apache.hadoop.fs.Path restoreRootDir,
String tableName,
org.apache.hadoop.conf.Configuration conf,
boolean deleteOnExit) |
static org.apache.hadoop.fs.Path |
getBulkOutputDir(String tableName,
org.apache.hadoop.conf.Configuration conf,
boolean deleteOnExit) |
static Long |
getCreationTime(org.apache.hadoop.fs.Path p)
Given the log file, parse the timestamp from the file name.
|
static String |
getFileNameCompatibleString(TableName table) |
static List<String> |
getFiles(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path rootDir,
List<String> files,
org.apache.hadoop.fs.PathFilter filter) |
static long |
getFilesLength(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path dir)
Get the total length of files under the given directory recursively.
|
static List<BackupInfo> |
getHistory(org.apache.hadoop.conf.Configuration conf,
int n,
org.apache.hadoop.fs.Path backupRootPath,
BackupInfo.Filter... filters) |
private static List<BackupInfo> |
getHistory(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.fs.Path backupRootPath) |
static String |
getLogBackupDir(String backupRootDir,
String backupId)
Given the backup root dir and the backup id, return the log file location for an incremental
backup.
|
static <T> Long |
getMinValue(Map<T,Long> map)
Get the min value among all the values in a map.
|
static String |
getPath(org.apache.hadoop.fs.Path p)
Return the 'path' component of a Path.
|
static Map<String,Long> |
getRSLogTimestampMins(Map<TableName,Map<String,Long>> rsLogTimestampMap)
Loop through the RS log timestamp map for the tables; for each RS, find the min timestamp value
among the tables.
|
static String |
getTableBackupDir(String backupRootDir,
String backupId,
TableName tableName)
Given the backup root dir, backup id and the table name, return the backup image location,
which is also where the backup manifest file is.
|
static org.apache.hadoop.fs.Path |
getTmpRestoreOutputDir(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.conf.Configuration conf)
Build temporary output path
|
static String |
getUniqueWALFileNamePart(org.apache.hadoop.fs.Path p)
Returns WAL file name
|
static String |
getUniqueWALFileNamePart(String walFileName)
Returns WAL file name
|
static List<String> |
getWALFilesOlderThan(org.apache.hadoop.conf.Configuration c,
HashMap<String,Long> hostTimestampMap)
Get list of all old WAL files (WALs and archive)
|
static org.apache.hadoop.fs.FileStatus[] |
listStatus(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path dir,
org.apache.hadoop.fs.PathFilter filter)
Calls fs.listStatus() and treats FileNotFoundException as non-fatal. This accommodates
differences between Hadoop versions: Hadoop 1 does not throw a FileNotFoundException and
returns an empty FileStatus[], while Hadoop 2 will throw FileNotFoundException.
|
static BackupInfo |
loadBackupInfo(org.apache.hadoop.fs.Path backupRootPath,
String backupId,
org.apache.hadoop.fs.FileSystem fs) |
static String |
parseHostFromOldLog(org.apache.hadoop.fs.Path p)
Parses host name:port from archived WAL path
|
static String |
parseHostNameFromLogFile(org.apache.hadoop.fs.Path p)
Parses hostname:port from WAL file path
|
static TableName[] |
parseTableNames(String tables) |
static ArrayList<BackupInfo> |
sortHistoryListDesc(ArrayList<BackupInfo> historyList)
Sort history list by start time in descending order.
|
static boolean |
succeeded(int result) |
static boolean |
validate(HashMap<TableName,BackupManifest> backupManifestMap,
org.apache.hadoop.conf.Configuration conf) |
static void |
writeRegioninfoOnFilesystem(org.apache.hadoop.conf.Configuration conf,
org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path regionInfoDir,
RegionInfo regionInfo)
Write the .regioninfo file on-disk.
|
private static final org.slf4j.Logger LOG
public static final String LOGNAME_SEPARATOR
public static final int MILLISEC_IN_HOUR
private BackupUtils()
public static Map<String,Long> getRSLogTimestampMins(Map<TableName,Map<String,Long>> rsLogTimestampMap)
rsLogTimestampMap
- timestamp map
public static void copyTableRegionInfo(Connection conn, BackupInfo backupInfo, org.apache.hadoop.conf.Configuration conf) throws IOException
conn
- connection
backupInfo
- backup info
conf
- configuration
IOException
- exception
public static void writeRegioninfoOnFilesystem(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path regionInfoDir, RegionInfo regionInfo) throws IOException
IOException
public static String parseHostNameFromLogFile(org.apache.hadoop.fs.Path p)
p
- path to WAL file
public static String getUniqueWALFileNamePart(String walFileName)
walFileName
- WAL file name
public static String getUniqueWALFileNamePart(org.apache.hadoop.fs.Path p)
p
- WAL file path
public static long getFilesLength(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path dir) throws IOException
fs
- The hadoop file system
dir
- The target directory
IOException
- exception
public static List<String> getWALFilesOlderThan(org.apache.hadoop.conf.Configuration c, HashMap<String,Long> hostTimestampMap) throws IOException
c
- configuration
hostTimestampMap
- {host,timestamp} map
IOException
- exception
public static TableName[] parseTableNames(String tables)
public static boolean checkPathExist(String backupStr, org.apache.hadoop.conf.Configuration conf) throws IOException
backupStr
- backup
conf
- configuration
IOException
- exception
public static void checkTargetDir(String backupRootPath, org.apache.hadoop.conf.Configuration conf) throws IOException
backupRootPath
- backup destination path
conf
- configuration
IOException
- exception
public static <T> Long getMinValue(Map<T,Long> map)
map
- map
public static String parseHostFromOldLog(org.apache.hadoop.fs.Path p)
p
- path
public static Long getCreationTime(org.apache.hadoop.fs.Path p) throws IOException
p
- a path to the log file
IOException
- exception
public static List<String> getFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path rootDir, List<String> files, org.apache.hadoop.fs.PathFilter filter) throws IOException
IOException
public static void cleanupBackupData(BackupInfo context, org.apache.hadoop.conf.Configuration conf) throws IOException
IOException
private static void cleanupHLogDir(BackupInfo backupInfo, org.apache.hadoop.conf.Configuration conf) throws IOException
backupInfo
- backup info
conf
- configuration
IOException
- exception
private static void cleanupTargetDir(BackupInfo backupInfo, org.apache.hadoop.conf.Configuration conf)
public static String getTableBackupDir(String backupRootDir, String backupId, TableName tableName)
backupRootDir
- backup root directory
backupId
- backup id
tableName
- table name
public static ArrayList<BackupInfo> sortHistoryListDesc(ArrayList<BackupInfo> historyList)
historyList
- history list
public static org.apache.hadoop.fs.FileStatus[] listStatus(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path dir, org.apache.hadoop.fs.PathFilter filter) throws IOException
fs
- file system
dir
- directory
filter
- path filter
IOException
public static String getPath(org.apache.hadoop.fs.Path p)
hdfs://example.org:9000/hbase_trunk/TestTable/compaction.dir
, this method returns
/hbase_trunk/TestTable/compaction.dir
. This method is useful if you want to print
out a Path without qualifying Filesystem instance.
p
- file system Path whose 'path' component we are to return.
public static String getLogBackupDir(String backupRootDir, String backupId)
backupRootDir
- backup root directory
backupId
- backup id
private static List<BackupInfo> getHistory(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.fs.Path backupRootPath) throws IOException
IOException
public static List<BackupInfo> getHistory(org.apache.hadoop.conf.Configuration conf, int n, org.apache.hadoop.fs.Path backupRootPath, BackupInfo.Filter... filters) throws IOException
IOException
public static BackupInfo loadBackupInfo(org.apache.hadoop.fs.Path backupRootPath, String backupId, org.apache.hadoop.fs.FileSystem fs) throws IOException
IOException
public static RestoreRequest createRestoreRequest(String backupRootDir, String backupId, boolean check, TableName[] fromTables, TableName[] toTables, boolean isOverwrite)
backupRootDir
- backup root dir
backupId
- backup id
check
- check only
fromTables
- table list from
toTables
- table list to
isOverwrite
- overwrite data
public static boolean validate(HashMap<TableName,BackupManifest> backupManifestMap, org.apache.hadoop.conf.Configuration conf) throws IOException
IOException
public static org.apache.hadoop.fs.Path getBulkOutputDir(org.apache.hadoop.fs.Path restoreRootDir, String tableName, org.apache.hadoop.conf.Configuration conf, boolean deleteOnExit) throws IOException
IOException
public static org.apache.hadoop.fs.Path getBulkOutputDir(org.apache.hadoop.fs.Path restoreRootDir, String tableName, org.apache.hadoop.conf.Configuration conf) throws IOException
IOException
public static org.apache.hadoop.fs.Path getBulkOutputDir(String tableName, org.apache.hadoop.conf.Configuration conf, boolean deleteOnExit) throws IOException
IOException
public static org.apache.hadoop.fs.Path getTmpRestoreOutputDir(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.conf.Configuration conf)
fs
- filesystem for default output dir
conf
- configuration
public static String getFileNameCompatibleString(TableName table)
public static boolean failed(int result)
public static boolean succeeded(int result)
public static BulkLoadHFiles createLoader(org.apache.hadoop.conf.Configuration config)
public static String findMostRecentBackupId(String[] backupIds)
Copyright © 2007–2020 The Apache Software Foundation. All rights reserved.