/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Helper class for all utilities related to archival/retrieval of HFiles
 */
@InterfaceAudience.Private
public final class HFileArchiveUtil {
  private HFileArchiveUtil() {
    // non-external instantiation - util class
  }

  /**
   * Get the directory to archive a store directory
   * @param conf       {@link Configuration} to read for the archive directory name
   * @param tableName  table name under which the store currently lives
   * @param regionName region encoded name under which the store currently lives
   * @param familyName name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   */
  public static Path getStoreArchivePath(final Configuration conf, final TableName tableName,
    final String regionName, final String familyName) throws IOException {
    Path tableArchiveDir = getTableArchivePath(conf, tableName);
    return HStore.getStoreHomedir(tableArchiveDir, regionName, Bytes.toBytes(familyName));
  }

  /**
   * Get the directory to archive a store directory
   * @param conf     {@link Configuration} to read for the archive directory name.
   * @param region   parent region information under which the store currently lives
   * @param tabledir directory for the table under which the store currently lives
   * @param family   name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   */
  public static Path getStoreArchivePath(Configuration conf, RegionInfo region, Path tabledir,
    byte[] family) throws IOException {
    return getStoreArchivePath(conf, region, family);
  }
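  /*
   * Illustrative sketch: how the getStoreArchivePath(...) overloads in this class are typically
   * invoked. The family name "cf" and the resolved layout shown below are assumptions derived
   * from getArchivePath(Path) and CommonFSUtils.getTableDir(), not a contract of this class.
   *
   *   Configuration conf = HBaseConfiguration.create();
   *   RegionInfo region = ...; // region that currently hosts the store
   *   Path archivedStoreDir = getStoreArchivePath(conf, region, Bytes.toBytes("cf"));
   *   // typically resolves to something like:
   *   //   <hbase.rootdir>/archive/data/<namespace>/<table>/<encoded-region-name>/cf
   */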
  /**
   * Gets the directory to archive a store directory.
   * @param conf   {@link Configuration} to read for the archive directory name.
   * @param region parent region information under which the store currently lives
   * @param family name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   */
  public static Path getStoreArchivePath(Configuration conf, RegionInfo region, byte[] family)
    throws IOException {
    Path rootDir = CommonFSUtils.getRootDir(conf);
    Path tableArchiveDir = getTableArchivePath(rootDir, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }

  /**
   * Gets the archive directory under the specified root dir. One scenario where this is useful is
   * when WAL and root dir are configured under different file systems, i.e. root dir on S3 and
   * WALs on HDFS. This is mostly useful for archiving recovered edits, when
   * <b>hbase.region.archive.recovered.edits</b> is enabled.
   * @param rootDir {@link Path} the root dir under which archive path should be created.
   * @param region  parent region information under which the store currently lives
   * @param family  name of the family in the store
   * @return {@link Path} to the WAL FS directory to archive the given store or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getStoreArchivePathForRootDir(Path rootDir, RegionInfo region, byte[] family) {
    Path tableArchiveDir = getTableArchivePath(rootDir, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }

  /**
   * Gets the directory to archive a store directory under an already-resolved archive base
   * directory.
   * @param archivePath base archive directory under which the table layout is built
   * @param region      parent region information under which the store currently lives
   * @param family      name of the family in the store
   * @return {@link Path} to the directory to archive the given store
   */
  public static Path getStoreArchivePathForArchivePath(Path archivePath, RegionInfo region,
    byte[] family) {
    Path tableArchiveDir = CommonFSUtils.getTableDir(archivePath, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }

  /**
   * Get the archive directory for a given region under the specified table
   * @param rootDir   {@link Path} to the root directory where hbase files are stored (for building
   *                  the archive path)
   * @param tableName the table name. Cannot be null.
   * @param regiondir the path to the region directory. Cannot be null.
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName, Path regiondir) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);

    // then add on the region path under the archive
    String encodedRegionName = regiondir.getName();
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }

  /**
   * Get the archive directory for a given region under the specified table
   * @param rootDir           {@link Path} to the root directory where hbase files are stored (for
   *                          building the archive path)
   * @param tableName         name of the table to archive. Cannot be null.
   * @param encodedRegionName encoded name of the region to archive. Cannot be null.
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName,
    String encodedRegionName) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }
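  /*
   * Illustrative sketch: resolving the archive location of a whole region with the helpers above.
   * The layout shown is an assumption derived from getArchivePath(Path) and
   * CommonFSUtils.getTableDir(), not a contract of this class.
   *
   *   Path rootDir = CommonFSUtils.getRootDir(conf);
   *   Path regionArchiveDir =
   *     getRegionArchiveDir(rootDir, region.getTable(), region.getEncodedName());
   *   // typically resolves to something like:
   *   //   <hbase.rootdir>/archive/data/<namespace>/<table>/<encoded-region-name>
   */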
  /**
   * Get the path to the table's archive directory, based on the configured archive directory.
   * <p>
   * Generally of the form: /hbase/archive/data/[namespace]/[tablename]
   * @param rootdir   {@link Path} to the root directory where hbase files are stored (for building
   *                  the archive path)
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   */
  public static Path getTableArchivePath(final Path rootdir, final TableName tableName) {
    return CommonFSUtils.getTableDir(getArchivePath(rootdir), tableName);
  }

  /**
   * Get the path to the table archive directory based on the configured archive directory.
   * <p>
   * Assumes that the table should already be archived.
   * @param conf      {@link Configuration} to read the archive directory property. Can be null
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   */
  public static Path getTableArchivePath(final Configuration conf, final TableName tableName)
    throws IOException {
    return CommonFSUtils.getTableDir(getArchivePath(conf), tableName);
  }

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}
   * @param conf to look for archive directory name and root directory. Cannot be null. Notes for
   *             testing: requires a FileSystem root directory to be specified.
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   * @throws IOException if an unexpected error occurs
   */
  public static Path getArchivePath(Configuration conf) throws IOException {
    return getArchivePath(CommonFSUtils.getRootDir(conf));
  }

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}
   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building
   *                the archive path)
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   */
  private static Path getArchivePath(final Path rootdir) {
    return new Path(rootdir, HConstants.HFILE_ARCHIVE_DIRECTORY);
  }

  /**
   * @param archivePath archive file path
   * @return table name given archive file path
   */
  public static TableName getTableName(Path archivePath) {
    Path p = archivePath;
    String tbl = null;
    // namespace is the 4th parent of file
    for (int i = 0; i < 5; i++) {
      if (p == null) return null;
      if (i == 3) tbl = p.getName();
      p = p.getParent();
    }
    if (p == null) return null;
    return TableName.valueOf(p.getName(), tbl);
  }
}