/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Helper class for all utilities related to archival/retrieval of HFiles.
 */
@InterfaceAudience.Private
public final class HFileArchiveUtil {
  private HFileArchiveUtil() {
    // non-external instantiation - util class
  }

  /**
   * Get the directory to archive a store directory.
   * @param conf {@link Configuration} to read for the archive directory name
   * @param tableName table name under which the store currently lives
   * @param regionName region encoded name under which the store currently lives
   * @param familyName name of the family in the store
   * @return {@link Path} to the directory to archive the given store or
   *         <tt>null</tt> if it should not be archived
   */
  public static Path getStoreArchivePath(final Configuration conf, final TableName tableName,
      final String regionName, final String familyName) throws IOException {
    Path tableArchiveDir = getTableArchivePath(conf, tableName);
    return HStore.getStoreHomedir(tableArchiveDir, regionName, Bytes.toBytes(familyName));
  }
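  // Illustrative note for the overload above (not from the original docs; assumes the default
  // archive directory name from HConstants.HFILE_ARCHIVE_DIRECTORY and the standard table
  // layout): for table "ns:t1", encoded region "1234abcd" and family "cf", the returned path
  // has the shape <hbase.rootdir>/archive/data/ns/t1/1234abcd/cf.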

  /**
   * Get the directory to archive a store directory.
   * @param conf {@link Configuration} to read for the archive directory name.
   * @param region parent region information under which the store currently lives
   * @param tabledir directory for the table under which the store currently lives; currently not
   *          used when computing the archive path
   * @param family name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   */
  public static Path getStoreArchivePath(Configuration conf, RegionInfo region, Path tabledir,
      byte[] family) throws IOException {
    return getStoreArchivePath(conf, region, family);
  }

  /**
   * Gets the directory to archive a store directory.
   * @param conf {@link Configuration} to read for the archive directory name.
   * @param region parent region information under which the store currently lives
   * @param family name of the family in the store
   * @return {@link Path} to the directory to archive the given store or <tt>null</tt> if it should
   *         not be archived
   */
  public static Path getStoreArchivePath(Configuration conf, RegionInfo region, byte[] family)
      throws IOException {
    Path rootDir = FSUtils.getRootDir(conf);
    Path tableArchiveDir = getTableArchivePath(rootDir, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }
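  // Usage sketch for the overload above (illustrative; caller variable names are hypothetical):
  //   Path storeArchiveDir =
  //       HFileArchiveUtil.getStoreArchivePath(conf, regionInfo, Bytes.toBytes("cf"));
  // resolves the directory where that store's HFiles would be moved on archival, e.g. a shape
  // like <hbase.rootdir>/archive/data/<namespace>/<table>/<encoded-region>/cf.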

  /**
   * Gets the archive directory under the specified root dir. One scenario where this is useful is
   * when WAL and root dir are configured under different file systems, e.g. root dir on S3 and
   * WALs on HDFS. This is mostly useful for archiving recovered edits, when
   * <b>hbase.region.archive.recovered.edits</b> is enabled.
   * @param rootDir {@link Path} the root dir under which the archive path should be created.
   * @param region parent region information under which the store currently lives
   * @param family name of the family in the store
   * @return {@link Path} to the WAL FS directory to archive the given store
   *         or <tt>null</tt> if it should not be archived
   */
  public static Path getStoreArchivePathForRootDir(Path rootDir, RegionInfo region, byte[] family) {
    Path tableArchiveDir = getTableArchivePath(rootDir, region.getTable());
    return HStore.getStoreHomedir(tableArchiveDir, region, family);
  }
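  // Illustrative note for the method above: when the WAL file system differs from the root file
  // system (root dir on S3, WALs on HDFS), passing the WAL root dir here keeps the archived files
  // on the WAL file system, e.g. a shape like
  // hdfs://<wal-rootdir>/archive/data/<namespace>/<table>/<encoded-region>/<family>.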

  /**
   * Get the archive directory for a given region under the specified table.
   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName the table name. Cannot be null.
   * @param regiondir the path to the region directory. Cannot be null.
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName, Path regiondir) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);

    // then add on the region path under the archive
    String encodedRegionName = regiondir.getName();
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }
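  // Illustrative note for the method above: for rootDir /hbase, table "ns:t1" and region
  // directory /hbase/data/ns/t1/1234abcd, the result has the shape
  // /hbase/archive/data/ns/t1/1234abcd (assuming the default "archive" directory name).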

  /**
   * Get the archive directory for a given region under the specified table.
   * @param rootDir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName name of the table to archive. Cannot be null.
   * @param encodedRegionName encoded name of the region to archive
   * @return {@link Path} to the directory to archive the given region, or <tt>null</tt> if it
   *         should not be archived
   */
  public static Path getRegionArchiveDir(Path rootDir, TableName tableName,
      String encodedRegionName) {
    // get the archive directory for a table
    Path archiveDir = getTableArchivePath(rootDir, tableName);
    return HRegion.getRegionDir(archiveDir, encodedRegionName);
  }

  /**
   * Get the path to the table's archive directory, based on the configured archive directory.
   * <p>
   * Generally of the form: /hbase/archive/data/[namespace]/[tablename]
   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   */
  public static Path getTableArchivePath(final Path rootdir, final TableName tableName) {
    return FSUtils.getTableDir(getArchivePath(rootdir), tableName);
  }
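  // Illustrative note for the method above:
  //   getTableArchivePath(new Path("/hbase"), TableName.valueOf("ns", "t1"))
  // resolves to /hbase/archive/data/ns/t1 with the default archive directory name.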

  /**
   * Get the path to the table archive directory based on the configured archive directory.
   * <p>
   * It is assumed that the table has already been archived.
   * @param conf {@link Configuration} to read the archive directory property. Cannot be null.
   * @param tableName Name of the table to be archived. Cannot be null.
   * @return {@link Path} to the archive directory for the table
   */
  public static Path getTableArchivePath(final Configuration conf, final TableName tableName)
      throws IOException {
    return FSUtils.getTableDir(getArchivePath(conf), tableName);
  }

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}.
   * @param conf to look for archive directory name and root directory. Cannot be null. Notes for
   *          testing: requires a FileSystem root directory to be specified.
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   * @throws IOException if an unexpected error occurs
   */
  public static Path getArchivePath(Configuration conf) throws IOException {
    return getArchivePath(FSUtils.getRootDir(conf));
  }
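  // Usage sketch for the method above (illustrative, e.g. in a test):
  //   Configuration conf = HBaseConfiguration.create();
  //   conf.set(HConstants.HBASE_DIR, "/hbase");
  //   Path archive = HFileArchiveUtil.getArchivePath(conf);
  // which yields roughly /hbase/archive, qualified against the configured file system.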

  /**
   * Get the full path to the archive directory on the configured
   * {@link org.apache.hadoop.hbase.master.MasterFileSystem}.
   * @param rootdir {@link Path} to the root directory where hbase files are stored (for building
   *          the archive path)
   * @return the full {@link Path} to the archive directory, as defined by the configuration
   */
  private static Path getArchivePath(final Path rootdir) {
    return new Path(rootdir, HConstants.HFILE_ARCHIVE_DIRECTORY);
  }
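  // Note (not from the original docs): HConstants.HFILE_ARCHIVE_DIRECTORY is simply appended to
  // the root dir, so a root dir of /hbase yields /hbase/archive as the archive base directory.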

  /**
   * Extract the table name from the path of an archived store file. Archived store files live
   * under paths of the shape [rootdir]/archive/data/[namespace]/[table]/[region]/[family]/[file],
   * so the table qualifier is the 3rd parent of the file and the namespace is the 4th.
   * @param archivePath path to an archived store file
   * @return the {@link TableName} the file belongs to, or <tt>null</tt> if the path is too short
   */
  public static TableName getTableName(Path archivePath) {
    Path p = archivePath;
    String tbl = null;
    // namespace is the 4th parent of the file; capture the table qualifier (3rd parent) on the way
    for (int i = 0; i < 4; i++) {
      if (p == null) return null;
      if (i == 3) tbl = p.getName();
      p = p.getParent();
    }
    if (p == null) return null;
    return TableName.valueOf(p.getName(), tbl);
  }
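  // Usage sketch for the method above (illustrative file name):
  //   getTableName(new Path("/hbase/archive/data/ns/t1/1234abcd/cf/hfilename"))
  // returns TableName.valueOf("ns", "t1"); paths with too few components return null.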
}