/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.cleaner;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.HBaseFileSystem;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.FSUtils;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

/**
 * Abstract Cleaner that uses a chain of delegates to clean a directory of files.
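 * <p>
 * A minimal sketch of how a subclass might plug into this chore. The chore name, configuration
 * key, and class name below are illustrative only and not part of HBase:
 * <pre>{@code
 * public class ArchiveCleanerChore extends CleanerChore<FileCleanerDelegate> {
 *
 *   public ArchiveCleanerChore(Stoppable stopper, Configuration conf, FileSystem fs, Path dir) {
 *     // wake up every 60 seconds; delegates are read from the "example.cleaner.plugins" key
 *     super("ArchiveCleanerChore", 60 * 1000, stopper, conf, fs, dir, "example.cleaner.plugins");
 *   }
 *
 *   protected boolean validate(Path file) {
 *     // accept every file; real subclasses check the expected file layout
 *     return true;
 *   }
 * }
 * }</pre>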
 * @param <T> Cleaner delegate class that is dynamically loaded from configuration
 */
public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {

  private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());

  private final FileSystem fs;
  private final Path oldFileDir;
  private final Configuration conf;
  List<T> cleanersChain;

  /**
   * @param name name of the chore being run
   * @param sleepPeriod the period of time to sleep between each run
   * @param s the stopper
   * @param conf configuration to use
   * @param fs handle to the FS
   * @param oldFileDir the path to the archived files
   * @param confKey configuration key for the classes to instantiate
   */
  public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
      FileSystem fs, Path oldFileDir, String confKey) {
    super(name, sleepPeriod, s);
    this.fs = fs;
    this.oldFileDir = oldFileDir;
    this.conf = conf;

    initCleanerChain(confKey);
  }

  /**
   * Validate the file to see if it even belongs in the directory. If it is valid, the file is
   * passed through the cleaner delegates; otherwise it is deleted immediately.
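   * <p>
   * A minimal sketch of an implementation; the naming rule shown here is purely illustrative:
   * <pre>{@code
   * protected boolean validate(Path file) {
   *   // treat hidden files (names starting with a dot) as invalid so they are simply removed
   *   return !file.getName().startsWith(".");
   * }
   * }</pre>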
   * @param file full {@link Path} of the file to be checked
   * @return <tt>true</tt> if the file is valid, <tt>false</tt> otherwise
   */
  protected abstract boolean validate(Path file);

  /**
   * Instantiate and initialize all the file cleaners set in the configuration
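   * <p>
   * The value of the key is a comma-separated list of fully qualified class names. A sketch of
   * setting it programmatically; the key and cleaner class below are only an illustration of the
   * expected format:
   * <pre>{@code
   * Configuration conf = HBaseConfiguration.create();
   * conf.setStrings("hbase.master.logcleaner.plugins",
   *   "org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner");
   * }</pre>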
   * @param confKey key to get the file cleaner classes from the configuration
   */
  private void initCleanerChain(String confKey) {
    this.cleanersChain = new LinkedList<T>();
    String[] logCleaners = conf.getStrings(confKey);
    if (logCleaners != null) {
      for (String className : logCleaners) {
        T logCleaner = newFileCleaner(className, conf);
        if (logCleaner != null) {
          LOG.debug("initialize cleaner=" + className);
          this.cleanersChain.add(logCleaner);
        }
      }
    }
  }

  /**
   * A utility method to create a new instance of a {@link FileCleanerDelegate} from the given
   * class name.
   * @param className fully qualified class name of the cleaner delegate
   * @param conf configuration to initialize the new cleaner with
   * @return the new instance, or <tt>null</tt> if it could not be instantiated
   */
  private T newFileCleaner(String className, Configuration conf) {
    try {
      Class<? extends FileCleanerDelegate> c = Class.forName(className).asSubclass(
        FileCleanerDelegate.class);
      @SuppressWarnings("unchecked")
      T cleaner = (T) c.newInstance();
      cleaner.setConf(conf);
      return cleaner;
    } catch (Exception e) {
      LOG.warn("Can NOT create CleanerDelegate: " + className, e);
      // skipping if can't instantiate
      return null;
    }
  }

  @Override
  protected void chore() {
    try {
      FileStatus[] files = FSUtils.listStatus(this.fs, this.oldFileDir);
      checkAndDeleteEntries(files);
    } catch (IOException e) {
      e = RemoteExceptionHandler.checkIOException(e);
      LOG.warn("Error while cleaning old files under " + this.oldFileDir, e);
    }
  }

  /**
   * Loop over the given directory entries and check whether they can be deleted.
   * If an entry is itself a directory, it is checked recursively and is itself deleted only if
   * all of its subentries were deleted (and no new subentries were added in the meantime).
   *
   * @param entries directory entries to check
   * @return true if all entries were successfully deleted
   */
  private boolean checkAndDeleteEntries(FileStatus[] entries) {
    if (entries == null) {
      return true;
    }
    boolean allEntriesDeleted = true;
    List<FileStatus> files = Lists.newArrayListWithCapacity(entries.length);
    for (FileStatus child : entries) {
      Path path = child.getPath();
      if (child.isDir()) {
        // for each subdirectory delete it and all entries if possible
        if (!checkAndDeleteDirectory(path)) {
          allEntriesDeleted = false;
        }
      } else {
        // collect all files to attempt to delete in one batch
        files.add(child);
      }
    }
    if (!checkAndDeleteFiles(files)) {
      allEntriesDeleted = false;
    }
    return allEntriesDeleted;
  }

  /**
   * Attempt to delete a directory and all files under that directory. Each child file is passed
   * through the delegates to see if it can be deleted. If the directory has no children once the
   * cleaners have finished, it is deleted.
   * <p>
   * If new child files are added between checks of the directory, the directory will <b>not</b>
   * be deleted.
   * @param dir directory to check
   * @return <tt>true</tt> if the directory was deleted, <tt>false</tt> otherwise.
   */
  @VisibleForTesting boolean checkAndDeleteDirectory(Path dir) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Checking directory: " + dir);
    }

    try {
      FileStatus[] children = FSUtils.listStatus(fs, dir);
      boolean allChildrenDeleted = checkAndDeleteEntries(children);

      // if the directory still has children, we can't delete it, so we are done
      if (!allChildrenDeleted) return false;
    } catch (IOException e) {
      e = RemoteExceptionHandler.checkIOException(e);
      LOG.warn("Error while listing directory: " + dir, e);
      // couldn't list directory, so don't try to delete, and don't return success
      return false;
    }

    // otherwise, all the children (that we know about) have been deleted, so we should try to
    // delete this directory. However, don't do so recursively so we don't delete files that have
    // been added since we last checked.
    try {
      return HBaseFileSystem.deleteFileFromFileSystem(fs, dir);
    } catch (IOException e) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Couldn't delete directory: " + dir, e);
      }
      // couldn't delete w/o exception, so we can't return success.
      return false;
    }
  }

  /**
   * Run the given files through each of the cleaners to see if they should be deleted, deleting
   * them if so.
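   * <p>
   * A rough sketch of what a delegate in the chain might look like. The class name and the
   * one-hour age check are made up for illustration, and the exact set of interface methods can
   * differ between HBase versions:
   * <pre>{@code
   * public class OlderThanAnHourCleaner implements FileCleanerDelegate {
   *   private Configuration conf;
   *   private volatile boolean stopped = false;
   *
   *   public Iterable<FileStatus> getDeletableFiles(Iterable<FileStatus> files) {
   *     long cutoff = System.currentTimeMillis() - 60 * 60 * 1000L;
   *     List<FileStatus> deletable = Lists.newArrayList();
   *     for (FileStatus file : files) {
   *       // only offer files that have not been modified for at least an hour
   *       if (file.getModificationTime() < cutoff) {
   *         deletable.add(file);
   *       }
   *     }
   *     return deletable;
   *   }
   *
   *   public void setConf(Configuration conf) { this.conf = conf; }
   *   public Configuration getConf() { return conf; }
   *   public void stop(String why) { this.stopped = true; }
   *   public boolean isStopped() { return stopped; }
   * }
   * }</pre>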
   * @param files List of FileStatus for the files to check (and possibly delete)
   * @return true iff successfully deleted all files
   */
  private boolean checkAndDeleteFiles(List<FileStatus> files) {
    // first check to see if the path is valid
    List<FileStatus> validFiles = Lists.newArrayListWithCapacity(files.size());
    List<FileStatus> invalidFiles = Lists.newArrayList();
    for (FileStatus file : files) {
      if (validate(file.getPath())) {
        validFiles.add(file);
      } else {
        LOG.warn("Found a wrongly formatted file: " + file.getPath() + " - will delete it.");
        invalidFiles.add(file);
      }
    }

    Iterable<FileStatus> deletableValidFiles = validFiles;
    // check each of the cleaners for the valid files
    for (T cleaner : cleanersChain) {
      if (cleaner.isStopped() || this.stopper.isStopped()) {
        LOG.warn("A file cleaner " + this.getName()
            + " is stopped, won't delete any more files in: " + this.oldFileDir);
        return false;
      }

      Iterable<FileStatus> filteredFiles = cleaner.getDeletableFiles(deletableValidFiles);

      // trace which cleaner is holding on to each file
      if (LOG.isTraceEnabled()) {
        ImmutableSet<FileStatus> filteredFileSet = ImmutableSet.copyOf(filteredFiles);
        for (FileStatus file : deletableValidFiles) {
          if (!filteredFileSet.contains(file)) {
            LOG.trace(file.getPath() + " is not deletable according to:" + cleaner);
          }
        }
      }

      deletableValidFiles = filteredFiles;
    }

    Iterable<FileStatus> filesToDelete = Iterables.concat(invalidFiles, deletableValidFiles);
    int deletedFileCount = 0;
    for (FileStatus file : filesToDelete) {
      Path filePath = file.getPath();
      if (LOG.isTraceEnabled()) {
        LOG.trace("Removing: " + filePath + " from archive");
      }
      try {
        boolean success = HBaseFileSystem.deleteFileFromFileSystem(fs, filePath);
        if (success) {
          deletedFileCount++;
        } else {
          LOG.warn("Attempted to delete: " + filePath
              + ", but couldn't; the cleaner chain will retry the delete on the next pass.");
        }
      } catch (IOException e) {
        e = RemoteExceptionHandler.checkIOException(e);
        LOG.warn("Error while deleting: " + filePath, e);
      }
    }

    return deletedFileCount == files.size();
  }

  @Override
  public void cleanup() {
    for (T lc : this.cleanersChain) {
      try {
        lc.stop("Exiting");
      } catch (Throwable t) {
        LOG.warn("Stopping", t);
      }
    }
  }
}