/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.cleaner;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.ipc.RemoteException;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

/**
 * Abstract cleaner that uses a chain of delegates to clean a directory of files.
 * @param <T> Cleaner delegate class that is dynamically loaded from configuration
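 * <p>
 * As a purely illustrative sketch (the subclass name, sleep period, and configuration key below
 * are made up for this example), a concrete chore wires the pieces together roughly like this:
 * <pre>{@code
 * public class ExampleCleanerChore extends CleanerChore<FileCleanerDelegate> {
 *   public ExampleCleanerChore(Stoppable stopper, Configuration conf, FileSystem fs, Path dir) {
 *     // run once a minute over the given directory, loading delegates from "example.cleaner.plugins"
 *     super("ExampleCleanerChore", 60 * 1000, stopper, conf, fs, dir, "example.cleaner.plugins");
 *   }
 *
 *   protected boolean validate(Path file) {
 *     return true; // a real subclass checks the expected file naming convention
 *   }
 * }
 * }</pre>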
 */
public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore {

  private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName());

  private final FileSystem fs;
  private final Path oldFileDir;
  private final Configuration conf;
  protected List<T> cleanersChain;

  /**
   * @param name name of the chore being run
   * @param sleepPeriod the period of time to sleep between each run
   * @param s the stopper
   * @param conf configuration to use
   * @param fs handle to the FS
   * @param oldFileDir the path to the archived files
   * @param confKey configuration key for the classes to instantiate
   */
  public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
      FileSystem fs, Path oldFileDir, String confKey) {
    super(name, sleepPeriod, s);
    this.fs = fs;
    this.oldFileDir = oldFileDir;
    this.conf = conf;

    initCleanerChain(confKey);
  }

  /**
   * Validate the file to see if it even belongs in the directory. If it is valid, the file is
   * passed through the cleaner delegates; otherwise it is deleted outright.
   * @param file full {@link Path} of the file to be checked
   * @return <tt>true</tt> if the file is valid, <tt>false</tt> otherwise
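   * <p>
   * A minimal sketch of an implementation, purely for illustration (the suffix rule here is
   * hypothetical; real subclasses check the naming conventions of the directory they clean):
   * <pre>{@code
   * protected boolean validate(Path file) {
   *   // hypothetical rule: anything not ending in ".tmp" is considered well formed
   *   return !file.getName().endsWith(".tmp");
   * }
   * }</pre>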
   */
  protected abstract boolean validate(Path file);

  /**
   * Instantiate and initialize all the file cleaners set in the configuration.
   * @param confKey key to get the file cleaner classes from the configuration
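   * <p>
   * The value under that key is expected to be a comma-separated list of fully qualified
   * {@link FileCleanerDelegate} class names, e.g. (key and class shown only as an example):
   * <pre>{@code
   * conf.setStrings("hbase.master.logcleaner.plugins",
   *   "org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner");
   * }</pre>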
   */
  private void initCleanerChain(String confKey) {
    this.cleanersChain = new LinkedList<T>();
    String[] logCleaners = conf.getStrings(confKey);
    if (logCleaners != null) {
      for (String className : logCleaners) {
        T logCleaner = newFileCleaner(className, conf);
        if (logCleaner != null) {
          LOG.debug("initialize cleaner=" + className);
          this.cleanersChain.add(logCleaner);
        }
      }
    }
  }

  /**
   * A utility method to create a new instance of a {@link FileCleanerDelegate} based on its
   * class name.
   * @param className fully qualified class name of the cleaner delegate
   * @param conf configuration to set on the new delegate
   * @return the new instance, or <tt>null</tt> if it could not be instantiated
   */
  private T newFileCleaner(String className, Configuration conf) {
    try {
      Class<? extends FileCleanerDelegate> c = Class.forName(className).asSubclass(
        FileCleanerDelegate.class);
      @SuppressWarnings("unchecked")
      T cleaner = (T) c.newInstance();
      cleaner.setConf(conf);
      return cleaner;
    } catch (Exception e) {
      LOG.warn("Cannot create CleanerDelegate: " + className, e);
      // skip the delegate if it can't be instantiated
      return null;
    }
  }

  @Override
  protected void chore() {
    try {
      FileStatus[] files = FSUtils.listStatus(this.fs, this.oldFileDir);
      checkAndDeleteEntries(files);
    } catch (IOException e) {
      e = e instanceof RemoteException ?
              ((RemoteException)e).unwrapRemoteException() : e;
      LOG.warn("Error while cleaning the logs", e);
    }
  }

  /**
   * Loop over the given directory entries and check whether they can be deleted.
   * If an entry is itself a directory, it is checked recursively and is deleted only if
   * all of its subentries were deleted (and no new subentries were added in the meantime).
   *
   * @param entries directory entries to check
   * @return true if all entries were successfully deleted
   */
  private boolean checkAndDeleteEntries(FileStatus[] entries) {
    if (entries == null) {
      return true;
    }
    boolean allEntriesDeleted = true;
    List<FileStatus> files = Lists.newArrayListWithCapacity(entries.length);
    for (FileStatus child : entries) {
      Path path = child.getPath();
      if (child.isDirectory()) {
        // for each subdirectory delete it and all entries if possible
        if (!checkAndDeleteDirectory(path)) {
          allEntriesDeleted = false;
        }
      } else {
        // collect all files to attempt to delete in one batch
        files.add(child);
      }
    }
    if (!checkAndDeleteFiles(files)) {
      allEntriesDeleted = false;
    }
    return allEntriesDeleted;
  }

  /**
   * Attempt to delete a directory and all files under that directory. Each child file is passed
   * through the delegates to see if it can be deleted. If the directory has no children once the
   * cleaners have finished, it is deleted.
   * <p>
   * If new child files are added between checks of the directory, the directory will <b>not</b>
   * be deleted.
   * @param dir directory to check
   * @return <tt>true</tt> if the directory was deleted, <tt>false</tt> otherwise.
   */
  @VisibleForTesting
  boolean checkAndDeleteDirectory(Path dir) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Checking directory: " + dir);
    }

    try {
      FileStatus[] children = FSUtils.listStatus(fs, dir);
      boolean allChildrenDeleted = checkAndDeleteEntries(children);

      // if the directory still has children, we can't delete it, so we are done
      if (!allChildrenDeleted) return false;
    } catch (IOException e) {
      e = e instanceof RemoteException ?
              ((RemoteException)e).unwrapRemoteException() : e;
      LOG.warn("Error while listing directory: " + dir, e);
      // couldn't list directory, so don't try to delete, and don't return success
      return false;
    }

    // otherwise, all the children (that we know about) have been deleted, so we should try to
    // delete this directory. However, don't do so recursively so we don't delete files that have
    // been added since we last checked.
    try {
      return fs.delete(dir, false);
    } catch (IOException e) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Couldn't delete directory: " + dir, e);
      }
      // the delete threw an exception, so we can't claim success
      return false;
    }
  }

  /**
   * Run the given files through each of the cleaners to see if they should be deleted, deleting
   * them if so.
   * @param files List of FileStatus for the files to check (and possibly delete)
   * @return true iff all files were successfully deleted
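   * <p>
   * A delegate signals which of the files it is given may be removed by returning them from
   * {@code getDeletableFiles}. As a purely illustrative sketch (the age rule is hypothetical),
   * a delegate might filter on modification time like this:
   * <pre>{@code
   * public Iterable<FileStatus> getDeletableFiles(Iterable<FileStatus> files) {
   *   return Iterables.filter(files, new Predicate<FileStatus>() {
   *     public boolean apply(FileStatus file) {
   *       // hypothetical rule: only files older than one hour may be deleted
   *       return file.getModificationTime() < System.currentTimeMillis() - 3600L * 1000;
   *     }
   *   });
   * }
   * }</pre>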
   */
  private boolean checkAndDeleteFiles(List<FileStatus> files) {
    // first check to see if each path is valid
    List<FileStatus> validFiles = Lists.newArrayListWithCapacity(files.size());
    List<FileStatus> invalidFiles = Lists.newArrayList();
    for (FileStatus file : files) {
      if (validate(file.getPath())) {
        validFiles.add(file);
      } else {
        LOG.warn("Found a wrongly formatted file: " + file.getPath() + " - will delete it.");
        invalidFiles.add(file);
      }
    }

    Iterable<FileStatus> deletableValidFiles = validFiles;
    // check each of the cleaners for the valid files
    for (T cleaner : cleanersChain) {
      if (cleaner.isStopped() || this.getStopper().isStopped()) {
        LOG.warn("A file cleaner " + this.getName() + " is stopped, won't delete any more files in: "
            + this.oldFileDir);
        return false;
      }

      Iterable<FileStatus> filteredFiles = cleaner.getDeletableFiles(deletableValidFiles);

      // trace which cleaner is holding on to each file
      if (LOG.isTraceEnabled()) {
        ImmutableSet<FileStatus> filteredFileSet = ImmutableSet.copyOf(filteredFiles);
        for (FileStatus file : deletableValidFiles) {
          if (!filteredFileSet.contains(file)) {
            LOG.trace(file.getPath() + " is not deletable according to: " + cleaner);
          }
        }
      }

      deletableValidFiles = filteredFiles;
    }

    Iterable<FileStatus> filesToDelete = Iterables.concat(invalidFiles, deletableValidFiles);
    int deletedFileCount = 0;
    for (FileStatus file : filesToDelete) {
      Path filePath = file.getPath();
      if (LOG.isTraceEnabled()) {
        LOG.trace("Removing: " + filePath + " from archive");
      }
      try {
        boolean success = this.fs.delete(filePath, false);
        if (success) {
          deletedFileCount++;
        } else {
          LOG.warn("Attempted to delete: " + filePath
              + ", but couldn't. Will retry on the next cleaner chain run.");
        }
      } catch (IOException e) {
        e = e instanceof RemoteException ?
                  ((RemoteException)e).unwrapRemoteException() : e;
        LOG.warn("Error while deleting: " + filePath, e);
      }
    }

    return deletedFileCount == files.size();
  }

  @Override
  public void cleanup() {
    for (T lc : this.cleanersChain) {
      try {
        lc.stop("Exiting");
      } catch (Throwable t) {
        LOG.warn("Error stopping cleaner " + lc, t);
      }
    }
  }
}