/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.compactions;

import static org.apache.hadoop.hbase.regionserver.Store.NO_PRIORITY;

import java.util.Collection;
import java.util.Collections;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

/**
 * This class holds all logical details necessary to run a compaction.
 */
@InterfaceAudience.Private
public class CompactionRequestImpl implements CompactionRequest {

  // was this compaction promoted to an off-peak compaction?
  private boolean isOffPeak = false;

  private enum DisplayCompactionType {
    MINOR,
    ALL_FILES,
    MAJOR
  }

  private DisplayCompactionType isMajor = DisplayCompactionType.MINOR;
  private int priority = NO_PRIORITY;
  private Collection<HStoreFile> filesToCompact;
  private boolean isAfterSplit = false;

  // Time at which this compaction request was created (selected).
  private long selectionTime;
  private String regionName = "";
  private String storeName = "";
  private long totalSize = -1L;
  private CompactionLifeCycleTracker tracker = CompactionLifeCycleTracker.DUMMY;
  private Consumer<Path> writerCreationTracker;

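  /**
   * Creates a compaction request for the given store files.
   * @param files the store files to compact; must not be null
   */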
  public CompactionRequestImpl(Collection<HStoreFile> files) {
    this.selectionTime = EnvironmentEdgeManager.currentTime();
    this.filesToCompact =
      Preconditions.checkNotNull(files, "files for compaction cannot be null");
    recalculateSize();
  }

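  /**
   * Replaces the set of files to compact and recalculates the total compaction size.
   * @param files the new store files to compact; must not be null
   */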
  public void updateFiles(Collection<HStoreFile> files) {
    this.filesToCompact =
      Preconditions.checkNotNull(files, "files for compaction cannot be null");
    recalculateSize();
  }

  @Override
  public Collection<HStoreFile> getFiles() {
    return Collections.unmodifiableCollection(this.filesToCompact);
  }

  /**
   * Sets the region/store name, for logging.
   */
  public void setDescription(String regionName, String storeName) {
    this.regionName = regionName;
    this.storeName = storeName;
  }

  /** Gets the total size of all StoreFiles in compaction */
  @Override
  public long getSize() {
    return totalSize;
  }

  @Override
  public boolean isAllFiles() {
    return this.isMajor == DisplayCompactionType.MAJOR
      || this.isMajor == DisplayCompactionType.ALL_FILES;
  }

  @Override
  public boolean isMajor() {
    return this.isMajor == DisplayCompactionType.MAJOR;
  }

  /** Gets the priority for the request */
  @Override
  public int getPriority() {
    return priority;
  }

  /** Sets the priority for the request */
  public void setPriority(int p) {
    this.priority = p;
  }

  @Override
  public boolean isOffPeak() {
    return this.isOffPeak;
  }

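  /** Sets whether this compaction has been promoted to off-peak. */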
  public void setOffPeak(boolean value) {
    this.isOffPeak = value;
  }

  @Override
  public long getSelectionTime() {
    return this.selectionTime;
  }

  /**
   * Specify if this compaction should be a major compaction based on the state of the store.
   * @param isMajor    <tt>true</tt> if the system determines that this compaction should be a
   *                   major compaction
   * @param isAllFiles <tt>true</tt> if all of the store's files are selected for this compaction
   */
  public void setIsMajor(boolean isMajor, boolean isAllFiles) {
    // A major compaction must include all files.
    assert isAllFiles || !isMajor;
    this.isMajor = !isAllFiles
      ? DisplayCompactionType.MINOR
      : (isMajor ? DisplayCompactionType.MAJOR : DisplayCompactionType.ALL_FILES);
  }

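  /** Sets the tracker that follows the life cycle of this compaction. */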
  public void setTracker(CompactionLifeCycleTracker tracker) {
    this.tracker = tracker;
  }

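  /** Gets the tracker that follows the life cycle of this compaction. */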
  public CompactionLifeCycleTracker getTracker() {
    return tracker;
  }

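  /** Gets the callback used to track the paths of writers created for this compaction. */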
  public Consumer<Path> getWriterCreationTracker() {
    return writerCreationTracker;
  }

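  /** Sets the callback used to track the paths of writers created for this compaction. */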
  public void setWriterCreationTracker(Consumer<Path> writerCreationTracker) {
    this.writerCreationTracker = writerCreationTracker;
  }

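  /** Returns whether this compaction request was made after a region split. */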
  public boolean isAfterSplit() {
    return isAfterSplit;
  }

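  /** Marks whether this compaction request was made after a region split. */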
  public void setAfterSplit(boolean afterSplit) {
    isAfterSplit = afterSplit;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((filesToCompact == null) ? 0 : filesToCompact.hashCode());
    result = prime * result + ((isMajor == null) ? 0 : isMajor.hashCode());
    result = prime * result + (isOffPeak ? 1231 : 1237);
    result = prime * result + priority;
    result = prime * result + ((regionName == null) ? 0 : regionName.hashCode());
    result = prime * result + (int) (selectionTime ^ (selectionTime >>> 32));
    result = prime * result + ((storeName == null) ? 0 : storeName.hashCode());
    result = prime * result + (int) (totalSize ^ (totalSize >>> 32));
    result = prime * result + ((tracker == null) ? 0 : tracker.hashCode());
    result = prime * result + (isAfterSplit ? 1231 : 1237);
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    CompactionRequestImpl other = (CompactionRequestImpl) obj;
    if (filesToCompact == null) {
      if (other.filesToCompact != null) {
        return false;
      }
    } else if (!filesToCompact.equals(other.filesToCompact)) {
      return false;
    }
    if (isMajor != other.isMajor) {
      return false;
    }
    if (isOffPeak != other.isOffPeak) {
      return false;
    }
    if (priority != other.priority) {
      return false;
    }
    if (regionName == null) {
      if (other.regionName != null) {
        return false;
      }
    } else if (!regionName.equals(other.regionName)) {
      return false;
    }
    if (selectionTime != other.selectionTime) {
      return false;
    }
    if (storeName == null) {
      if (other.storeName != null) {
        return false;
      }
    } else if (!storeName.equals(other.storeName)) {
      return false;
    }
    if (totalSize != other.totalSize) {
      return false;
    }
    if (isAfterSplit != other.isAfterSplit) {
      return false;
    }
    if (tracker == null) {
      if (other.tracker != null) {
        return false;
      }
    } else if (!tracker.equals(other.tracker)) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    String fsList = filesToCompact.stream().filter(f -> f.getReader() != null)
      .map(f -> TraditionalBinaryPrefix.long2String(f.getReader().length(), "", 1))
      .collect(Collectors.joining(", "));

    return "regionName=" + regionName + ", storeName=" + storeName + ", fileCount="
      + this.getFiles().size() + ", fileSize="
      + TraditionalBinaryPrefix.long2String(totalSize, "", 1)
      + ((fsList.isEmpty()) ? "" : " (" + fsList + ")") + ", priority=" + priority + ", time="
      + selectionTime;
  }

  /**
   * Recalculate the size of the compaction based on current files.
   */
  private void recalculateSize() {
    this.totalSize = filesToCompact.stream().map(HStoreFile::getReader)
      .mapToLong(r -> r != null ? r.length() : 0L).sum();
  }
}