/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.Locale;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Compression validation test.  Checks that the given compression codec is installed
 * and working on this node.  Be sure to run it on every node in your cluster.
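 * <p>
 * For example, to check the gzip codec against a local file:
 * <pre>
 *   hbase org.apache.hadoop.hbase.util.CompressionTest file:///tmp/testfile gz
 * </pre>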
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
@InterfaceStability.Evolving
public class CompressionTest {
  private static final Logger LOG = LoggerFactory.getLogger(CompressionTest.class);

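  /**
   * Checks whether the named codec can be instantiated on this node.
   * @param codec compression codec name, case-insensitive (e.g. "gz", "snappy")
   * @return true if the codec is known and a compressor could be created, false otherwise
   */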
  public static boolean testCompression(String codec) {
    codec = codec.toLowerCase(Locale.ROOT);

    Compression.Algorithm a;

    try {
      a = Compression.getCompressionAlgorithmByName(codec);
    } catch (IllegalArgumentException e) {
      LOG.warn("Codec type: {} is not known", codec);
      return false;
    }

    try {
      testCompression(a);
      return true;
    } catch (IOException e) {
      LOG.warn("Can't instantiate codec: {}", codec, e);
      return false;
    }
  }

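  // Per-algorithm result cache. Boxed Booleans are used so that null can mean
  // "not yet tested", TRUE "passed", and FALSE "previously failed"; elements of
  // an object array default to null, so no explicit initialization is needed.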
  private static final Boolean[] compressionTestResults
      = new Boolean[Compression.Algorithm.values().length];

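  /**
   * Verifies that a compressor can be obtained for the given algorithm. Results are
   * cached, so each algorithm is only probed once per JVM.
   * @param algo the compression algorithm to test
   * @throws IOException if the codec failed to load, in this or a previous attempt
   */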
  public static void testCompression(Compression.Algorithm algo)
      throws IOException {
    if (compressionTestResults[algo.ordinal()] != null) {
      if (compressionTestResults[algo.ordinal()]) {
        return; // already passed the test; don't run it again.
      } else {
        // previously failed.
        throw new DoNotRetryIOException("Compression algorithm '" + algo.getName() +
            "' previously failed test.");
      }
    }

    try {
      Compressor c = algo.getCompressor();
      algo.returnCompressor(c);
      compressionTestResults[algo.ordinal()] = true; // passed
    } catch (Throwable t) {
      compressionTestResults[algo.ordinal()] = false; // failed
      throw new DoNotRetryIOException(t);
    }
  }

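  // Default relative path for the test HFile; note that main() builds its own
  // Path from the command-line argument instead.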
  protected static Path path = new Path(".hfile-comp-test");

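  /** Prints command-line usage to stderr and exits with a non-zero status. */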
  public static void usage() {
    System.err.println(
      "Usage: CompressionTest <path> " +
      StringUtils.join(Compression.Algorithm.values(), "|").toLowerCase(Locale.ROOT) +
      "\n" +
      "For example:\n" +
      "  hbase " + CompressionTest.class.getName() + " file:///tmp/testfile gz\n");
    System.exit(1);
  }

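  /**
   * Writes a single-cell HFile at {@code path} with the given codec, reads it back,
   * and verifies that the cell's row survives the round trip.
   */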
  public static void doSmokeTest(FileSystem fs, Path path, String codec)
      throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HFileContext context = new HFileContextBuilder()
        .withCompression(HFileWriterImpl.compressionByName(codec)).build();
    HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    // Write any old Cell...
    final byte[] rowKey = Bytes.toBytes("compressiontestkey");
    Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
    writer.append(c);
    writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
    writer.close();
    Cell cc = null;
    HFile.Reader reader = HFile.createReader(fs, path, CacheConfig.DISABLED, true, conf);
    try {
      reader.loadFileInfo();
      HFileScanner scanner = reader.getScanner(false, true);
      scanner.seekTo(); // position at the start of the file
      // Read the cell back and check that the row matches what was written.
      cc = scanner.getCell();
      if (CellComparator.getInstance().compareRows(c, cc) != 0) {
        throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
      }
    } finally {
      reader.close();
    }
  }

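  /** Command-line entry point; see {@link #usage()} for the expected arguments. */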
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      usage(); // exits with a non-zero status
    }

    Configuration conf = new Configuration();
    Path path = new Path(args[0]);
    FileSystem fs = path.getFileSystem(conf);
    if (fs.exists(path)) {
      System.err.println("The specified path exists, aborting!");
      System.exit(1);
    }

    try {
      doSmokeTest(fs, path, args[1]);
    } finally {
      fs.delete(path, false);
    }
    System.out.println("SUCCESS");
  }
}