/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.io.compress.Compressor;

/**
 * Compression validation test.  Checks that compression is working.  Be sure
 * to run it on every node in your cluster.
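 * <p>For example, to check the gz codec against a local file (this is the
 * same invocation that {@link #usage()} prints):
 * <pre>
 *   hbase org.apache.hadoop.hbase.util.CompressionTest file:///tmp/testfile gz
 * </pre>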
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class CompressionTest {
  static final Log LOG = LogFactory.getLog(CompressionTest.class);

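  /**
   * Returns true if a compressor for the named codec can be obtained.
   * Logs a warning and returns false if the name is not a known
   * {@link Compression.Algorithm} or the codec cannot be instantiated.
   */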
  public static boolean testCompression(String codec) {
    codec = codec.toLowerCase();

    Compression.Algorithm a;

    try {
      a = Compression.getCompressionAlgorithmByName(codec);
    } catch (IllegalArgumentException e) {
      LOG.warn("Codec type: " + codec + " is not known");
      return false;
    }

    try {
      testCompression(a);
      return true;
    } catch (IOException ignored) {
      LOG.warn("Can't instantiate codec: " + codec, ignored);
      return false;
    }
  }

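  /**
   * One slot per {@link Compression.Algorithm} ordinal: null means the codec
   * has not been tested yet, TRUE means it passed, FALSE means it failed.
   */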
  private static final Boolean[] compressionTestResults
      = new Boolean[Compression.Algorithm.values().length];
  static {
    for (int i = 0; i < compressionTestResults.length; ++i) {
      compressionTestResults[i] = null;
    }
  }

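  /**
   * Checks that a compressor can be borrowed from and returned to the pool
   * for the given algorithm. The result is cached, so each algorithm is
   * exercised at most once per JVM; a previously failed algorithm fails
   * immediately.
   *
   * @throws IOException if the codec failed, on this or a previous attempt
   */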
  public static void testCompression(Compression.Algorithm algo)
      throws IOException {
    if (compressionTestResults[algo.ordinal()] != null) {
      if (compressionTestResults[algo.ordinal()]) {
        return; // already passed test, don't do it again.
      } else {
        // failed.
        throw new IOException("Compression algorithm '" + algo.getName() + "'" +
            " previously failed test.");
      }
    }

    try {
      Compressor c = algo.getCompressor();
      algo.returnCompressor(c);
      compressionTestResults[algo.ordinal()] = true; // passes
    } catch (Throwable t) {
      compressionTestResults[algo.ordinal()] = false; // failure
      throw new IOException(t);
    }
  }

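  /**
   * Working file for the compression smoke test. Note that main() uses the
   * path supplied on the command line, not this field.
   */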
  protected static Path path = new Path(".hfile-comp-test");

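  /** Prints command-line usage to stderr and exits with status 1. */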
  public static void usage() {
    System.err.println(
      "Usage: CompressionTest <path> " +
      StringUtils.join(Compression.Algorithm.values(), "|").toLowerCase() +
      "\n" +
      "For example:\n" +
      "  hbase " + CompressionTest.class.getName() + " file:///tmp/testfile gz\n");
    System.exit(1);
  }

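  /**
   * Writes a single-cell HFile at {@code path} using the named codec, then
   * reads it back and verifies that the row returned matches the row written.
   */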
  public static void doSmokeTest(FileSystem fs, Path path, String codec)
      throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HFileContext context = new HFileContextBuilder()
        .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
    HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    // Write any-old Cell...
    final byte[] rowKey = Bytes.toBytes("compressiontestkey");
    Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
    writer.append(c);
    writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
    writer.close();
    Cell cc = null;
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    try {
      reader.loadFileInfo();
      HFileScanner scanner = reader.getScanner(false, true);
      scanner.next();
      // Scanner does not do Cells yet. Do below for now till fixed.
      cc = scanner.getKeyValue();
      if (CellComparator.compareRows(c, cc) != 0) {
        throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
      }
    } finally {
      reader.close();
    }
  }

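  /**
   * Runs the smoke test against the supplied path and codec, deleting the
   * test file afterwards, and prints SUCCESS if the round trip completed.
   */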
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      usage(); // usage() never returns; it exits with status 1.
    }

    Configuration conf = new Configuration();
    Path path = new Path(args[0]);
    FileSystem fs = path.getFileSystem(conf);
    try {
      doSmokeTest(fs, path, args[1]);
    } finally {
      fs.delete(path, false);
    }
    System.out.println("SUCCESS");
  }
}