/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.Locale;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.io.compress.Compressor;
/**
 * Compression validation test.  Checks that a given compression codec is
 * actually working.  Be sure to run it on every node in your cluster.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class CompressionTest {
  static final Log LOG = LogFactory.getLog(CompressionTest.class);

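  /**
   * Returns true if the named codec can be resolved and its compressor
   * instantiated on this node, false otherwise.
   */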
  public static boolean testCompression(String codec) {
    // Use a fixed locale so codec-name matching does not depend on the
    // platform's default locale.
    codec = codec.toLowerCase(Locale.ROOT);

    Compression.Algorithm a;

    try {
      a = Compression.getCompressionAlgorithmByName(codec);
    } catch (IllegalArgumentException e) {
      LOG.warn("Codec type: " + codec + " is not known");
      return false;
    }

    try {
      testCompression(a);
      return true;
    } catch (IOException ignored) {
      LOG.warn("Can't instantiate codec: " + codec, ignored);
      return false;
    }
  }

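  /**
   * Cache of per-algorithm test results, indexed by
   * {@link Compression.Algorithm#ordinal()}.  A null entry means the
   * algorithm has not been tested yet; true/false records a previous
   * pass/failure so each codec is only exercised once per JVM.
   */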
  private final static Boolean[] compressionTestResults
      = new Boolean[Compression.Algorithm.values().length];
  static {
    for (int i = 0; i < compressionTestResults.length; ++i) {
      compressionTestResults[i] = null;
    }
  }

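  /**
   * Checks that a compressor can be obtained for the given algorithm,
   * consulting the result cache first so each codec is tested at most once.
   *
   * @throws IOException if the algorithm fails the test, now or on a
   *           previous attempt
   */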
  public static void testCompression(Compression.Algorithm algo)
      throws IOException {
    if (compressionTestResults[algo.ordinal()] != null) {
      if (compressionTestResults[algo.ordinal()]) {
        return; // already passed test, don't do it again.
      } else {
        // failed.
        throw new IOException("Compression algorithm '" + algo.getName() + "'" +
            " previously failed test.");
      }
    }

    try {
      Compressor c = algo.getCompressor();
      algo.returnCompressor(c);
      compressionTestResults[algo.ordinal()] = true; // passes
    } catch (Throwable t) {
      compressionTestResults[algo.ordinal()] = false; // failure
      throw new IOException(t);
    }
  }

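  /**
   * Scratch location for the smoke test's temporary HFile.  Note that
   * {@link #main(String[])} takes the path from the command line instead.
   */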
  protected static Path path = new Path(".hfile-comp-test");

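  /**
   * Prints command-line usage, listing the supported compression algorithms,
   * and exits with a non-zero status.
   */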
  public static void usage() {
    System.err.println(
      "Usage: CompressionTest <path> " +
      StringUtils.join(Compression.Algorithm.values(), "|").toLowerCase() +
      "\n" +
      "For example:\n" +
      "  hbase " + CompressionTest.class.getName() + " file:///tmp/testfile gz\n");
    System.exit(1);
  }

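  /**
   * Writes a one-cell HFile at the given path using the named codec, then
   * reads it back and verifies the key survived the round trip.  The caller
   * is responsible for deleting the file afterwards.
   */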
  public static void doSmokeTest(FileSystem fs, Path path, String codec)
      throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HFileContext context = new HFileContextBuilder()
        .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
    HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
    writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
    writer.close();

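    // Re-open the file and check that the key we wrote comes back intact.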
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    reader.loadFileInfo();
    byte[] key = reader.getFirstKey();
    boolean rc = Bytes.toString(key).equals("testkey");
    reader.close();

    if (!rc) {
      throw new Exception("Read back incorrect result: " +
                          Bytes.toStringBinary(key));
    }
  }

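  /**
   * Command-line entry point.  For example, to verify that gzip compression
   * works against the local filesystem:
   *
   * <pre>
   * hbase org.apache.hadoop.hbase.util.CompressionTest file:///tmp/testfile gz
   * </pre>
   */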
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      usage(); // prints usage and exits
    }

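    // Run the smoke test against the supplied path, removing the scratch
    // file whether the test passes or fails.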
    Configuration conf = new Configuration();
    Path path = new Path(args[0]);
    FileSystem fs = path.getFileSystem(conf);
    try {
      doSmokeTest(fs, path, args[1]);
    } finally {
      fs.delete(path, false);
    }
    System.out.println("SUCCESS");
  }
}