/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
public class TestFixedFileTrailer {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestFixedFileTrailer.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class);
  private static final int MAX_COMPARATOR_NAME_LENGTH = 128;

  /**
   * The number of used fields by version. Indexed by version minus two.
   * Min version that we support is V2
   */
  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };

  private HBaseTestingUtility util = new HBaseTestingUtility();
  private FileSystem fs;
  private ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private int version;

  static {
    assert NUM_FIELDS_BY_VERSION.length == HFile.MAX_FORMAT_VERSION
        - HFile.MIN_FORMAT_VERSION + 1;
  }

  public TestFixedFileTrailer(int version) {
    this.version = version;
  }

  @Rule
  public ExpectedException expectedEx = ExpectedException.none();

  @Parameters
  public static Collection<Object[]> getParameters() {
    List<Object[]> versionsToTest = new ArrayList<>();
    for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v)
      versionsToTest.add(new Integer[] { v });
    return versionsToTest;
  }

  @Before
  public void setUp() throws IOException {
    fs = FileSystem.get(util.getConfiguration());
  }

  @Test
  public void testComparatorIsHBase1Compatible() {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
    assertEquals(CellComparatorImpl.COMPARATOR.getClass().getName(), t.getComparatorClassName());
    HFileProtos.FileTrailerProto pb = t.toProtobuf();
    assertEquals(KeyValue.COMPARATOR.getClass().getName(), pb.getComparatorClassName());
    t.setComparatorClass(CellComparatorImpl.MetaCellComparator.META_COMPARATOR.getClass());
    pb = t.toProtobuf();
    assertEquals(KeyValue.META_COMPARATOR.getClass().getName(),
        pb.getComparatorClassName());
  }

  @Test
  public void testCreateComparator() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    try {
      assertEquals(CellComparatorImpl.class,
          t.createComparator(KeyValue.COMPARATOR.getLegacyKeyComparatorName()).getClass());
      assertEquals(CellComparatorImpl.class,
          t.createComparator(KeyValue.COMPARATOR.getClass().getName()).getClass());
      assertEquals(CellComparatorImpl.class,
          t.createComparator(CellComparator.class.getName()).getClass());
      assertEquals(CellComparatorImpl.MetaCellComparator.class,
          t.createComparator(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName()).getClass());
      assertEquals(CellComparatorImpl.MetaCellComparator.class,
          t.createComparator(KeyValue.META_COMPARATOR.getClass().getName()).getClass());
      assertEquals(CellComparatorImpl.MetaCellComparator.class, t.createComparator(
          CellComparatorImpl.MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass());
      assertNull(t.createComparator(Bytes.BYTES_RAWCOMPARATOR.getClass().getName()));
      assertNull(t.createComparator("org.apache.hadoop.hbase.KeyValue$RawBytesComparator"));
    } catch (IOException e) {
      fail("Unexpected exception while testing FixedFileTrailer#createComparator()");
    }

    // Test an invalid comparatorClassName
    expectedEx.expect(IOException.class);
    t.createComparator("");
  }

  @Test
  public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version,
        HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1);

    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.

    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);

    t.setTotalUncompressedBytes(129731987);

    {
      DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
      t.serialize(dos);
      dos.flush();
      assertEquals(dos.size(), FixedFileTrailer.getTrailerSize(version));
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();

    assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing, trying to read.
    {
      DataInputStream dis = new DataInputStream(bais);
      FixedFileTrailer t2 = new FixedFileTrailer(version,
          HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
      t2.deserialize(dis);
      assertEquals(-1, bais.read()); // Ensure we have read everything.
      checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);

    {
      for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1,
          HFile.MAX_FORMAT_VERSION + 1}) {
        bytes[bytes.length - 1] = invalidVersion;
        writeTrailer(trailerPath, null, bytes);
        try {
          readTrailer(trailerPath);
          fail("Exception expected");
        } catch (IllegalArgumentException ex) {
          // Make it easy to debug this.
          String msg = ex.getMessage();
          String cleanMsg = msg.replaceAll(
              "^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
          assertEquals("Actual exception message is \"" + msg + "\".\n" +
              "Cleaned-up message", // will be followed by " expected: ..."
              "Invalid HFile version: " + invalidVersion, cleanMsg);
          LOG.info("Got an expected exception: " + msg);
        }
      }
    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals("Invalid number of fields in the string representation "
        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 2],
        trailerStr.split(", ").length);
    assertEquals(trailerStr, t4.toString());
  }

  @Test
  public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
      FixedFileTrailer t = new FixedFileTrailer(version,
          HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
      t.setDataIndexCount(3);
      t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
      t.setLastDataBlockOffset(291);
      t.setNumDataIndexLevels(3);
      t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
      t.setUncompressedDataIndexSize(827398717L); // Something random.
      t.setLoadOnOpenOffset(128);
      t.setMetaIndexCount(7);
      t.setTotalUncompressedBytes(129731987);

      {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        serializeAsWritable(dos, t);
        dos.flush();
        assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
      }

      byte[] bytes = baos.toByteArray();
      baos.reset();
      assertEquals(bytes.length, FixedFileTrailer.getTrailerSize(version));

      ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
      {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 = new FixedFileTrailer(version,
            HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
      }
    }
  }

  // Copied from FixedFileTrailer for testing the reading part of
  // FixedFileTrailer of non PB serialized FFTs.
  private void serializeAsWritable(DataOutputStream output, FixedFileTrailer fft)
      throws IOException {
    BlockType.TRAILER.write(output);
    output.writeLong(fft.getFileInfoOffset());
    output.writeLong(fft.getLoadOnOpenDataOffset());
    output.writeInt(fft.getDataIndexCount());
    output.writeLong(fft.getUncompressedDataIndexSize());
    output.writeInt(fft.getMetaIndexCount());
    output.writeLong(fft.getTotalUncompressedBytes());
    output.writeLong(fft.getEntryCount());
    output.writeInt(fft.getCompressionCodec().ordinal());
    output.writeInt(fft.getNumDataIndexLevels());
    output.writeLong(fft.getFirstDataBlockOffset());
    output.writeLong(fft.getLastDataBlockOffset());
    Bytes.writeStringFixedSize(output, fft.getComparatorClassName(), MAX_COMPARATOR_NAME_LENGTH);
    output.writeInt(FixedFileTrailer.materializeVersion(fft.getMajorVersion(),
        fft.getMinorVersion()));
  }

  private FixedFileTrailer readTrailer(Path trailerPath) throws IOException {
    FSDataInputStream fsdis = fs.open(trailerPath);
    FixedFileTrailer trailerRead = FixedFileTrailer.readFromStream(fsdis,
        fs.getFileStatus(trailerPath).getLen());
    fsdis.close();
    return trailerRead;
  }

  private void writeTrailer(Path trailerPath, FixedFileTrailer t,
      byte[] useBytesInstead) throws IOException {
    assert (t == null) != (useBytesInstead == null); // Expect one non-null.

    FSDataOutputStream fsdos = fs.create(trailerPath);
    fsdos.write(135); // to make deserializer's job less trivial
    if (useBytesInstead != null) {
      fsdos.write(useBytesInstead);
    } else {
      t.serialize(fsdos);
    }
    fsdos.close();
  }

  private void checkLoadedTrailer(int version, FixedFileTrailer expected,
      FixedFileTrailer loaded) throws IOException {
    assertEquals(version, loaded.getMajorVersion());
    assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount());

    assertEquals(Math.min(expected.getEntryCount(),
        version == 1 ? Integer.MAX_VALUE : Long.MAX_VALUE),
        loaded.getEntryCount());

    if (version == 1) {
      assertEquals(expected.getFileInfoOffset(), loaded.getFileInfoOffset());
    }

    if (version == 2) {
      assertEquals(expected.getLastDataBlockOffset(),
          loaded.getLastDataBlockOffset());
      assertEquals(expected.getNumDataIndexLevels(),
          loaded.getNumDataIndexLevels());
      assertEquals(expected.createComparator().getClass().getName(),
          loaded.createComparator().getClass().getName());
      assertEquals(expected.getFirstDataBlockOffset(),
          loaded.getFirstDataBlockOffset());
      assertTrue(
          expected.createComparator() instanceof CellComparatorImpl);
      assertEquals(expected.getUncompressedDataIndexSize(),
          loaded.getUncompressedDataIndexSize());
    }

    assertEquals(expected.getLoadOnOpenDataOffset(),
        loaded.getLoadOnOpenDataOffset());
    assertEquals(expected.getMetaIndexCount(), loaded.getMetaIndexCount());

    assertEquals(expected.getTotalUncompressedBytes(),
        loaded.getTotalUncompressedBytes());
  }

}