/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.apache.hadoop.hbase.io.hfile.FixedFileTrailer.createComparator;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.InnerStoreCellComparator;
import org.apache.hadoop.hbase.MetaCellComparator;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests {@link FixedFileTrailer}: comparator-name resolution, serialization round-trips for every
 * supported HFile major version, rejection of corrupted version bytes, and backward compatibility
 * with pre-protobuf (Writable-serialized) V2 trailers.
 */
@Tag(IOTests.TAG)
@Tag(SmallTests.TAG)
@HBaseParameterizedTestTemplate(name = "{index}: version={0}")
public class TestFixedFileTrailer {

  private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class);

  /** Fixed serialized width of the comparator class name in a pre-PB (Writable) trailer. */
  private static final int MAX_COMPARATOR_NAME_LENGTH = 128;

  /**
   * The number of used fields by version. Indexed by version minus two. Min version that we support
   * is V2.
   */
  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };

  private final HBaseTestingUtil util = new HBaseTestingUtil();
  private final ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private FileSystem fs;
  private final int version;

  static {
    // One entry per supported major version; fails fast (with -ea) if a new version is
    // introduced without updating NUM_FIELDS_BY_VERSION.
    assert NUM_FIELDS_BY_VERSION.length == HFile.MAX_FORMAT_VERSION - HFile.MIN_FORMAT_VERSION + 1;
  }

  public TestFixedFileTrailer(int version) {
    this.version = version;
  }

  /** Supplies every supported HFile major version as a test-template parameter. */
  public static Stream<Arguments> parameters() {
    return IntStream.rangeClosed(HFile.MIN_FORMAT_VERSION, HFile.MAX_FORMAT_VERSION)
      .mapToObj(Arguments::of);
  }

  @BeforeEach
  public void setUp() throws IOException {
    fs = FileSystem.get(util.getConfiguration());
  }

  /**
   * Verifies that {@link FixedFileTrailer#createComparator(String)} maps legacy and current
   * comparator class names onto the right comparator implementations, returns null for raw-bytes
   * comparators, and rejects invalid names.
   */
  @TestTemplate
  public void testCreateComparator() throws IOException {
    // Legacy KeyValue comparator names and the CellComparator interface name all resolve to
    // the inner-store comparator.
    assertEquals(InnerStoreCellComparator.class,
      createComparator("org.apache.hadoop.hbase.KeyValue$KVComparator").getClass());
    assertEquals(InnerStoreCellComparator.class,
      createComparator(CellComparator.class.getName()).getClass());

    // All historical spellings of the meta comparator resolve to MetaCellComparator.
    assertEquals(MetaCellComparator.class,
      createComparator("org.apache.hadoop.hbase.KeyValue$MetaComparator").getClass());
    assertEquals(MetaCellComparator.class,
      createComparator("org.apache.hadoop.hbase.CellComparator$MetaCellComparator").getClass());
    assertEquals(MetaCellComparator.class,
      createComparator("org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator").getClass());
    assertEquals(MetaCellComparator.class,
      createComparator(MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass());
    assertEquals(MetaCellComparator.META_COMPARATOR.getClass(),
      createComparator(MetaCellComparator.META_COMPARATOR.getClass().getName()).getClass());

    assertEquals(CellComparatorImpl.COMPARATOR.getClass(),
      createComparator(MetaCellComparator.COMPARATOR.getClass().getName()).getClass());

    // Raw-bytes comparators have no cell comparator equivalent; null is the documented result.
    assertNull(createComparator(Bytes.BYTES_RAWCOMPARATOR.getClass().getName()));
    assertNull(createComparator("org.apache.hadoop.hbase.KeyValue$RawBytesComparator"));

    // Test an invalid comparatorClassName
    assertThrows(IOException.class, () -> createComparator(""));
  }

  /**
   * Round-trips a trailer through serialize/deserialize for the current version, checks that
   * corrupted version bytes are rejected with a useful message, and validates the trailer's string
   * representation and file-based auto-detection path.
   */
  @TestTemplate
  public void testTrailer() throws IOException {
    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
    t.setDataIndexCount(3);
    t.setEntryCount(((long) Integer.MAX_VALUE) + 1); // Forces the 64-bit entry-count path.
    t.setLastDataBlockOffset(291);
    t.setNumDataIndexLevels(3);
    t.setComparatorClass(InnerStoreCellComparator.INNER_STORE_COMPARATOR.getClass());
    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
    t.setUncompressedDataIndexSize(827398717L); // Something random.
    t.setLoadOnOpenOffset(128);
    t.setMetaIndexCount(7);
    t.setTotalUncompressedBytes(129731987);

    {
      DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
      t.serialize(dos);
      dos.flush();
      // Trailers are fixed-size per version; expected value goes first for JUnit 5.
      assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
    }

    byte[] bytes = baos.toByteArray();
    baos.reset();
    assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);

    // Finished writing, trying to read.
    {
      DataInputStream dis = new DataInputStream(bais);
      FixedFileTrailer t2 =
        new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
      t2.deserialize(dis);
      assertEquals(-1, bais.read()); // Ensure we have read everything.
      checkLoadedTrailer(version, t, t2);
    }

    // Now check what happens if the trailer is corrupted.
    Path trailerPath = new Path(util.getDataTestDir(), "trailer_" + version);
    for (byte invalidVersion : new byte[] { HFile.MIN_FORMAT_VERSION - 1,
        HFile.MAX_FORMAT_VERSION + 1 }) {
      // The major version is encoded in the trailer's final byte.
      bytes[bytes.length - 1] = invalidVersion;
      writeTrailer(trailerPath, null, bytes);
      IllegalArgumentException ex =
        assertThrows(IllegalArgumentException.class, () -> readTrailer(trailerPath));
      // Make it easy to debug this.
      String msg = ex.getMessage();
      String cleanMsg = msg.replaceAll("^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", "");
      // will be followed by " expected: ..."
      assertEquals("Invalid HFile version: " + invalidVersion, cleanMsg,
        "Actual exception message is \"" + msg + "\".\nCleaned-up message");
      LOG.info("Got an expected exception: " + msg);
    }

    // Now write the trailer into a file and auto-detect the version.
    writeTrailer(trailerPath, t, null);

    FixedFileTrailer t4 = readTrailer(trailerPath);

    checkLoadedTrailer(version, t, t4);

    String trailerStr = t.toString();
    assertEquals(NUM_FIELDS_BY_VERSION[version - 2], trailerStr.split(", ").length,
      "Invalid number of fields in the string representation " + "of the trailer: " + trailerStr);
    assertEquals(trailerStr, t4.toString());
  }

  /**
   * For V2 only: writes a trailer in the legacy Writable (non-protobuf) format and verifies the
   * current deserializer still reads it correctly.
   */
  @TestTemplate
  public void testTrailerForV2NonPBCompatibility() throws Exception {
    if (version == 2) {
      FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
      t.setDataIndexCount(3);
      t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
      t.setLastDataBlockOffset(291);
      t.setNumDataIndexLevels(3);
      t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
      t.setUncompressedDataIndexSize(827398717L); // Something random.
      t.setLoadOnOpenOffset(128);
      t.setMetaIndexCount(7);
      t.setTotalUncompressedBytes(129731987);

      {
        DataOutputStream dos = new DataOutputStream(baos); // Limited scope.
        serializeAsWritable(dos, t);
        dos.flush();
        assertEquals(FixedFileTrailer.getTrailerSize(version), dos.size());
      }

      byte[] bytes = baos.toByteArray();
      baos.reset();
      assertEquals(FixedFileTrailer.getTrailerSize(version), bytes.length);

      ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
      {
        DataInputStream dis = new DataInputStream(bais);
        FixedFileTrailer t2 =
          new FixedFileTrailer(version, HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM);
        t2.deserialize(dis);
        assertEquals(-1, bais.read()); // Ensure we have read everything.
        checkLoadedTrailer(version, t, t2);
      }
    }
  }

  // Copied from FixedFileTrailer for testing the reading part of
  // FixedFileTrailer of non PB
  // serialized FFTs.
  private void serializeAsWritable(DataOutputStream output, FixedFileTrailer fft)
    throws IOException {
    BlockType.TRAILER.write(output);
    output.writeLong(fft.getFileInfoOffset());
    output.writeLong(fft.getLoadOnOpenDataOffset());
    output.writeInt(fft.getDataIndexCount());
    output.writeLong(fft.getUncompressedDataIndexSize());
    output.writeInt(fft.getMetaIndexCount());
    output.writeLong(fft.getTotalUncompressedBytes());
    output.writeLong(fft.getEntryCount());
    output.writeInt(fft.getCompressionCodec().ordinal());
    output.writeInt(fft.getNumDataIndexLevels());
    output.writeLong(fft.getFirstDataBlockOffset());
    output.writeLong(fft.getLastDataBlockOffset());
    // Comparator name is stored in a fixed-size field in the Writable layout.
    Bytes.writeStringFixedSize(output, fft.getComparatorClassName(), MAX_COMPARATOR_NAME_LENGTH);
    // Major and minor version are packed into a single int at the very end of the trailer.
    output
      .writeInt(FixedFileTrailer.materializeVersion(fft.getMajorVersion(), fft.getMinorVersion()));
  }

  /** Reads a trailer back from {@code trailerPath}, auto-detecting the version from the file. */
  private FixedFileTrailer readTrailer(Path trailerPath) throws IOException {
    // try-with-resources: the original leaked the stream when readFromStream threw.
    try (FSDataInputStream fsdis = fs.open(trailerPath)) {
      return FixedFileTrailer.readFromStream(fsdis, fs.getFileStatus(trailerPath).getLen());
    }
  }

  /**
   * Writes either a serialized trailer {@code t} or the raw bytes {@code useBytesInstead} (exactly
   * one must be non-null) to {@code trailerPath}, preceded by a junk byte so the deserializer
   * cannot assume the trailer starts at offset zero.
   */
  private void writeTrailer(Path trailerPath, FixedFileTrailer t, byte[] useBytesInstead)
    throws IOException {
    assert (t == null) != (useBytesInstead == null); // Expect one non-null.

    // try-with-resources: the original leaked the stream on a write failure.
    try (FSDataOutputStream fsdos = fs.create(trailerPath)) {
      fsdos.write(135); // to make deserializer's job less trivial
      if (useBytesInstead != null) {
        fsdos.write(useBytesInstead);
      } else {
        t.serialize(fsdos);
      }
    }
  }

  /** Asserts that {@code loaded} carries the same field values as {@code expected}. */
  private void checkLoadedTrailer(int version, FixedFileTrailer expected, FixedFileTrailer loaded)
    throws IOException {
    assertEquals(version, loaded.getMajorVersion());
    assertEquals(expected.getDataIndexCount(), loaded.getDataIndexCount());

    // NOTE(review): the version == 1 branches below are legacy; MIN_FORMAT_VERSION is 2, so the
    // clamp to Integer.MAX_VALUE and the file-info-offset check never fire. Kept for parity with
    // the historical V1 format.
    assertEquals(
      Math.min(expected.getEntryCount(), version == 1 ? Integer.MAX_VALUE : Long.MAX_VALUE),
      loaded.getEntryCount());

    if (version == 1) {
      assertEquals(expected.getFileInfoOffset(), loaded.getFileInfoOffset());
    }

    if (version == 2) {
      assertEquals(expected.getLastDataBlockOffset(), loaded.getLastDataBlockOffset());
      assertEquals(expected.getNumDataIndexLevels(), loaded.getNumDataIndexLevels());
      assertEquals(expected.createComparator().getClass().getName(),
        loaded.createComparator().getClass().getName());
      assertEquals(expected.getFirstDataBlockOffset(), loaded.getFirstDataBlockOffset());
      assertTrue(expected.createComparator() instanceof CellComparatorImpl);
      assertEquals(expected.getUncompressedDataIndexSize(), loaded.getUncompressedDataIndexSize());
    }

    assertEquals(expected.getLoadOnOpenDataOffset(), loaded.getLoadOnOpenDataOffset());
    assertEquals(expected.getMetaIndexCount(), loaded.getMetaIndexCount());

    assertEquals(expected.getTotalUncompressedBytes(), loaded.getTotalUncompressedBytes());
  }

}