/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.util.DataChecksum;

/**
 * Utility methods to compute and validate checksums.
 */
@InterfaceAudience.Private
public class ChecksumUtil {
  public static final Log LOG = LogFactory.getLog(ChecksumUtil.class);

  /** Zero-filled buffer used to reserve space for checksums before they are computed. */
  private static final byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE];

  /**
   * Set by unit tests (via
   * {@link #generateExceptionForChecksumFailureForTest(boolean)}) to exercise
   * checksum-failure handling paths.
   */
  private static boolean generateExceptions = false;

  /**
   * Generates checksums for all the data in indata. The checksum values are
   * written to outdata.
   * @param indata input data stream
   * @param startOffset starting offset in the indata stream from where to
   *          compute checksums from
   * @param endOffset ending offset in the indata stream upto which checksums
   *          need to be computed
   * @param outdata the output buffer where checksum values are written
   * @param outOffset the starting offset in outdata where the checksum values
   *          are written
   * @param checksumType type of checksum
   * @param bytesPerChecksum number of bytes per checksum value
   */
  static void generateChecksums(byte[] indata, int startOffset, int endOffset,
      byte[] outdata, int outOffset, ChecksumType checksumType,
      int bytesPerChecksum) throws IOException {

    // A NULL checksum type means no checksum values are stored for this block.
    if (checksumType == ChecksumType.NULL) {
      return;
    }

    DataChecksum checksum = DataChecksum.newDataChecksum(
        checksumType.getDataChecksumType(), bytesPerChecksum);

    // Compute one checksum per bytesPerChecksum-sized chunk of the input and
    // write the values contiguously into outdata, starting at outOffset.
    checksum.calculateChunkedSums(
        ByteBuffer.wrap(indata, startOffset, endOffset - startOffset),
        ByteBuffer.wrap(outdata, outOffset, outdata.length - outOffset));
  }
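
  // A minimal usage sketch (illustrative only; the CRC32C type and 16 KB
  // chunk size below are assumptions, not defaults mandated by this class):
  //
  //   byte[] block = ...;  // serialized header + data
  //   byte[] sums = new byte[(int) numBytes(block.length, 16384)];
  //   generateChecksums(block, 0, block.length, sums, 0,
  //       ChecksumType.CRC32C, 16384);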

  /**
   * Validates that the data in the specified HFileBlock matches the checksums
   * stored at the end of the block. The checksums are regenerated from the
   * data and compared against the stored values.
   * @param buffer contains the block, laid out as header, data, checksums
   * @param path path of the HFile this block belongs to, used only for logging
   * @param offset offset of this block within the file, used only for logging
   * @param hdrSize size of the block header, used only for logging
   * @return true if the checksums match, false otherwise
   */
  static boolean validateChecksum(ByteBuffer buffer, Path path, long offset, int hdrSize)
      throws IOException {
    ChecksumType cktype =
        ChecksumType.codeToType(buffer.get(HFileBlock.Header.CHECKSUM_TYPE_INDEX));
    if (cktype == ChecksumType.NULL) {
      return true; // No checksum validation needed for this block.
    }

    // Read the stored checksum chunk size from the block header.
    int bytesPerChecksum = buffer.getInt(HFileBlock.Header.BYTES_PER_CHECKSUM_INDEX);

    DataChecksum dataChecksum = DataChecksum.newDataChecksum(
        cktype.getDataChecksumType(), bytesPerChecksum);
    assert dataChecksum != null;
    int onDiskDataSizeWithHeader =
        buffer.getInt(HFileBlock.Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
    if (LOG.isTraceEnabled()) {
      LOG.trace("dataLength=" + buffer.capacity()
          + ", sizeWithHeader=" + onDiskDataSizeWithHeader
          + ", checksumType=" + cktype.getName()
          + ", file=" + path.toString()
          + ", offset=" + offset
          + ", headerSize=" + hdrSize
          + ", bytesPerChecksum=" + bytesPerChecksum);
    }
    try {
      // Everything up to onDiskDataSizeWithHeader is checksummed data; the
      // rest of the buffer holds the checksum values themselves.
      ByteBuffer data = (ByteBuffer) buffer.duplicate().position(0).limit(onDiskDataSizeWithHeader);
      ByteBuffer checksums = (ByteBuffer) buffer.duplicate().position(onDiskDataSizeWithHeader)
          .limit(buffer.capacity());
      dataChecksum.verifyChunkedSums(data, checksums, path.toString(), 0);
    } catch (ChecksumException e) {
      return false;
    }
    return true; // checksum is valid
  }
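
  // Sketch of the caller's contract (hypothetical variable names): a failed
  // validation is typically handled by re-reading the block with HBase-level
  // checksums disabled, falling back to HDFS checksum verification, rather
  // than failing the read outright.
  //
  //   if (!validateChecksum(buf, path, blockOffset, hdrSize)) {
  //     // retry the read, relying on HDFS checksums for this stream
  //   }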

  /**
   * Returns the number of bytes needed to store the checksums for a specified
   * data size.
   * @param datasize number of bytes of data
   * @param bytesPerChecksum number of bytes in a checksum chunk
   * @return the number of bytes needed to store the checksum values
   */
  static long numBytes(long datasize, int bytesPerChecksum) {
    return numChunks(datasize, bytesPerChecksum) * HFileBlock.CHECKSUM_SIZE;
  }

  /**
   * Returns the number of checksum chunks needed to store the checksums for a
   * specified data size.
   * @param datasize number of bytes of data
   * @param bytesPerChecksum number of bytes in a checksum chunk
   * @return the number of checksum chunks, rounded up for a partial last chunk
   */
  static long numChunks(long datasize, int bytesPerChecksum) {
    long numChunks = datasize / bytesPerChecksum;
    if (datasize % bytesPerChecksum != 0) {
      numChunks++;
    }
    return numChunks;
  }
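
  // Worked example, assuming the 4-byte HFileBlock.CHECKSUM_SIZE: 33,000
  // bytes of data with bytesPerChecksum = 16384 spans
  // ceil(33000 / 16384) = 3 chunks, so numBytes(33000, 16384) = 3 * 4 = 12
  // bytes of checksum storage.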

  /**
   * Writes dummy checksum values to the specified output stream to reserve
   * space; the real checksums are filled in later, once the block data is
   * final.
   * @param baos output stream to write dummy checksum values to
   * @param numBytes number of bytes of data for which checksum space is reserved
   * @param bytesPerChecksum number of bytes per checksum value
   */
  static void reserveSpaceForChecksums(ByteArrayOutputStream baos,
      int numBytes, int bytesPerChecksum) throws IOException {
    long numChunks = numChunks(numBytes, bytesPerChecksum);
    long bytesLeft = numChunks * HFileBlock.CHECKSUM_SIZE;
    // Append the zero-filled DUMMY_VALUE buffer repeatedly until the required
    // number of placeholder bytes has been written.
    while (bytesLeft > 0) {
      long count = Math.min(bytesLeft, DUMMY_VALUE.length);
      baos.write(DUMMY_VALUE, 0, (int) count);
      bytesLeft -= count;
    }
  }
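
  // Sketch of the intended write pattern (names are illustrative): reserve
  // placeholder bytes while the block is being built, then overwrite them
  // with real checksums once the data is final.
  //
  //   ByteArrayOutputStream baos = new ByteArrayOutputStream();
  //   baos.write(headerAndData);
  //   reserveSpaceForChecksums(baos, headerAndData.length, 16384);
  //   byte[] onDisk = baos.toByteArray();
  //   generateChecksums(onDisk, 0, headerAndData.length,
  //       onDisk, headerAndData.length, ChecksumType.CRC32C, 16384);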

  /**
   * Mechanism to force checksum verification failures; used by unit tests only.
   * @param value if true, hbase checksum verification failures will generate
   *          exceptions
   */
  public static void generateExceptionForChecksumFailureForTest(boolean value) {
    generateExceptions = value;
  }
}