1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.regionserver.wal;
20
21 import java.io.DataInput;
22 import java.io.DataOutput;
23 import java.io.EOFException;
24 import java.io.IOException;
25 import java.util.Iterator;
26 import java.util.List;
27 import java.util.UUID;
28
29 import org.apache.commons.logging.Log;
30 import org.apache.commons.logging.LogFactory;
31 import org.apache.hadoop.hbase.classification.InterfaceAudience;
32 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
33 import org.apache.hadoop.hbase.HRegionInfo;
34 import org.apache.hadoop.hbase.TableName;
35 import org.apache.hadoop.hbase.util.Bytes;
36 import org.apache.hadoop.hbase.wal.WALKey;
37 import org.apache.hadoop.io.Writable;
38 import org.apache.hadoop.io.WritableUtils;
39
40 import com.google.common.annotations.VisibleForTesting;
41
42
43
44
45
46
47
48
49
50
51
52
/**
 * A Key for an entry in the write ahead log.
 *
 * All key state (encoded region name, table name, log sequence number, write
 * time, consuming-cluster ids and nonce group/nonce) lives in the
 * {@link WALKey} superclass; this subclass only adds the legacy Hadoop
 * {@link Writable} (de)serialization so that WALs written in the old
 * Writable format can still be read.
 *
 * @deprecated use {@link WALKey} directly; this class remains only to
 *     read/write the legacy Writable serialization format.
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
@Deprecated
public class HLogKey extends WALKey implements Writable {
  public static final Log LOG = LogFactory.getLog(HLogKey.class);

  /** Writable constructor -- fields are filled in by {@link #readFields(DataInput)}. */
  public HLogKey() {
    super();
  }

  /**
   * Test-only convenience constructor taking a single cluster id.
   *
   * @param encodedRegionName encoded name of the region the edit belongs to
   * @param tablename name of the table the edit belongs to
   * @param logSeqNum log sequence number of the edit
   * @param now the write timestamp
   * @param clusterId id of the cluster that originated the edit
   */
  @VisibleForTesting
  public HLogKey(final byte[] encodedRegionName, final TableName tablename, long logSeqNum,
      final long now, UUID clusterId) {
    super(encodedRegionName, tablename, logSeqNum, now, clusterId);
  }

  /**
   * Creates a key for the given region and table; remaining fields take the
   * superclass defaults.
   */
  public HLogKey(final byte[] encodedRegionName, final TableName tablename) {
    super(encodedRegionName, tablename);
  }

  /**
   * Creates a key for the given region and table with an explicit write time.
   *
   * @param now the write timestamp
   */
  public HLogKey(final byte[] encodedRegionName, final TableName tablename, final long now) {
    super(encodedRegionName, tablename, now);
  }

  /**
   * Creates a fully specified key.
   *
   * @param encodedRegionName encoded name of the region the edit belongs to
   * @param tablename name of the table the edit belongs to
   * @param logSeqNum log sequence number of the edit
   * @param now the write timestamp
   * @param clusterIds ids of the clusters that have already consumed the
   *     change (used by replication to avoid cycles)
   * @param nonceGroup nonce group of the operation
   * @param nonce nonce of the operation
   */
  public HLogKey(final byte [] encodedRegionName, final TableName tablename,
      long logSeqNum, final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
    super(encodedRegionName, tablename, logSeqNum, now, clusterIds, nonceGroup, nonce);
  }

  /**
   * Creates a key with an explicit write time, cluster ids and nonces; the
   * log sequence number takes the superclass default.
   */
  public HLogKey(final byte [] encodedRegionName, final TableName tablename,
      final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
    super(encodedRegionName, tablename, now, clusterIds, nonceGroup, nonce);
  }

  /**
   * Creates a key with an explicit log sequence number and nonces; the write
   * time and cluster ids take the superclass defaults.
   */
  public HLogKey(final byte [] encodedRegionName, final TableName tablename, long logSeqNum,
      long nonceGroup, long nonce) {
    super(encodedRegionName, tablename, logSeqNum, nonceGroup, nonce);
  }

  /**
   * Legacy Writable serialization. Only expected from test code (hence the
   * WARN); see {@link #readFields(DataInput)} for the matching read side.
   */
  @Override
  @Deprecated
  public void write(DataOutput out) throws IOException {
    LOG.warn("HLogKey is being serialized to writable - only expected in test code");
    // The version code is written first; readFields relies on it being
    // negative to distinguish a versioned key from the unversioned format,
    // whose first vint is a (non-negative) region-name length.
    WritableUtils.writeVInt(out, VERSION.code);
    if (compressionContext == null) {
      Bytes.writeByteArray(out, this.encodedRegionName);
      Bytes.writeByteArray(out, this.tablename.getName());
    } else {
      // Dictionary-compress the region and table names when a compression
      // context has been attached to this key.
      Compressor.writeCompressed(this.encodedRegionName, 0,
        this.encodedRegionName.length, out,
        compressionContext.regionDict);
      Compressor.writeCompressed(this.tablename.getName(), 0, this.tablename.getName().length, out,
        compressionContext.tableDict);
    }
    out.writeLong(this.logSeqNum);
    out.writeLong(this.writeTime);

    // Only the first cluster id (if any) makes it into the Writable format;
    // a boolean flags whether one is present.
    Iterator<UUID> iterator = clusterIds.iterator();
    if(iterator.hasNext()){
      out.writeBoolean(true);
      UUID clusterId = iterator.next();
      out.writeLong(clusterId.getMostSignificantBits());
      out.writeLong(clusterId.getLeastSignificantBits());
    } else {
      out.writeBoolean(false);
    }
  }

  /**
   * Deserializes a key written by {@link #write(DataOutput)} or by older
   * versions of this class.
   *
   * Handles three on-disk layouts: the original unversioned format (first
   * vint is the region-name length, >= 0), the versioned format (first vint
   * is a negative version code), and the compressed variant of the versioned
   * format (dictionary-compressed region/table names, used only when this
   * key has a compression context and the stream version is at least
   * COMPRESSED).
   *
   * @throws IOException on a read error or an unrecognized legacy table name
   */
  @Override
  public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // HLogKey was not originally versioned: an unversioned key starts with
    // the (non-negative) vint length of the encoded region name, while a
    // versioned key starts with a negative vint version code. The sign of
    // the first vint therefore tells the two formats apart.

    // Scopes are not part of the Writable format; clear any stale state.
    setScopes(null);
    int len = WritableUtils.readVInt(in);
    byte[] tablenameBytes = null;
    if (len < 0) {
      // What we just read is the version code.
      version = Version.fromCode(len);
      // Only COMPRESSED-and-later streams read via a compression context
      // skip the explicit length; otherwise the next vint is the
      // region-name length.
      if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
        len = WritableUtils.readVInt(in);
      }
    }
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
      this.encodedRegionName = new byte[len];
      in.readFully(this.encodedRegionName);
      tablenameBytes = Bytes.readByteArray(in);
    } else {
      this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
      tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
    }

    this.logSeqNum = in.readLong();
    this.writeTime = in.readLong();

    this.clusterIds.clear();
    if (version.atLeast(Version.INITIAL)) {
      if (in.readBoolean()) {
        // The Writable format carries at most one cluster id, flagged by a
        // boolean -- see write(DataOutput).
        clusterIds.add(new UUID(in.readLong(), in.readLong()));
      }
    } else {
      try {
        // Skip the trailing byte of the pre-versioned format
        // (NOTE(review): presumably an old cluster-id placeholder -- the
        // value is discarded either way).
        in.readByte();
      } catch(EOFException e) {
        // Very old logs may end right here; tolerate the missing byte.
        if (LOG.isTraceEnabled()) LOG.trace(e);
      }
    }
    try {
      this.tablename = TableName.valueOf(tablenameBytes);
    } catch (IllegalArgumentException iae) {
      if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
        // Pre-namespace ".META." edit: rewrite to the current meta table
        // name and its fixed first-region encoded name.
        LOG.info("Got an old .META. edit, continuing with new format ");
        this.tablename = TableName.META_TABLE_NAME;
        this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
      } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
        // "-ROOT-" edits are not supported any more: record the legacy name
        // but propagate the failure to the caller.
        this.tablename = TableName.OLD_ROOT_TABLE_NAME;
        throw iae;
      } else throw iae;
    }

  }

}