1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: hbase.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class HBaseProtos {
7     private HBaseProtos() {}
8     public static void registerAllExtensions(
9         com.google.protobuf.ExtensionRegistry registry) {
10    }
11    public interface SnapshotDescriptionOrBuilder
12        extends com.google.protobuf.MessageOrBuilder {
13      
14      // required string name = 1;
15      boolean hasName();
16      String getName();
17      
18      // optional string table = 2;
19      boolean hasTable();
20      String getTable();
21      
22      // optional int64 creationTime = 3 [default = 0];
23      boolean hasCreationTime();
24      long getCreationTime();
25      
26      // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
27      boolean hasType();
28      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType();
29      
30      // optional int32 version = 5;
31      boolean hasVersion();
32      int getVersion();
33    }
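  // Usage note (illustrative, not part of the generated file): both SnapshotDescription
  // and its Builder implement this interface, so read-only helpers can accept either
  // form. A minimal sketch:
  //
  //   static String describe(SnapshotDescriptionOrBuilder s) {
  //     return s.getName() + (s.hasTable() ? " on " + s.getTable() : "");
  //   }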
34    public static final class SnapshotDescription extends
35        com.google.protobuf.GeneratedMessage
36        implements SnapshotDescriptionOrBuilder {
37      // Use SnapshotDescription.newBuilder() to construct.
38      private SnapshotDescription(Builder builder) {
39        super(builder);
40      }
41      private SnapshotDescription(boolean noInit) {}
42      
43      private static final SnapshotDescription defaultInstance;
44      public static SnapshotDescription getDefaultInstance() {
45        return defaultInstance;
46      }
47      
48      public SnapshotDescription getDefaultInstanceForType() {
49        return defaultInstance;
50      }
51      
52      public static final com.google.protobuf.Descriptors.Descriptor
53          getDescriptor() {
54        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor;
55      }
56      
57      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
58          internalGetFieldAccessorTable() {
59        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable;
60      }
61      
62      public enum Type
63          implements com.google.protobuf.ProtocolMessageEnum {
64        DISABLED(0, 0),
65        FLUSH(1, 1),
66        SKIPFLUSH(2, 2),
67        ;
68        
69        public static final int DISABLED_VALUE = 0;
70        public static final int FLUSH_VALUE = 1;
71        public static final int SKIPFLUSH_VALUE = 2;
72        
73        
74        public final int getNumber() { return value; }
75        
76        public static Type valueOf(int value) {
77          switch (value) {
78            case 0: return DISABLED;
79            case 1: return FLUSH;
80            case 2: return SKIPFLUSH;
81            default: return null;
82          }
83        }
84        
85        public static com.google.protobuf.Internal.EnumLiteMap<Type>
86            internalGetValueMap() {
87          return internalValueMap;
88        }
89        private static com.google.protobuf.Internal.EnumLiteMap<Type>
90            internalValueMap =
91              new com.google.protobuf.Internal.EnumLiteMap<Type>() {
92                public Type findValueByNumber(int number) {
93                  return Type.valueOf(number);
94                }
95              };
96        
97        public final com.google.protobuf.Descriptors.EnumValueDescriptor
98            getValueDescriptor() {
99          return getDescriptor().getValues().get(index);
100       }
101       public final com.google.protobuf.Descriptors.EnumDescriptor
102           getDescriptorForType() {
103         return getDescriptor();
104       }
105       public static final com.google.protobuf.Descriptors.EnumDescriptor
106           getDescriptor() {
107         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
108       }
109       
110       private static final Type[] VALUES = {
111         DISABLED, FLUSH, SKIPFLUSH, 
112       };
113       
114       public static Type valueOf(
115           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
116         if (desc.getType() != getDescriptor()) {
117           throw new java.lang.IllegalArgumentException(
118             "EnumValueDescriptor is not for this type.");
119         }
120         return VALUES[desc.getIndex()];
121       }
122       
123       private final int index;
124       private final int value;
125       
126       private Type(int index, int value) {
127         this.index = index;
128         this.value = value;
129       }
130       
131       // @@protoc_insertion_point(enum_scope:SnapshotDescription.Type)
132     }
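    // Usage note (illustrative, not part of the generated file): Type maps between the
    // wire number and the enum constant; valueOf(int) returns null for numbers this
    // version of the enum does not know about.
    //
    //   SnapshotDescription.Type t = SnapshotDescription.Type.valueOf(2); // SKIPFLUSH
    //   int wireValue = SnapshotDescription.Type.DISABLED.getNumber();    // 0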
133     
134     private int bitField0_;
135     // required string name = 1;
136     public static final int NAME_FIELD_NUMBER = 1;
137     private java.lang.Object name_;
138     public boolean hasName() {
139       return ((bitField0_ & 0x00000001) == 0x00000001);
140     }
141     public String getName() {
142       java.lang.Object ref = name_;
143       if (ref instanceof String) {
144         return (String) ref;
145       } else {
146         com.google.protobuf.ByteString bs = 
147             (com.google.protobuf.ByteString) ref;
148         String s = bs.toStringUtf8();
149         if (com.google.protobuf.Internal.isValidUtf8(bs)) {
150           name_ = s;
151         }
152         return s;
153       }
154     }
155     private com.google.protobuf.ByteString getNameBytes() {
156       java.lang.Object ref = name_;
157       if (ref instanceof String) {
158         com.google.protobuf.ByteString b = 
159             com.google.protobuf.ByteString.copyFromUtf8((String) ref);
160         name_ = b;
161         return b;
162       } else {
163         return (com.google.protobuf.ByteString) ref;
164       }
165     }
166     
167     // optional string table = 2;
168     public static final int TABLE_FIELD_NUMBER = 2;
169     private java.lang.Object table_;
170     public boolean hasTable() {
171       return ((bitField0_ & 0x00000002) == 0x00000002);
172     }
173     public String getTable() {
174       java.lang.Object ref = table_;
175       if (ref instanceof String) {
176         return (String) ref;
177       } else {
178         com.google.protobuf.ByteString bs = 
179             (com.google.protobuf.ByteString) ref;
180         String s = bs.toStringUtf8();
181         if (com.google.protobuf.Internal.isValidUtf8(bs)) {
182           table_ = s;
183         }
184         return s;
185       }
186     }
187     private com.google.protobuf.ByteString getTableBytes() {
188       java.lang.Object ref = table_;
189       if (ref instanceof String) {
190         com.google.protobuf.ByteString b = 
191             com.google.protobuf.ByteString.copyFromUtf8((String) ref);
192         table_ = b;
193         return b;
194       } else {
195         return (com.google.protobuf.ByteString) ref;
196       }
197     }
198     
199     // optional int64 creationTime = 3 [default = 0];
200     public static final int CREATIONTIME_FIELD_NUMBER = 3;
201     private long creationTime_;
202     public boolean hasCreationTime() {
203       return ((bitField0_ & 0x00000004) == 0x00000004);
204     }
205     public long getCreationTime() {
206       return creationTime_;
207     }
208     
209     // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
210     public static final int TYPE_FIELD_NUMBER = 4;
211     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_;
212     public boolean hasType() {
213       return ((bitField0_ & 0x00000008) == 0x00000008);
214     }
215     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
216       return type_;
217     }
218     
219     // optional int32 version = 5;
220     public static final int VERSION_FIELD_NUMBER = 5;
221     private int version_;
222     public boolean hasVersion() {
223       return ((bitField0_ & 0x00000010) == 0x00000010);
224     }
225     public int getVersion() {
226       return version_;
227     }
228     
229     private void initFields() {
230       name_ = "";
231       table_ = "";
232       creationTime_ = 0L;
233       type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
234       version_ = 0;
235     }
236     private byte memoizedIsInitialized = -1;
237     public final boolean isInitialized() {
238       byte isInitialized = memoizedIsInitialized;
239       if (isInitialized != -1) return isInitialized == 1;
240       
241       if (!hasName()) {
242         memoizedIsInitialized = 0;
243         return false;
244       }
245       memoizedIsInitialized = 1;
246       return true;
247     }
248     
249     public void writeTo(com.google.protobuf.CodedOutputStream output)
250                         throws java.io.IOException {
251       getSerializedSize();
252       if (((bitField0_ & 0x00000001) == 0x00000001)) {
253         output.writeBytes(1, getNameBytes());
254       }
255       if (((bitField0_ & 0x00000002) == 0x00000002)) {
256         output.writeBytes(2, getTableBytes());
257       }
258       if (((bitField0_ & 0x00000004) == 0x00000004)) {
259         output.writeInt64(3, creationTime_);
260       }
261       if (((bitField0_ & 0x00000008) == 0x00000008)) {
262         output.writeEnum(4, type_.getNumber());
263       }
264       if (((bitField0_ & 0x00000010) == 0x00000010)) {
265         output.writeInt32(5, version_);
266       }
267       getUnknownFields().writeTo(output);
268     }
269     
270     private int memoizedSerializedSize = -1;
271     public int getSerializedSize() {
272       int size = memoizedSerializedSize;
273       if (size != -1) return size;
274     
275       size = 0;
276       if (((bitField0_ & 0x00000001) == 0x00000001)) {
277         size += com.google.protobuf.CodedOutputStream
278           .computeBytesSize(1, getNameBytes());
279       }
280       if (((bitField0_ & 0x00000002) == 0x00000002)) {
281         size += com.google.protobuf.CodedOutputStream
282           .computeBytesSize(2, getTableBytes());
283       }
284       if (((bitField0_ & 0x00000004) == 0x00000004)) {
285         size += com.google.protobuf.CodedOutputStream
286           .computeInt64Size(3, creationTime_);
287       }
288       if (((bitField0_ & 0x00000008) == 0x00000008)) {
289         size += com.google.protobuf.CodedOutputStream
290           .computeEnumSize(4, type_.getNumber());
291       }
292       if (((bitField0_ & 0x00000010) == 0x00000010)) {
293         size += com.google.protobuf.CodedOutputStream
294           .computeInt32Size(5, version_);
295       }
296       size += getUnknownFields().getSerializedSize();
297       memoizedSerializedSize = size;
298       return size;
299     }
300     
301     private static final long serialVersionUID = 0L;
302     @java.lang.Override
303     protected java.lang.Object writeReplace()
304         throws java.io.ObjectStreamException {
305       return super.writeReplace();
306     }
307     
308     @java.lang.Override
309     public boolean equals(final java.lang.Object obj) {
310       if (obj == this) {
311        return true;
312       }
313       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)) {
314         return super.equals(obj);
315       }
316       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj;
317       
318       boolean result = true;
319       result = result && (hasName() == other.hasName());
320       if (hasName()) {
321         result = result && getName()
322             .equals(other.getName());
323       }
324       result = result && (hasTable() == other.hasTable());
325       if (hasTable()) {
326         result = result && getTable()
327             .equals(other.getTable());
328       }
329       result = result && (hasCreationTime() == other.hasCreationTime());
330       if (hasCreationTime()) {
331         result = result && (getCreationTime()
332             == other.getCreationTime());
333       }
334       result = result && (hasType() == other.hasType());
335       if (hasType()) {
336         result = result &&
337             (getType() == other.getType());
338       }
339       result = result && (hasVersion() == other.hasVersion());
340       if (hasVersion()) {
341         result = result && (getVersion()
342             == other.getVersion());
343       }
344       result = result &&
345           getUnknownFields().equals(other.getUnknownFields());
346       return result;
347     }
348     
349     @java.lang.Override
350     public int hashCode() {
351       int hash = 41;
352       hash = (19 * hash) + getDescriptorForType().hashCode();
353       if (hasName()) {
354         hash = (37 * hash) + NAME_FIELD_NUMBER;
355         hash = (53 * hash) + getName().hashCode();
356       }
357       if (hasTable()) {
358         hash = (37 * hash) + TABLE_FIELD_NUMBER;
359         hash = (53 * hash) + getTable().hashCode();
360       }
361       if (hasCreationTime()) {
362         hash = (37 * hash) + CREATIONTIME_FIELD_NUMBER;
363         hash = (53 * hash) + hashLong(getCreationTime());
364       }
365       if (hasType()) {
366         hash = (37 * hash) + TYPE_FIELD_NUMBER;
367         hash = (53 * hash) + hashEnum(getType());
368       }
369       if (hasVersion()) {
370         hash = (37 * hash) + VERSION_FIELD_NUMBER;
371         hash = (53 * hash) + getVersion();
372       }
373       hash = (29 * hash) + getUnknownFields().hashCode();
374       return hash;
375     }
376     
377     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
378         com.google.protobuf.ByteString data)
379         throws com.google.protobuf.InvalidProtocolBufferException {
380       return newBuilder().mergeFrom(data).buildParsed();
381     }
382     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
383         com.google.protobuf.ByteString data,
384         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
385         throws com.google.protobuf.InvalidProtocolBufferException {
386       return newBuilder().mergeFrom(data, extensionRegistry)
387                .buildParsed();
388     }
389     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data)
390         throws com.google.protobuf.InvalidProtocolBufferException {
391       return newBuilder().mergeFrom(data).buildParsed();
392     }
393     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
394         byte[] data,
395         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
396         throws com.google.protobuf.InvalidProtocolBufferException {
397       return newBuilder().mergeFrom(data, extensionRegistry)
398                .buildParsed();
399     }
400     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input)
401         throws java.io.IOException {
402       return newBuilder().mergeFrom(input).buildParsed();
403     }
404     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
405         java.io.InputStream input,
406         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
407         throws java.io.IOException {
408       return newBuilder().mergeFrom(input, extensionRegistry)
409                .buildParsed();
410     }
411     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
412         throws java.io.IOException {
413       Builder builder = newBuilder();
414       if (builder.mergeDelimitedFrom(input)) {
415         return builder.buildParsed();
416       } else {
417         return null;
418       }
419     }
420     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(
421         java.io.InputStream input,
422         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
423         throws java.io.IOException {
424       Builder builder = newBuilder();
425       if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
426         return builder.buildParsed();
427       } else {
428         return null;
429       }
430     }
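    // Usage note (illustrative, not part of the generated file): the delimited variants
    // pair with writeDelimitedTo(OutputStream) from the protobuf runtime for
    // length-prefixed streams, and return null once the stream is exhausted.
    //
    //   desc.writeDelimitedTo(out);
    //   SnapshotDescription next = SnapshotDescription.parseDelimitedFrom(in);
    //   if (next == null) { /* end of stream */ }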
431     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
432         com.google.protobuf.CodedInputStream input)
433         throws java.io.IOException {
434       return newBuilder().mergeFrom(input).buildParsed();
435     }
436     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
437         com.google.protobuf.CodedInputStream input,
438         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
439         throws java.io.IOException {
440       return newBuilder().mergeFrom(input, extensionRegistry)
441                .buildParsed();
442     }
443     
444     public static Builder newBuilder() { return Builder.create(); }
445     public Builder newBuilderForType() { return newBuilder(); }
446     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) {
447       return newBuilder().mergeFrom(prototype);
448     }
449     public Builder toBuilder() { return newBuilder(this); }
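    // Usage note (illustrative, not part of the generated file): messages are immutable,
    // so a modified copy goes through toBuilder(). The new name is a hypothetical value.
    //
    //   SnapshotDescription renamed = desc.toBuilder().setName("snapshot-v2").build();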
450     
451     @java.lang.Override
452     protected Builder newBuilderForType(
453         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
454       Builder builder = new Builder(parent);
455       return builder;
456     }
457     public static final class Builder extends
458         com.google.protobuf.GeneratedMessage.Builder<Builder>
459        implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder {
460       public static final com.google.protobuf.Descriptors.Descriptor
461           getDescriptor() {
462         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor;
463       }
464       
465       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
466           internalGetFieldAccessorTable() {
467         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable;
468       }
469       
470       // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder()
471       private Builder() {
472         maybeForceBuilderInitialization();
473       }
474       
475       private Builder(BuilderParent parent) {
476         super(parent);
477         maybeForceBuilderInitialization();
478       }
479       private void maybeForceBuilderInitialization() {
480         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
481         }
482       }
483       private static Builder create() {
484         return new Builder();
485       }
486       
487       public Builder clear() {
488         super.clear();
489         name_ = "";
490         bitField0_ = (bitField0_ & ~0x00000001);
491         table_ = "";
492         bitField0_ = (bitField0_ & ~0x00000002);
493         creationTime_ = 0L;
494         bitField0_ = (bitField0_ & ~0x00000004);
495         type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
496         bitField0_ = (bitField0_ & ~0x00000008);
497         version_ = 0;
498         bitField0_ = (bitField0_ & ~0x00000010);
499         return this;
500       }
501       
502       public Builder clone() {
503         return create().mergeFrom(buildPartial());
504       }
505       
506       public com.google.protobuf.Descriptors.Descriptor
507           getDescriptorForType() {
508         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor();
509       }
510       
511       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() {
512         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
513       }
514       
515       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() {
516         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial();
517         if (!result.isInitialized()) {
518           throw newUninitializedMessageException(result);
519         }
520         return result;
521       }
522       
523       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildParsed()
524           throws com.google.protobuf.InvalidProtocolBufferException {
525         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial();
526         if (!result.isInitialized()) {
527           throw newUninitializedMessageException(
528             result).asInvalidProtocolBufferException();
529         }
530         return result;
531       }
532       
533       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() {
534         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this);
535         int from_bitField0_ = bitField0_;
536         int to_bitField0_ = 0;
537         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
538           to_bitField0_ |= 0x00000001;
539         }
540         result.name_ = name_;
541         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
542           to_bitField0_ |= 0x00000002;
543         }
544         result.table_ = table_;
545         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
546           to_bitField0_ |= 0x00000004;
547         }
548         result.creationTime_ = creationTime_;
549         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
550           to_bitField0_ |= 0x00000008;
551         }
552         result.type_ = type_;
553         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
554           to_bitField0_ |= 0x00000010;
555         }
556         result.version_ = version_;
557         result.bitField0_ = to_bitField0_;
558         onBuilt();
559         return result;
560       }
561       
562       public Builder mergeFrom(com.google.protobuf.Message other) {
563         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) {
564           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other);
565         } else {
566           super.mergeFrom(other);
567           return this;
568         }
569       }
570       
571       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) {
572         if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this;
573         if (other.hasName()) {
574           setName(other.getName());
575         }
576         if (other.hasTable()) {
577           setTable(other.getTable());
578         }
579         if (other.hasCreationTime()) {
580           setCreationTime(other.getCreationTime());
581         }
582         if (other.hasType()) {
583           setType(other.getType());
584         }
585         if (other.hasVersion()) {
586           setVersion(other.getVersion());
587         }
588         this.mergeUnknownFields(other.getUnknownFields());
589         return this;
590       }
591       
592       public final boolean isInitialized() {
593         if (!hasName()) {
594           
595           return false;
596         }
597         return true;
598       }
599       
600       public Builder mergeFrom(
601           com.google.protobuf.CodedInputStream input,
602           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
603           throws java.io.IOException {
604         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
605           com.google.protobuf.UnknownFieldSet.newBuilder(
606             this.getUnknownFields());
607         while (true) {
608           int tag = input.readTag();
609           switch (tag) {
610             case 0:
611               this.setUnknownFields(unknownFields.build());
612               onChanged();
613               return this;
614             default: {
615               if (!parseUnknownField(input, unknownFields,
616                                      extensionRegistry, tag)) {
617                 this.setUnknownFields(unknownFields.build());
618                 onChanged();
619                 return this;
620               }
621               break;
622             }
623             case 10: {
624               bitField0_ |= 0x00000001;
625               name_ = input.readBytes();
626               break;
627             }
628             case 18: {
629               bitField0_ |= 0x00000002;
630               table_ = input.readBytes();
631               break;
632             }
633             case 24: {
634               bitField0_ |= 0x00000004;
635               creationTime_ = input.readInt64();
636               break;
637             }
638             case 32: {
639               int rawValue = input.readEnum();
640               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue);
641               if (value == null) {
642                 unknownFields.mergeVarintField(4, rawValue);
643               } else {
644                 bitField0_ |= 0x00000008;
645                 type_ = value;
646               }
647               break;
648             }
649             case 40: {
650               bitField0_ |= 0x00000010;
651               version_ = input.readInt32();
652               break;
653             }
654           }
655         }
656       }
657       
658       private int bitField0_;
659       
660       // required string name = 1;
661       private java.lang.Object name_ = "";
662       public boolean hasName() {
663         return ((bitField0_ & 0x00000001) == 0x00000001);
664       }
665       public String getName() {
666         java.lang.Object ref = name_;
667         if (!(ref instanceof String)) {
668           String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
669           name_ = s;
670           return s;
671         } else {
672           return (String) ref;
673         }
674       }
675       public Builder setName(String value) {
676         if (value == null) {
677           throw new NullPointerException();
678         }
679         bitField0_ |= 0x00000001;
680         name_ = value;
681         onChanged();
682         return this;
683       }
684       public Builder clearName() {
685         bitField0_ = (bitField0_ & ~0x00000001);
686         name_ = getDefaultInstance().getName();
687         onChanged();
688         return this;
689       }
690       void setName(com.google.protobuf.ByteString value) {
691         bitField0_ |= 0x00000001;
692         name_ = value;
693         onChanged();
694       }
695       
696       // optional string table = 2;
697       private java.lang.Object table_ = "";
698       public boolean hasTable() {
699         return ((bitField0_ & 0x00000002) == 0x00000002);
700       }
701       public String getTable() {
702         java.lang.Object ref = table_;
703         if (!(ref instanceof String)) {
704           String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
705           table_ = s;
706           return s;
707         } else {
708           return (String) ref;
709         }
710       }
711       public Builder setTable(String value) {
712         if (value == null) {
713           throw new NullPointerException();
714         }
715         bitField0_ |= 0x00000002;
716         table_ = value;
717         onChanged();
718         return this;
719       }
720       public Builder clearTable() {
721         bitField0_ = (bitField0_ & ~0x00000002);
722         table_ = getDefaultInstance().getTable();
723         onChanged();
724         return this;
725       }
726       void setTable(com.google.protobuf.ByteString value) {
727         bitField0_ |= 0x00000002;
728         table_ = value;
729         onChanged();
730       }
731       
732       // optional int64 creationTime = 3 [default = 0];
733       private long creationTime_ ;
734       public boolean hasCreationTime() {
735         return ((bitField0_ & 0x00000004) == 0x00000004);
736       }
737       public long getCreationTime() {
738         return creationTime_;
739       }
740       public Builder setCreationTime(long value) {
741         bitField0_ |= 0x00000004;
742         creationTime_ = value;
743         onChanged();
744         return this;
745       }
746       public Builder clearCreationTime() {
747         bitField0_ = (bitField0_ & ~0x00000004);
748         creationTime_ = 0L;
749         onChanged();
750         return this;
751       }
752       
753       // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
754       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
755       public boolean hasType() {
756         return ((bitField0_ & 0x00000008) == 0x00000008);
757       }
758       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
759         return type_;
760       }
761       public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) {
762         if (value == null) {
763           throw new NullPointerException();
764         }
765         bitField0_ |= 0x00000008;
766         type_ = value;
767         onChanged();
768         return this;
769       }
770       public Builder clearType() {
771         bitField0_ = (bitField0_ & ~0x00000008);
772         type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
773         onChanged();
774         return this;
775       }
776       
777       // optional int32 version = 5;
778       private int version_ ;
779       public boolean hasVersion() {
780         return ((bitField0_ & 0x00000010) == 0x00000010);
781       }
782       public int getVersion() {
783         return version_;
784       }
785       public Builder setVersion(int value) {
786         bitField0_ |= 0x00000010;
787         version_ = value;
788         onChanged();
789         return this;
790       }
791       public Builder clearVersion() {
792         bitField0_ = (bitField0_ & ~0x00000010);
793         version_ = 0;
794         onChanged();
795         return this;
796       }
797       
798       // @@protoc_insertion_point(builder_scope:SnapshotDescription)
799     }
800     
801     static {
802       defaultInstance = new SnapshotDescription(true);
803       defaultInstance.initFields();
804     }
805     
806     // @@protoc_insertion_point(class_scope:SnapshotDescription)
807   }
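  // Usage sketch (illustrative, not part of the generated file): building a
  // SnapshotDescription and round-tripping it through its wire format. The snapshot
  // and table names are hypothetical; build() throws if the required "name" is unset.
  //
  //   SnapshotDescription desc = SnapshotDescription.newBuilder()
  //       .setName("snapshot-20130101")
  //       .setTable("usertable")
  //       .setCreationTime(System.currentTimeMillis())
  //       .setType(SnapshotDescription.Type.FLUSH)
  //       .build();
  //   byte[] wire = desc.toByteArray();
  //   SnapshotDescription parsed = SnapshotDescription.parseFrom(wire);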
808   
809   public interface RegionServerInfoOrBuilder
810       extends com.google.protobuf.MessageOrBuilder {
811     
812     // optional int32 infoPort = 1;
813     boolean hasInfoPort();
814     int getInfoPort();
815   }
816   public static final class RegionServerInfo extends
817       com.google.protobuf.GeneratedMessage
818       implements RegionServerInfoOrBuilder {
819     // Use RegionServerInfo.newBuilder() to construct.
820     private RegionServerInfo(Builder builder) {
821       super(builder);
822     }
823     private RegionServerInfo(boolean noInit) {}
824     
825     private static final RegionServerInfo defaultInstance;
826     public static RegionServerInfo getDefaultInstance() {
827       return defaultInstance;
828     }
829     
830     public RegionServerInfo getDefaultInstanceForType() {
831       return defaultInstance;
832     }
833     
834     public static final com.google.protobuf.Descriptors.Descriptor
835         getDescriptor() {
836       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
837     }
838     
839     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
840         internalGetFieldAccessorTable() {
841       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable;
842     }
843     
844     private int bitField0_;
845     // optional int32 infoPort = 1;
846     public static final int INFOPORT_FIELD_NUMBER = 1;
847     private int infoPort_;
848     public boolean hasInfoPort() {
849       return ((bitField0_ & 0x00000001) == 0x00000001);
850     }
851     public int getInfoPort() {
852       return infoPort_;
853     }
854     
855     private void initFields() {
856       infoPort_ = 0;
857     }
858     private byte memoizedIsInitialized = -1;
859     public final boolean isInitialized() {
860       byte isInitialized = memoizedIsInitialized;
861       if (isInitialized != -1) return isInitialized == 1;
862       
863       memoizedIsInitialized = 1;
864       return true;
865     }
866     
867     public void writeTo(com.google.protobuf.CodedOutputStream output)
868                         throws java.io.IOException {
869       getSerializedSize();
870       if (((bitField0_ & 0x00000001) == 0x00000001)) {
871         output.writeInt32(1, infoPort_);
872       }
873       getUnknownFields().writeTo(output);
874     }
875     
876     private int memoizedSerializedSize = -1;
877     public int getSerializedSize() {
878       int size = memoizedSerializedSize;
879       if (size != -1) return size;
880     
881       size = 0;
882       if (((bitField0_ & 0x00000001) == 0x00000001)) {
883         size += com.google.protobuf.CodedOutputStream
884           .computeInt32Size(1, infoPort_);
885       }
886       size += getUnknownFields().getSerializedSize();
887       memoizedSerializedSize = size;
888       return size;
889     }
890     
891     private static final long serialVersionUID = 0L;
892     @java.lang.Override
893     protected java.lang.Object writeReplace()
894         throws java.io.ObjectStreamException {
895       return super.writeReplace();
896     }
897     
898     @java.lang.Override
899     public boolean equals(final java.lang.Object obj) {
900       if (obj == this) {
901        return true;
902       }
903       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)) {
904         return super.equals(obj);
905       }
906       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) obj;
907       
908       boolean result = true;
909       result = result && (hasInfoPort() == other.hasInfoPort());
910       if (hasInfoPort()) {
911         result = result && (getInfoPort()
912             == other.getInfoPort());
913       }
914       result = result &&
915           getUnknownFields().equals(other.getUnknownFields());
916       return result;
917     }
918     
919     @java.lang.Override
920     public int hashCode() {
921       int hash = 41;
922       hash = (19 * hash) + getDescriptorForType().hashCode();
923       if (hasInfoPort()) {
924         hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
925         hash = (53 * hash) + getInfoPort();
926       }
927       hash = (29 * hash) + getUnknownFields().hashCode();
928       return hash;
929     }
930     
931     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
932         com.google.protobuf.ByteString data)
933         throws com.google.protobuf.InvalidProtocolBufferException {
934       return newBuilder().mergeFrom(data).buildParsed();
935     }
936     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
937         com.google.protobuf.ByteString data,
938         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
939         throws com.google.protobuf.InvalidProtocolBufferException {
940       return newBuilder().mergeFrom(data, extensionRegistry)
941                .buildParsed();
942     }
943     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(byte[] data)
944         throws com.google.protobuf.InvalidProtocolBufferException {
945       return newBuilder().mergeFrom(data).buildParsed();
946     }
947     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
948         byte[] data,
949         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
950         throws com.google.protobuf.InvalidProtocolBufferException {
951       return newBuilder().mergeFrom(data, extensionRegistry)
952                .buildParsed();
953     }
954     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input)
955         throws java.io.IOException {
956       return newBuilder().mergeFrom(input).buildParsed();
957     }
958     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
959         java.io.InputStream input,
960         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
961         throws java.io.IOException {
962       return newBuilder().mergeFrom(input, extensionRegistry)
963                .buildParsed();
964     }
965     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input)
966         throws java.io.IOException {
967       Builder builder = newBuilder();
968       if (builder.mergeDelimitedFrom(input)) {
969         return builder.buildParsed();
970       } else {
971         return null;
972       }
973     }
974     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(
975         java.io.InputStream input,
976         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
977         throws java.io.IOException {
978       Builder builder = newBuilder();
979       if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
980         return builder.buildParsed();
981       } else {
982         return null;
983       }
984     }
985     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
986         com.google.protobuf.CodedInputStream input)
987         throws java.io.IOException {
988       return newBuilder().mergeFrom(input).buildParsed();
989     }
990     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
991         com.google.protobuf.CodedInputStream input,
992         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
993         throws java.io.IOException {
994       return newBuilder().mergeFrom(input, extensionRegistry)
995                .buildParsed();
996     }
997     
998     public static Builder newBuilder() { return Builder.create(); }
999     public Builder newBuilderForType() { return newBuilder(); }
1000     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo prototype) {
1001       return newBuilder().mergeFrom(prototype);
1002     }
1003     public Builder toBuilder() { return newBuilder(this); }
1004     
1005     @java.lang.Override
1006     protected Builder newBuilderForType(
1007         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1008       Builder builder = new Builder(parent);
1009       return builder;
1010     }
1011     public static final class Builder extends
1012         com.google.protobuf.GeneratedMessage.Builder<Builder>
1013        implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder {
1014       public static final com.google.protobuf.Descriptors.Descriptor
1015           getDescriptor() {
1016         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
1017       }
1018       
1019       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1020           internalGetFieldAccessorTable() {
1021         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable;
1022       }
1023       
1024       // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.newBuilder()
1025       private Builder() {
1026         maybeForceBuilderInitialization();
1027       }
1028       
1029       private Builder(BuilderParent parent) {
1030         super(parent);
1031         maybeForceBuilderInitialization();
1032       }
1033       private void maybeForceBuilderInitialization() {
1034         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1035         }
1036       }
1037       private static Builder create() {
1038         return new Builder();
1039       }
1040       
1041       public Builder clear() {
1042         super.clear();
1043         infoPort_ = 0;
1044         bitField0_ = (bitField0_ & ~0x00000001);
1045         return this;
1046       }
1047       
1048       public Builder clone() {
1049         return create().mergeFrom(buildPartial());
1050       }
1051       
1052       public com.google.protobuf.Descriptors.Descriptor
1053           getDescriptorForType() {
1054         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDescriptor();
1055       }
1056       
1057       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() {
1058         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance();
1059       }
1060       
1061       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo build() {
1062         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial();
1063         if (!result.isInitialized()) {
1064           throw newUninitializedMessageException(result);
1065         }
1066         return result;
1067       }
1068       
1069       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildParsed()
1070           throws com.google.protobuf.InvalidProtocolBufferException {
1071         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial();
1072         if (!result.isInitialized()) {
1073           throw newUninitializedMessageException(
1074             result).asInvalidProtocolBufferException();
1075         }
1076         return result;
1077       }
1078       
1079       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildPartial() {
1080         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo(this);
1081         int from_bitField0_ = bitField0_;
1082         int to_bitField0_ = 0;
1083         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1084           to_bitField0_ |= 0x00000001;
1085         }
1086         result.infoPort_ = infoPort_;
1087         result.bitField0_ = to_bitField0_;
1088         onBuilt();
1089         return result;
1090       }
1091       
1092       public Builder mergeFrom(com.google.protobuf.Message other) {
1093         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) {
1094           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)other);
1095         } else {
1096           super.mergeFrom(other);
1097           return this;
1098         }
1099       }
1100       
1101       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other) {
1102         if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance()) return this;
1103         if (other.hasInfoPort()) {
1104           setInfoPort(other.getInfoPort());
1105         }
1106         this.mergeUnknownFields(other.getUnknownFields());
1107         return this;
1108       }
1109       
1110       public final boolean isInitialized() {
1111         return true;
1112       }
1113       
1114       public Builder mergeFrom(
1115           com.google.protobuf.CodedInputStream input,
1116           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1117           throws java.io.IOException {
1118         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1119           com.google.protobuf.UnknownFieldSet.newBuilder(
1120             this.getUnknownFields());
1121         while (true) {
1122           int tag = input.readTag();
1123           switch (tag) {
1124             case 0:
1125               this.setUnknownFields(unknownFields.build());
1126               onChanged();
1127               return this;
1128             default: {
1129               if (!parseUnknownField(input, unknownFields,
1130                                      extensionRegistry, tag)) {
1131                 this.setUnknownFields(unknownFields.build());
1132                 onChanged();
1133                 return this;
1134               }
1135               break;
1136             }
1137             case 8: {
1138               bitField0_ |= 0x00000001;
1139               infoPort_ = input.readInt32();
1140               break;
1141             }
1142           }
1143         }
1144       }
1145       
1146       private int bitField0_;
1147       
1148       // optional int32 infoPort = 1;
1149       private int infoPort_ ;
1150       public boolean hasInfoPort() {
1151         return ((bitField0_ & 0x00000001) == 0x00000001);
1152       }
1153       public int getInfoPort() {
1154         return infoPort_;
1155       }
1156       public Builder setInfoPort(int value) {
1157         bitField0_ |= 0x00000001;
1158         infoPort_ = value;
1159         onChanged();
1160         return this;
1161       }
1162       public Builder clearInfoPort() {
1163         bitField0_ = (bitField0_ & ~0x00000001);
1164         infoPort_ = 0;
1165         onChanged();
1166         return this;
1167       }
1168       
1169       // @@protoc_insertion_point(builder_scope:RegionServerInfo)
1170     }
1171     
1172     static {
1173       defaultInstance = new RegionServerInfo(true);
1174       defaultInstance.initFields();
1175     }
1176     
1177     // @@protoc_insertion_point(class_scope:RegionServerInfo)
1178   }
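  // Usage sketch (illustrative, not part of the generated file): RegionServerInfo has a
  // single optional field, so even an empty message is valid. The port is hypothetical.
  //
  //   RegionServerInfo info = RegionServerInfo.newBuilder().setInfoPort(60030).build();
  //   int port = info.hasInfoPort() ? info.getInfoPort() : -1;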
1179   
1180   private static com.google.protobuf.Descriptors.Descriptor
1181     internal_static_SnapshotDescription_descriptor;
1182   private static
1183     com.google.protobuf.GeneratedMessage.FieldAccessorTable
1184       internal_static_SnapshotDescription_fieldAccessorTable;
1185   private static com.google.protobuf.Descriptors.Descriptor
1186     internal_static_RegionServerInfo_descriptor;
1187   private static
1188     com.google.protobuf.GeneratedMessage.FieldAccessorTable
1189       internal_static_RegionServerInfo_fieldAccessorTable;
1190   
1191   public static com.google.protobuf.Descriptors.FileDescriptor
1192       getDescriptor() {
1193     return descriptor;
1194   }
1195   private static com.google.protobuf.Descriptors.FileDescriptor
1196       descriptor;
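  // The static initializer below feeds the serialized hbase.proto file descriptor
  // (the escaped string literals in descriptorData) to the protobuf runtime and wires
  // up the per-message descriptors and field accessor tables used for reflection.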
1197   static {
1198     java.lang.String[] descriptorData = {
1199       "\n\013hbase.proto\"\274\001\n\023SnapshotDescription\022\014\n" +
1200       "\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\027\n\014creationTi" +
1201       "me\030\003 \001(\003:\0010\022.\n\004type\030\004 \001(\0162\031.SnapshotDesc" +
1202       "ription.Type:\005FLUSH\022\017\n\007version\030\005 \001(\005\".\n\004" +
1203       "Type\022\014\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLU" +
1204       "SH\020\002\"$\n\020RegionServerInfo\022\020\n\010infoPort\030\001 \001" +
1205       "(\005B>\n*org.apache.hadoop.hbase.protobuf.g" +
1206       "eneratedB\013HBaseProtosH\001\240\001\001"
1207     };
1208     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
1209       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
1210         public com.google.protobuf.ExtensionRegistry assignDescriptors(
1211             com.google.protobuf.Descriptors.FileDescriptor root) {
1212           descriptor = root;
1213           internal_static_SnapshotDescription_descriptor =
1214             getDescriptor().getMessageTypes().get(0);
1215           internal_static_SnapshotDescription_fieldAccessorTable = new
1216             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1217               internal_static_SnapshotDescription_descriptor,
1218               new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", },
1219               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class,
1220               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class);
1221           internal_static_RegionServerInfo_descriptor =
1222             getDescriptor().getMessageTypes().get(1);
1223           internal_static_RegionServerInfo_fieldAccessorTable = new
1224             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1225               internal_static_RegionServerInfo_descriptor,
1226               new java.lang.String[] { "InfoPort", },
1227               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class,
1228               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class);
1229           return null;
1230         }
1231       };
1232     com.google.protobuf.Descriptors.FileDescriptor
1233       .internalBuildGeneratedFileFrom(descriptorData,
1234         new com.google.protobuf.Descriptors.FileDescriptor[] {
1235         }, assigner);
1236   }
1237   
1238   // @@protoc_insertion_point(outer_class_scope)
1239 }